From 53fe9c652fb3ed48e411ec9916cea8f6704c7039 Mon Sep 17 00:00:00 2001 From: Enrico Fagnoni Date: Sun, 31 Dec 2023 15:10:47 +0100 Subject: [PATCH] Dev 4.0 (#21) * refactory for 4.0.0 * updated --- CHANGELOG.md | 4 +- CONTRIBUTING.md | 17 ++ Dockerfile | 60 ++-- LICENSE | 2 +- README.md | 107 +++----- bin/sdaas | 4 + docker-compose.yaml | 25 ++ etc/splash.txt | 8 + modules/core | 220 +++++++++++++++ modules/driver | 129 +++++++++ modules/sparql | 116 ++++++++ modules/testdriver | 10 + modules/view | 63 +++++ modules/w3c | 81 ++++++ scripts/activity.include | 139 ---------- scripts/archetypes/README.md | 1 - scripts/asserting.include | 69 ----- .../axioms/uncompleted_graphs.sparq_select | 14 - scripts/bg_reasoning.include | 257 ------------------ scripts/caching.include | 14 - scripts/curl_utils.include | 39 --- scripts/kb.include | 60 ---- scripts/learning.include | 198 -------------- scripts/logging.include | 55 ---- scripts/platform.include | 101 ------- scripts/reasoning.include | 40 --- scripts/ruleset.include | 110 -------- scripts/sdaas | 165 ----------- scripts/teaching.include | 38 --- scripts/testing.include | 50 ---- sdaas-entrypoint.sh | 20 -- tests/data/ask-false.xml | 6 + tests/data/ask-true.xml | 6 + tests/data/empty-select.xml | 10 + tests/data/empty-store.nt | 41 +++ tests/data/not-empty-select.xml | 13 + tests/data/sample1.ttl | 69 ----- tests/functional/data/geo.ttl | 52 ---- tests/functional/kbQueryTest.bats | 60 ---- tests/functional/platformBaseTest.bats | 19 -- tests/functional/sparqlTest.bats | 75 +++++ tests/functional/w3cTest.bats | 44 +++ tests/system/gettingStartedCETest.bats | 51 ++++ tests/system/platform/.gitignore | 3 - .../axioms/calculated_trusts.construct | 26 -- .../platform/axioms/city_name_index.construct | 5 - .../axioms/city_names_starting_with_m.select | 6 - .../axioms/default_trustmap.construct | 21 -- tests/system/platform/build.sdaas | 41 --- tests/system/platform/data/introspection.ttl | 67 ----- 
tests/system/platform/data/kees.ttl | 8 - tests/system/platform/data/trustmap.ttl | 40 --- tests/system/platform/gateways/istat.awk | 2 - tests/system/platform/platformTest.bats | 50 ---- tests/system/platform/questions/README.md | 5 - .../platform/questions/triplecounts.sparql | 3 - .../system/platform/tests/1_istat_exists.ask | 3 - tests/system/platform/tests/2_empty.select | 1 - tests/unit/activityTest.bats | 46 ---- tests/unit/assertingTest.bats | 42 --- tests/unit/cachingTest.bats | 42 --- tests/unit/coreTest.bats | 119 ++++++++ tests/unit/data/01_ruleset/1_test.construct | 0 tests/unit/data/01_ruleset/2_test.update | 0 tests/unit/data/01_ruleset/3_test.reasoning | 0 tests/unit/data/simple.csv | 5 - tests/unit/data/testing/01_test.ask | 0 tests/unit/data/testing/02_test.ask | 0 tests/unit/data/testing/03_test.select | 0 tests/unit/data/two_triples.nt | 2 - tests/unit/data/two_triples.ttl | 4 - tests/unit/driverTest.bats | 94 +++++++ tests/unit/kbTest.bats | 46 ---- tests/unit/learnTest.bats | 86 ------ tests/unit/loggingTest.bats | 75 ----- tests/unit/reasoningTest.bats | 26 -- tests/unit/rulesetTest.bats | 35 --- tests/unit/sparqlTest.bats | 52 ++++ tests/unit/stubs/asserting_stub.include | 38 --- tests/unit/stubs/bg_reasoning_stub.include | 9 - tests/unit/stubs/caching_stub.include | 5 - tests/unit/stubs/chmod_stub.include | 1 - tests/unit/stubs/curl_stub.include | 22 -- tests/unit/stubs/date_stub.include | 1 - tests/unit/stubs/gzip_stub.include | 1 - tests/unit/stubs/kb_stub.include | 5 - tests/unit/testingTest.bats | 24 -- tests/unit/testsid.include | 24 ++ tests/unit/viewTest.bats | 53 ++++ 89 files changed, 1323 insertions(+), 2477 deletions(-) create mode 100644 CONTRIBUTING.md create mode 100644 bin/sdaas create mode 100644 docker-compose.yaml create mode 100644 etc/splash.txt create mode 100644 modules/core create mode 100644 modules/driver create mode 100644 modules/sparql create mode 100644 modules/testdriver create mode 100644 modules/view create 
mode 100644 modules/w3c delete mode 100644 scripts/activity.include delete mode 100644 scripts/archetypes/README.md delete mode 100644 scripts/asserting.include delete mode 100644 scripts/axioms/uncompleted_graphs.sparq_select delete mode 100644 scripts/bg_reasoning.include delete mode 100644 scripts/caching.include delete mode 100644 scripts/curl_utils.include delete mode 100644 scripts/kb.include delete mode 100644 scripts/learning.include delete mode 100644 scripts/logging.include delete mode 100644 scripts/platform.include delete mode 100644 scripts/reasoning.include delete mode 100644 scripts/ruleset.include delete mode 100644 scripts/sdaas delete mode 100644 scripts/teaching.include delete mode 100644 scripts/testing.include delete mode 100644 sdaas-entrypoint.sh create mode 100644 tests/data/ask-false.xml create mode 100644 tests/data/ask-true.xml create mode 100644 tests/data/empty-select.xml create mode 100644 tests/data/empty-store.nt create mode 100644 tests/data/not-empty-select.xml delete mode 100644 tests/data/sample1.ttl delete mode 100644 tests/functional/data/geo.ttl delete mode 100644 tests/functional/kbQueryTest.bats delete mode 100644 tests/functional/platformBaseTest.bats create mode 100644 tests/functional/sparqlTest.bats create mode 100644 tests/functional/w3cTest.bats create mode 100644 tests/system/gettingStartedCETest.bats delete mode 100644 tests/system/platform/.gitignore delete mode 100644 tests/system/platform/axioms/calculated_trusts.construct delete mode 100644 tests/system/platform/axioms/city_name_index.construct delete mode 100644 tests/system/platform/axioms/city_names_starting_with_m.select delete mode 100644 tests/system/platform/axioms/default_trustmap.construct delete mode 100644 tests/system/platform/build.sdaas delete mode 100644 tests/system/platform/data/introspection.ttl delete mode 100644 tests/system/platform/data/kees.ttl delete mode 100644 tests/system/platform/data/trustmap.ttl delete mode 100644 
tests/system/platform/gateways/istat.awk delete mode 100644 tests/system/platform/platformTest.bats delete mode 100644 tests/system/platform/questions/README.md delete mode 100644 tests/system/platform/questions/triplecounts.sparql delete mode 100644 tests/system/platform/tests/1_istat_exists.ask delete mode 100644 tests/system/platform/tests/2_empty.select delete mode 100644 tests/unit/activityTest.bats delete mode 100644 tests/unit/assertingTest.bats delete mode 100644 tests/unit/cachingTest.bats create mode 100644 tests/unit/coreTest.bats delete mode 100644 tests/unit/data/01_ruleset/1_test.construct delete mode 100644 tests/unit/data/01_ruleset/2_test.update delete mode 100644 tests/unit/data/01_ruleset/3_test.reasoning delete mode 100644 tests/unit/data/simple.csv delete mode 100644 tests/unit/data/testing/01_test.ask delete mode 100644 tests/unit/data/testing/02_test.ask delete mode 100644 tests/unit/data/testing/03_test.select delete mode 100644 tests/unit/data/two_triples.nt delete mode 100644 tests/unit/data/two_triples.ttl create mode 100644 tests/unit/driverTest.bats delete mode 100644 tests/unit/kbTest.bats delete mode 100644 tests/unit/learnTest.bats delete mode 100644 tests/unit/loggingTest.bats delete mode 100644 tests/unit/reasoningTest.bats delete mode 100644 tests/unit/rulesetTest.bats create mode 100644 tests/unit/sparqlTest.bats delete mode 100644 tests/unit/stubs/asserting_stub.include delete mode 100644 tests/unit/stubs/bg_reasoning_stub.include delete mode 100644 tests/unit/stubs/caching_stub.include delete mode 100644 tests/unit/stubs/chmod_stub.include delete mode 100644 tests/unit/stubs/curl_stub.include delete mode 100644 tests/unit/stubs/date_stub.include delete mode 100644 tests/unit/stubs/gzip_stub.include delete mode 100644 tests/unit/stubs/kb_stub.include delete mode 100644 tests/unit/testingTest.bats create mode 100644 tests/unit/testsid.include create mode 100644 tests/unit/viewTest.bats diff --git a/CHANGELOG.md b/CHANGELOG.md 
index 6498c74..d585c7f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,9 +4,9 @@ This project adheres to [Semantic Versioning](http://semver.org/). ## [unreleased] -### Fixed +Complete project refactory + -- reopened bug #19 (FAILCHECK mode does not work): removed extra blank ## [3.3.1] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..efa2585 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,17 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to +[LinkedData.Center's Contributor License Agreement (CLA)](http://sites.linkeddata.center/help/legal/cla_v1). + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. 
+ diff --git a/Dockerfile b/Dockerfile index f0b387c..1c723b9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,54 +1,36 @@ -# Copyright (C) 2019-2020 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -FROM alpine/helm as helm -FROM mikefarah/yq as yq -FROM linkeddatacenter/sdaas-rdfstore:2.1.5 +# Copyright (C) 2023 LinkedData.Center - All Rights Reserved +FROM ubuntu:22.04 LABEL authors="enrico@linkeddata.center" -USER root -COPY --from=helm /usr/bin/helm /usr/bin/helm -COPY --from=yq /usr/bin/yq /usr/bin/yq - -ARG SHACLVER=1.3.2 -ARG SHACLROOT=/opt/shacl-${SHACLVER}/bin - RUN apt-get update && \ apt-get install -y --no-install-recommends \ - gettext \ + curl \ + ca-certificates \ + raptor2-utils \ bats \ - git \ - unzip \ jq \ - csvtool && \ - curl --output /tmp/shacl.zip https://repo1.maven.org/maven2/org/topbraid/shacl/${SHACLVER}/shacl-${SHACLVER}-bin.zip && \ - unzip /tmp/shacl.zip -d /opt && \ - chmod +x ${SHACLROOT}/* - - + csvtool \ + libxml2-utils ###### Variables affecting the image building -ENV SDAAS_BIN_DIR=/opt/sdaas +ENV SDAAS_INSTALL_DIR=/opt/sdaas ENV SDAAS_WORKSPACE=/workspace -ENV SDAAS_LOG_DIR="$SDAAS_WORKSPACE" -ENV PATH=${SHACLROOT}:${PATH} +COPY modules "$SDAAS_INSTALL_DIR" +COPY bin/sdaas /usr/bin/sdaas +COPY /etc/* /etc/ +RUN chmod -R 0755 /usr/bin/sdaas -###### Runtime variables -ENV SD_UPLOAD_DIR /var/spool/sdaas -ENV SD_SPARQL_ENDPOINT http://localhost:8080/sdaas/sparql -ENV SD_QUADSTORE kb - -COPY scripts "$SDAAS_BIN_DIR" -COPY sdaas-entrypoint.sh /sdaas-entrypoint.sh - -RUN mkdir -p "${SDAAS_BIN_DIR}" "${SDAAS_LOG_DIR}" "${SD_UPLOAD_DIR}" "${SDAAS_WORKSPACE}" ; \ - chmod -R 0755 "$SDAAS_BIN_DIR" /sdaas-entrypoint.sh; \ - chown -R jetty.jetty "${SDAAS_WORKSPACE}" "$SDAAS_LOG_DIR" "$SD_UPLOAD_DIR" +RUN useradd -m -d /workspace -s /bin/bash -g users -u 1001 sdaas +USER sdaas +WORKDIR "${SDAAS_WORKSPACE}" +## Variables affecting program execution +ENV SD_LOG_PRIORITY=6 +ENV 
SD_TMP_DIR="/tmp" -USER jetty +# Uncomment this to change the default web agent signature +#ENV SD_APPLICATION_ID="example.org SDaaS" -WORKDIR "${SDAAS_WORKSPACE}" -ENTRYPOINT ["/sdaas-entrypoint.sh"] -CMD [""] \ No newline at end of file +ENTRYPOINT ["/usr/bin/sdaas"] \ No newline at end of file diff --git a/LICENSE b/LICENSE index 3ab1dba..f8b7bea 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2018-2019 LinkedData.Center SRL +Copyright (c) 2018-2024 LinkedData.Center SRL Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index e0f5308..9c79e97 100644 --- a/README.md +++ b/README.md @@ -4,117 +4,86 @@ A platform to build knowledge graphs. -This is an open source implementation of the [LinkeData.Center SDaaS™ product](https://it.linkeddata.center/p/sdaas). -See documentation in [SDaaS wiki](https://bitbucket.org/linkeddatacenter/sdaas/wiki/Home). +This is the open source implementation of the [LinkeData.Center SDaaS™ product](https://it.linkeddata.center/p/sdaas). -The SDaaS requires [docker](https://www.docker.com/) - -This implementation embeds a sdaas-rdfstore based on blazegraph. - - - -> ## End of Support approaching +> ## end-of-support approaching for Anassimene releases (SDaaS 3.x) > -> Starting from the major release of version 4 of SDaaS Enterprise Edition, planned for 2024 Q1, +> Starting from the major release of version 4 of SDaaS (Pitagora), planned in 2024 Q1, > The Community Edition 3.x will no longer supported by LinkedData.Center. > > If you plan to use SDaaS for professional use, please consider moving to Enterprise Edition. > Contact https://LinkedData.Center for more info, prices, support and documentation. 
-> -> This repository will continue to exists and maintained and support from community is welcome -> -> As always, documentation, support, and training for SDaaS Community Edition will continue to be available by LinkedData.Center as professional services. - -## 🚀 Quickstart - -This command will start a sdaas platform attached to an internal rdfstore with a micro memory foorprint - docker run --rm -ti -p 8080:8080 linkeddatacenter/sdaas-ce --reboot -browse local reasoner at http://localhost:8080/sdaas type `exit` to leave the platform. -This command is the same as the previous but does not expose the workbench and uses a small memory foorprint - - docker run --rm -ti -e SDAAS_SIZE=small linkeddatacenter/sdaas-ce +### 🚀 Quickstart +``` +docker compose up -d --build +docker compose logs tests +docker compose exec cli sdaas +exit +docker compose down +``` -To run sdaas platform withouth the local rdfstore - docker run --rm -ti -e SD_NOWARMUP=1 linkeddatacenter/sdaas-ce - -Use this command to start and stop by hand a local micro rdfstore: +## Installation & usage - SD_START_LOCAL_REASONING_ENGINE # you can specify the required memory footprint, default=micro - SD_STOP_LOCAL_REASONING_ENGINE +See the [documentation](https://gitlab.com/linkeddatacenter/sdaas/doc) for more info. ## Start test environment -** build local image:** - - - docker build -t sdaas . +**setup a vpn and run a graph store:** +To run functional and system tests you will need the local instance of blazegraph running in the same network of SDaaS. +By default, test scripts expect blazegraph endpoint at http://kb:8080/sdaas. 
-**Smoke tests:** +``` +docker network create myvpn +docker run --network myvpn --name kb -d linkeddatacenter/sdaas-rdfstore +``` -Manually start sdaas cli without the local reasoner +**build and run local image:** ``` -docker run --name sdmp --rm -ti -v ${PWD}:/workspace --entrypoint bash sdaas -git --version -jq --version -yq --version -gettext --version -command -v csvtool -command -v shaclvalidate.sh -scripts/sdaas -exit +docker build -t linkeddatacenter/sdaas-ce . +docker run --rm -ti --network=myvpn -v "${PWD}":/workspace linkeddatacenter/sdaas-ce ``` - **Unit tests:** -In order to run unit tests bats is needed (see https://github.com/bats-core/bats-core ): +In order to run unit tests bats is used (see https://github.com/bats-core/bats-core ): - bats tests/unit/ - +``` +bats tests/unit/ +``` **Functional tests:** -To run functional and system tests you will need the local instance of blazegraph. -By default, test scripts expect blazegraph endpoint at http://localhost:8080/sdaas -but you can configure a different address exporting the the SD_SPARQL_ENDPOINT. -The instance of blazegraph must share /workspace volume with sdaas. 
- -For functional test execute: +For functional tests, execute: ``` -/sdaas-start -d #start embedded graph engine in background bats tests/functional ``` **System tests:** -For system test, verify that the host is able to access Internet then execute +For system tests, verify that the host can access the Internet then execute: ``` -bats tests/system/platform -scripts/sdaas -SD_SPARQL_QUERY csv "SELECT (COUNT(?s) AS ?edges) WHERE{?s?p?o}" -curl -d ESTCARD http://localhost:8080/sdaas/sparql -# in both case you should > 31K triples -SD_SPARQL_UPDATE "DROP ALL" -exit +bats tests/system ``` To free the docker resources: - exit - +``` +exit +docker rm -f kb +docker network rm myvpn +``` -Have a look also to the [developer wiki](https://github.com/linkeddatacenter/sdaas-ce/wiki) ## Push to docker hub @@ -125,9 +94,9 @@ To push a new docker image to docker hub: docker login # input the docker hub credentials... docker build -t linkeddatacenter/sdaas-ce . -docker tag linkeddatacenter/sdaas-ce linkeddatacenter/sdaas-ce:3.3.1 docker push linkeddatacenter/sdaas-ce -docker push linkeddatacenter/sdaas-ce:3.3.1 +docker tag linkeddatacenter/sdaas-ce linkeddatacenter/sdaas-ce:4.0.0-rc1 +docker push linkeddatacenter/sdaas-ce:4.0.0-rc1 ``` @@ -135,7 +104,7 @@ docker push linkeddatacenter/sdaas-ce:3.3.1 The sdaas community edition platform is derived from [LinkedData.Center SDaas Product](https://it.linkeddata.center/p/sdaas) and licensed with MIT by LinkedData.Center -Copyright (C) 2018-2023 LinkedData.Center SRL +Copyright (C) 2018-2024 LinkedData.Center SRL - All Rights Reserved Permission to copy and modify is granted under the [MIT license](LICENSE) diff --git a/bin/sdaas b/bin/sdaas new file mode 100644 index 0000000..7cbcb8d --- /dev/null +++ b/bin/sdaas @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +# Copyright (C) 2023 LinkedData.Center - All Rights Reserved + +exec bash --init-file $SDAAS_INSTALL_DIR/core "$@" diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 
100644 index 0000000..b6c67fe --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,25 @@ +version: "3.3" + +services: + + kb: + image: linkeddatacenter/sdaas-rdfstore + ports: + - "9999:8080" + + cli: + build: . + stdin_open: true + tty: true + depends_on: + - kb + volumes: + - .:/workspace + + tests: + build: . + command: sdaas -c "bats tests/{unit,functional,system}" + depends_on: + - kb + volumes: + - .:/workspace diff --git a/etc/splash.txt b/etc/splash.txt new file mode 100644 index 0000000..5c3bcc2 --- /dev/null +++ b/etc/splash.txt @@ -0,0 +1,8 @@ + ____ ____ ____ + / ___|| _ \ __ _ __ _/ ___| + \___ \| | | |/ _` |/ _` \___ \ + ___) | |_| | (_| | (_| |___) | + |____/|____/ \__,_|\__,_|____/ + + Smart Data as a Service platform - Pitagora edition + diff --git a/modules/core b/modules/core new file mode 100644 index 0000000..9a2ab1f --- /dev/null +++ b/modules/core @@ -0,0 +1,220 @@ +if [[ ! -z ${__module_core+x} ]]; then return ; else __module_core=1 ; fi +# Copyright (C) 2019-2023 LinkedData.Center - All Rights Reserved + + +################################################################################ +## Costants (read only variables) +################################################################################ +SDAAS_VERSION="4.0" +SDAAS_VERSION_NAME="Pitagora" +SDAAS_REFERENCE_DOC="https://sdaas.netlify.app/reference/command" + +################################################################################ +## Configuration variables' default values +################################################################################ +STORE=${STORE:-"http://kb:8080/sdaas/sparql"} +STORE_TYPE=${STORE_TYPE:-w3c} + +SD_LOG_PRIORITY=${SD_LOG_PRIORITY:-5} +SD_TMP_DIR=${SD_TMP_DIR:-"/tmp"} +SD_APPLICATION_ID=${SD_APPLICATION_ID:-"SDaaS-EE"} +SD_ABORT_ON_FAIL=${SD_ABORT_ON_FAIL:-false} +SDAAS_SIGNATURE=${SDAAS_SIGNATURE:-"$SD_APPLICATION_ID $SDAAS_VERSION"} + + +################################################################################ +## Core 
functions definition +################################################################################ + +sd_log() { + # parse and validate inputs + local PRIORITY=5 + local SIZE=1024 + local FILE MESSAGE + local OPTIND opt; while getopts ":f:p:S:" opt; do + case "${opt}" in + p) PRIORITY="${OPTARG}" ;; + f) FILE="$(readlink -f "${OPTARG}")" ;; + S) + (( OPTARG += 0 )) # cast to integer + if [[ $OPTARG -gt 0 ]]; then + SIZE=$OPTARG; + fi + ;; + esac + done; shift $((OPTIND-1)) + + MESSAGE="$@" + MESSAGE="${MESSAGE:0:$SIZE}" + + # normalize priority level + case "${PRIORITY^^}" in + CRITICAL | 2) PRIORITY=2 ;; + ERROR | 3) PRIORITY=3 ;; + WARNING | 4) PRIORITY=4 ;; + NOTICE | 5) PRIORITY=5 ;; + INFORMATIONAL | 6) PRIORITY=6 ;; + DEBUG | 7) PRIORITY=7 ;; + * ) + MESSAGE="invalid log priority $PRIORITY" + PRIORITY=3 + ;; + esac + + # normalize priority level mnemonic + local mnemonic + case "${PRIORITY^^}" in + 2) mnemonic=CRITICAL ;; + 3) mnemonic=ERROR ;; + 4) mnemonic=WARNING ;; + 5) mnemonic=NOTICE ;; + 6) mnemonic=INFORMATIONAL ;; + 7) mnemonic=DEBUG ;; + * ) mnemonic="$PRIORITY" + ;; + esac + + # format and print log message + if [[ "$PRIORITY" -le "$SD_LOG_PRIORITY" ]]; then + echo "[$mnemonic] [sdaas-$$ ${FUNCNAME[1]}] "$MESSAGE"" >&2 + if [[ -r "$FILE" ]]; then + local file_size=$(stat -c %s "$FILE") + if [[ "$file_size" -gt 1024 ]]; then + head -c 1024 "$FILE" >&2 + echo "...see more info at $FILE (size=$file_size bytes)" >&2 + else + cat "$FILE" >&2 + fi + fi + fi +} + + +sd_validate() { + local var="$1" + local regexp="$2" + if ! 
[[ "${!var}" =~ $regexp ]]; then + sd_log -p ERROR "${FUNCNAME[2]} parameter $var does not match '$regexp'" + return 1 + fi +} + + +sd_abort() { + sd_log -p CRITICAL "***** CRITICAL ERROR DETECTED ($1) EXITING" + exit 2 +} + + +sd_include() { + local forceFlag=0 + if [[ "$1" == "-f" ]]; then + forceFlag=1 + shift + fi + local module="$1" + if [[ "$forceFlag" -eq 1 ]]; then + unset "__module_$1" + sd_log "forced reloading of module $module" + fi + + # test if the module was already included + if eval "[ ! -z \${__module_${module}+x} ]" ; then return ; fi + + if [[ -e "$HOME/modules/$module" ]]; then + # try local defined module (override default installation) + source "$HOME/modules/$module" "$@" + elif [[ -e "$SDAAS_INSTALL_DIR/$module" ]]; then + source "$SDAAS_INSTALL_DIR/$module" "$@" + else + sd_log -p ERROR "unknow module $module" + return 1 + fi +} + + +sd_curl() { + sd_log -p DEBUG "executing sd_curl $@" + curl -L -A "$SDAAS_SIGNATURE" --retry 3 --retry-delay 3 --retry-max-time 30 --retry-connrefused "$@" +} + + +sd() { + local abortOnFail=${SD_ABORT_ON_FAIL:-false} + local help=false + local OPTIND opt ; while getopts ":hA" opt; do + case "${opt}" in + h) help=true ;; + A) abortOnFail=true ;; + *) + sd_log -p ERROR "Invalid option: -$OPTARG . Usage: sd [-h] [-A] MODULE FUNCTION" + return 1 + ;; + esac + done; shift $((OPTIND-1)) + + local module="$1" + local function_name="$2" + sd_validate module "^[a-z][a-z0-9-]+$" || return 1 + sd_validate function_name "^[a-z][a-z0-9-]+$" || return 2 + shift 2 + + if $help ; then + # print the URL of the manual + echo "$SDAAS_REFERENCE_DOC/sd_${module}_${function_name}" + return 0 + fi + + # load module + sd_include "$module" || return 3 + + ## test module signature + local module_signature="__module_${module}" + [[ "${!module_signature}" -eq 1 ]] || sd_abort "Invalid signature for module $module" + + # build and execute a command + "sd_${module}_${function_name}" "$@" + local status="$?" 
+ if $abortOnFail && [[ $status -gt 0 ]]; then + sd_abort "exit=$status, sd_${module}_${function_name}" + fi + return $status +} + + +sd_uuid() { + local prefix="${1:-"urn:uuid:"}" + echo -n "$prefix" + cat /proc/sys/kernel/random/uuid +} + + + +################################################################################ +## module commands definition +################################################################################ + +sd_core_version() { + echo "$SDAAS_VERSION" +} + + + +################################################################################ +## Commands shortcut +################################################################################ +sd_list() { sd sparql query -O 'csv-h' "$@"; } +sd_rule() { sd sparql query -O ntriples "$@"; } +sd_test() { sd sparql query -O test "$@"; } + + + +########################### +# Module initialization +########################### +if [[ "$1" != "NO_SPLASH" ]]; then + sd_log -p INFORMATIONAL -f /etc/splash.txt "Copyright (C) 2018-2023 https://linkeddata.center" +fi + +# Set the prompt in interactive mode +PS1="sdaas > " diff --git a/modules/driver b/modules/driver new file mode 100644 index 0000000..4b4bcbe --- /dev/null +++ b/modules/driver @@ -0,0 +1,129 @@ +if [[ ! 
-z ${__module_driver+x} ]]; then return ; else __module_driver=1 ; fi +# Copyright (C) 2019-2023 LinkedData.Center - All Rights Reserved + + +################################################################################ +## Core commands definition +################################################################################ + +sd_driver_validate() { + [[ "$#" -eq 1 ]] || return 99 + local sid="$1" + sd_validate sid "^[a-zA-Z]+$" || return 1 + sd_validate "$sid" "^https?:" || return 2 + + ## Validate and load the STORE driver + local driver_type_var="${sid}_TYPE" + local driver="${!driver_type_var}" + + ## Lazy initialization of ${sid}_TYPE driver variable + if [[ -z "$driver" ]]; then + sd_log -p WARNING "valid driver for $sid not found, w3c used" + driver="w3c" + declare -g "$driver_type_var=$driver" + fi + + sd_include "$driver" || sd_abort "internal error including driver $driver" + + ## Call specific driver validation + "sd_${!driver_type_var}_validate" "$sid" +} + + + +sd_driver_update() { + [[ "$#" -eq 2 ]] || return 99 + local sid="$1" + local update_command="$2" + + sd_driver_validate "$sid" || return 1 + # sd_validate not used for update_command becaus can be anything + + # call driver function + local driver_type_var="${sid}_TYPE" + "sd_${!driver_type_var}_update" "$sid" "$update_command" +} + + +sd_driver_query() { + [[ "$#" -eq 3 ]] || return 99 + local sid="$1" + local mime_type="$2" + local query_command="$3" + + sd_driver_validate "$sid" || return 1 + sd_validate mime_type "^[a-z]+\/[a-z0-9.+-]+$" || return 2 + # sd_validate not used for query_command becaus can be anithing + + # call driver function and post-processing results + local driver_type_var="${sid}_TYPE" + "sd_${!driver_type_var}_query" "${sid}" "$mime_type" "$query_command" | tr -d '\r' +} + + +sd_driver_load() { + [[ "$#" -eq 5 ]] || return 99 + local sid="$1" + local inputFormat="$2" + local resource="$3" + local accrualPolicy="$4" + local graph="$5" + + sd_driver_validate 
"$sid" || return 1 + sd_validate inputFormat "^(rdfxml|ntriples|turtle|trig|rss-tag-soup|grddl|guess|rdfa|json|nquads)$" || return 2 + sd_validate resource "^((http|https|ftp|file)://.*/|@.+)" || return 3 + sd_validate accrualPolicy "^(PUT|POST)$" || return 4 + + # Auto detect input base uri and rewrite resource and graph if needed + local input_baseuri + if [[ "$resource" == "@-" ]]; then + resource="-" + input_baseuri="$graph" + elif [[ "${resource:0:1}" == "@" ]]; then + # rewrite resource and additional check for file existence + resource="${resource:1}" + input_baseuri="$graph" + + if [[ ! -r "$resource" ]]; then + sd_log -p ERROR "'$resource' file not found" + return 3 + fi + else + input_baseuri="$resource" + + # if resource and empty graph rewrite graph + if [[ -z "$graph" ]]; then + graph="$resource" + fi + fi + + sd_validate graph "^(http|https|ftp|file|urn):" || return 5 + + # call rapper and pipe it to driver load function + local driver_type_var="${sid}_TYPE" + rapper -i "$inputFormat" -I "$input_baseuri" -o "ntriples" -O "$graph" -wq "$resource" | "sd_${!driver_type_var}_load" "$sid" "$graph" "$accrualPolicy" +} + + +sd_driver_size() { + [[ "$#" -eq 1 ]] || return 99 + local sid="$1" + + sd_driver_validate "$sid" || return 1 + + # call driver function and post-processing results + local driver_type_var="${sid}_TYPE" + "sd_${!driver_type_var}_size" "$sid" +} + + + +sd_driver_erase() { + [[ "$#" -eq 1 ]] || return 99 + local sid="$1" + sd_driver_validate "$sid" || return 1 + + # call driver function and post-processing results + local driver_type_var="${sid}_TYPE" + "sd_${!driver_type_var}_erase" "$sid" +} diff --git a/modules/sparql b/modules/sparql new file mode 100644 index 0000000..679310d --- /dev/null +++ b/modules/sparql @@ -0,0 +1,116 @@ +if [[ ! 
-z ${__module_sparql+x} ]]; then return ; else __module_sparql=1 ; fi +# Copyright (C) 2019-2023 LinkedData.Center - All Rights Reserved + +sd_include driver + +################################################################################ +## Core commands definition +################################################################################ + +sd_sparql_update() { + # parse and validate inputs + local sid="STORE" + local OPTIND opt; while getopts ":s:" opt; do + case "${opt}" in + s) sid="${OPTARG}" ;; + *) + sd_log -p ERROR "Invalid option: -$OPTARG ." + return 1 + ;; + esac + done; shift $((OPTIND-1)) + local update_command="${1:-"@-"}" + + # call driver function + sd_log -p DEBUG "calling sd_driver_update '$sid' '$update_command'" + sd_driver_update "$sid" "$update_command" +} + + + +sd_sparql_query() { + # parse and validate inputs + local sid="STORE" + local out_format="xml" + local OPTIND opt; while getopts ":O:s:" opt; do + case "${opt}" in + O) out_format="${OPTARG}" ;; + s) sid="${OPTARG}" ;; + *) + sd_log -p ERROR "Invalid option: -$OPTARG ." 
+ return 1 + ;; + esac + done; shift $((OPTIND-1)) + local query_command="${1:-"@-"}" + + # Validate out_format + local media_type + case "${out_format}" in + csv*) media_type="text/csv" ;; + tsl) media_type="text/tab-separated-values" ;; + json) media_type="application/sparql-results+json" ;; + xml|boolean|test) media_type="application/sparql-results+xml" ;; + ntriples) media_type="application/n-triples" ;; + turtle) media_type="text/turtle" ;; + rdfxml) media_type="application/rdf+xml" ;; + *) + sd_log -p ERROR "format $out_format not supported" + return 1 + ;; + esac + + # call driver function and post-processing results + function query { + sd_log -p DEBUG "calling sd_driver_query '$sid' '$media_type' '$query_command', using $out_format as output processor" + sd_driver_query "$sid" "$media_type" "$query_command" + } + + case "$out_format" in + csv-h) query | csvtool drop 1 - ;; + csv-1) query | csvtool head 2 - | csvtool drop 1 -;; + csv-f1) query | csvtool head 2 - | csvtool drop 1 - | csvtool format '%(1)\n' - ;; + boolean) query | xmllint --xpath "//*[local-name()='boolean']/text()" - 2>/dev/null;; + test) query | \ + xmllint --xpath "((count(//*[local-name()='results']/*) + count(//*[local-name()='boolean'])) = 0) \ + or //*[local-name()='boolean'][text()='true']" - 2>/dev/null \ + | grep -q "true" + ;; + *) query ;; + esac +} + + +sd_sparql_graph() { + # parse and validate inputs + local sid="STORE" + local accualPolicy="POST" + local inputFormat="guess" + local resource="@-" + local OPTIND opt; while getopts ":s:a:r:f:" opt; do + case "${opt}" in + s) sid="${OPTARG}" ;; + a) accualPolicy="${OPTARG}" ;; + f) inputFormat="${OPTARG}" ;; + r) resource="${OPTARG}" ;; + *) + sd_log -p ERROR "Invalid option: -$OPTARG ." 
+ return 1 + ;; + esac + done; shift $((OPTIND-1)) + local graph="$1" + + # Try a good default if empty graph passed + if [[ -z "$graph" ]]; then + if [[ -n "$resource" && "${resource:0:1}" != "@" ]]; then + graph="$resource" + else + graph="$(sd_uuid)" + fi + fi + + "sd_driver_load" "$sid" "$inputFormat" "$resource" "$accualPolicy" "$graph" +} + + diff --git a/modules/testdriver b/modules/testdriver new file mode 100644 index 0000000..ddc099f --- /dev/null +++ b/modules/testdriver @@ -0,0 +1,10 @@ +if [[ ! -z ${__module_testdriver+x} ]]; then return ; else __module_testdirver=1 ; fi +# Copyright (C) 2019-2023 LinkedData.Center - All Rights Reserved + +sd_testdriver_validate() { return 0; } +sd_testdriver_load() { return 0; } +sd_testdriver_size() { return 0; } +sd_testdriver_erase() { return 0; } +sd_testdriver_update() { return 0; } +sd_testdriver_query() { return 0; } + diff --git a/modules/view b/modules/view new file mode 100644 index 0000000..e4c2756 --- /dev/null +++ b/modules/view @@ -0,0 +1,63 @@ +if [[ ! 
-z ${__module_view+x} ]]; then return ; else __module_view=1 ; fi +# Copyright (C) 2019-2023 LinkedData.Center - All Rights Reserved + + +################################################################################ +## Core commands definition +################################################################################ + +sd_view_config() { + if [[ "$#" -ne 0 ]]; then + sd_log -p ERROR "extra parameters found" + return 1 + fi + _view_config_compgen() { + for var in $(compgen -v "$1"); do + printf "$var=\"${!var}\"\n" + done + } + + _view_config_compgen SDAAS_ + _view_config_compgen SD_ + _view_config_compgen STORE_ +} + + +sd_view_modules() { + if [[ "$#" -ne 0 ]]; then + sd_log -p ERROR "extra parameters found" + return 1 + fi + local moduleName moduleVar status + + for modulePath in $(ls "$SDAAS_INSTALL_DIR"); do + moduleName="$(basename "$modulePath")" + moduleVar=$(printf "__module_%s" "$moduleName") + if [[ $(eval echo "\${$moduleVar}") ]]; then + status=" --cached" + else + status="" + fi + printf "%s%s\n" "$(basename "$moduleName")" "$status" + done +} + + + +sd_view_module() { + if [[ "$#" -ne 1 ]]; then + sd_log -p ERROR "a module is required" + return 1 + fi + local MODULE=${1} + + if [[ -z "$MODULE" ]]; then + sd_log -p ERROR "No module name specified (try 'sd view modules')" + return 1 + fi + + for command in $(grep 'sd_.*() {$' "$SDAAS_INSTALL_DIR/$MODULE" | awk '{print $1}' | sed 's/()//'); do + printf "%s()\n" "$command" + done +} + diff --git a/modules/w3c b/modules/w3c new file mode 100644 index 0000000..12fd7d9 --- /dev/null +++ b/modules/w3c @@ -0,0 +1,81 @@ +if [[ ! 
-z ${__module_w3c+x} ]]; then return ; else __module_w3c=1 ; fi +# Copyright (C) 2019-2023 LinkedData.Center - All Rights Reserved + + + +################################################################################ +## Driver function implementation +################################################################################ +######################################################################## + +sd_w3c_validate() { + return 0 +} + + +sd_w3c_update() { + local sid="$1" + local update_command="$2" + sd_log -p DEBUG "sid=$sid update_command=$update_command" + SD_LAST_SPARQL_UPDATE_RESULT="$(sd_curl -s --fail-with-body -X POST --data-binary "$update_command" \ + --header "Content-Type: application/sparql-update; charset=utf-8" "${!sid}" )" + + if [[ $? -eq 0 ]]; then + sd_log -p DEBUG "$SD_LAST_SPARQL_UPDATE_RESULT" + else + sd_log -p ERROR "$SD_LAST_SPARQL_UPDATE_RESULT" + return 1 + fi +} + + +sd_w3c_query() { + local sid="$1" + local mime_type="$2" + local query_command="$3" + + sd_curl -s --fail-with-body --compressed \ + -X POST \ + --data-binary "$query_command" \ + --header "Content-Type: application/sparql-query; charset=utf-8" \ + --header "Accept: $mime_type; charset=utf-8" \ + "${!sid}" +} + + + +# Loads all nTriples in stdin to a Graph +sd_w3c_load() { + local sid="$1" + local graph="$2" + local accrualPolicy="$3" + + local first_line + + function stream_data { + if [[ "$accrualPolicy" == 'PUT' ]]; then + echo "DROP SILENT GRAPH <$graph>;" + fi + echo "INSERT DATA{ GRAPH <$graph> {" && echo "$first_line" && cat && echo "}}" + } + + if ! 
read -r first_line; then + # return without doing anything if stdin stream is empty + sd_log -p INFORMATIONAL "No data stream, load skipped" + return 0 + else + stream_data | sd_w3c_update "$sid" "@-" + fi +} + + + +sd_w3c_size() { + sd_w3c_query "$1" "text/csv" "SELECT (COUNT(?s) AS ?ntriples) WHERE { ?s ?p ?o }" | csvtool drop 1 - +} + + +sd_w3c_erase() { + sd_w3c_update "$1" "DROP SILENT ALL" +} + diff --git a/scripts/activity.include b/scripts/activity.include deleted file mode 100644 index c39f236..0000000 --- a/scripts/activity.include +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! -z ${__module_activity+x} ]; then return ; else __module_activity=1 ; fi - -SD_REQUIRES_VAR SD_CACHE -SD_REQUIRES_CMD SD_LOAD_RDF_FILE SD_LOG md5sum find realpath tail awk mktemp - -SD_URISPACE=${SD_URISPACE:-"urn:sdaas:resource:"} -SD_AGENT_URI=${SD_AGENT_URI:-"https://linkeddata.center/agent/anassimene#me"} -_SD_ACCRUAL_PLAN=${_SD_ACCRUAL_PLAN:-"replace"} - -SD_ACTIVITY_DIR='' -SD_ACTIVITY_ID='' -SD_ACTIVITY_URI='' -_SD_ACTIVITY_STATUS='noop' - -# @description -# append a prov statement after substituting variables: -# .
**TIMESTAMP** with the current time in xsd format -# -# @arg $1 a prov statement template -function _SD_LOG_PROV { - local now="$(date '+%Y-%m-%dT%T')" - local provStatement="${1//TIMESTAMP/\"$now\"^^xsd:dateTime}" - echo "$provStatement" >> "$SD_ACTIVITY_DIR/prov.ttl" -} - - -function _SD_ACTIVITY_LOG { - SD_LOG "activity $SD_ACTIVITY_ID $1" "$2" -} - -# @description -# create a temporary directory with a in and out subdirectories to store activity input and output -# it also create and initialize a prov.ttl file to save provenance info -# @arg $1 an activity comment or an uri plan ( must star with http or urn: or file:// ) -# @arg $2 forced directory to create activity space -# -# on return sets: -# SD_ACTIVITY_DIR -# SD_ACTIVITY_ID -# SD_ACTIVITY_URI -# _SD_ACTIVITY_STATUS='running' -# -function _SD_START_ACTIVITY { - local uriOrComment="${1:-'generic data ingestion activity'}" - SD_ACTIVITY_DIR="$(realpath "${2:-$(mktemp -d --tmpdir="$SD_CACHE" XXXXXXXXXX)}")" - - SD_ACTIVITY_ID="$(basename "$SD_ACTIVITY_DIR")" - SD_ACTIVITY_URI="${SD_URISPACE}${SD_ACTIVITY_ID}_activity" - - mkdir -p "$SD_ACTIVITY_DIR/out" "$SD_ACTIVITY_DIR/in" || SD_FATAL_ERROR "Error creating $SD_ACTIVITY_DIR structure" - - - # prepare plan object - local plan - if [[ "$uriOrComment" =~ ^(http|HTTP|urn:|file://) ]] ; then - plan="<$uriOrComment>" - else - plan="\"\"\"$uriOrComment\"\"\"" - fi - - cat<<-EOT > "$SD_ACTIVITY_DIR/prov.ttl" - @prefix : <${SD_URISPACE}${SD_ACTIVITY_ID}_> . - @prefix prov: . - @prefix rdfs: . - @prefix xsd: . - @prefix dct: . - @prefix sd: . - @prefix kees: . - - :activity a prov:Activity; - prov:qualifiedAssociation :activity_owner. - :activity_owner a prov:Association ; - prov:agent <$SD_AGENT_URI> ; - prov:hadRole kees:namedGraphGenerator ; - prov:hadPlan $plan. - EOT - _SD_LOG_PROV ":activity prov:startedAtTime TIMESTAMP ." 
- _SD_ACTIVITY_STATUS='running' - _SD_ACTIVITY_LOG "starded $uriOrComment" -} - - -function _SD_MK_UID { - local seed=${1-=$(cat /dev/random)} - echo -n "$seed"| md5sum | awk '{print $1}' -} - - - - -# on return sets: -# _SD_ACTIVITY_STATUS='noop' -function _SD_COMMIT_ACTIVITY { - local graphName="$1" - - if [ "$_SD_ACCRUAL_PLAN" = "replace" ]; then - _SD_ACTIVITY_LOG "completed by replacing graph <$graphName>" - SD_SPARQL_UPDATE "DROP SILENT GRAPH <$graphName>" ; - else - _SD_ACTIVITY_LOG "completed by appending data to graph <$graphName>" - fi - - local rdfFile - for rdfFile in $(find "$SD_ACTIVITY_DIR/out" -name "*" -type f -print); do - SD_LOAD_RDF_FILE "$graphName" "$rdfFile" - done - _SD_LOG_PROV " -:activity prov:endedAtTime TIMESTAMP ; prov:generated :graph . -:graph sd:name <$graphName>; - prov:wasGeneratedBy :activity ; - dct:created TIMESTAMP ; - dct:modified TIMESTAMP . -" - SD_LOAD_RDF_FILE "$graphName" "$SD_ACTIVITY_DIR/prov.ttl" - - if [ $SD_DEBUG -eq 0 ]; then rm -rf "$SD_ACTIVITY_DIR" ; fi - - _SD_ACTIVITY_STATUS='noop' -} - - -# on return sets: -# SD_ACTIVITY_STATUS=noop' -function _SD_INVALIDATE_GRAPH { - local graphName="$1" - - _SD_LOG_PROV " -:activity prov:endedAtTime TIMESTAMP; prov:invalidated :graph . -:graph sd:name <$graphName>; - prov:invalidatedAtTime TIMESTAMP; - prov:wasInvalidatedBy :activity . 
-" - SD_LOAD_RDF_FILE "$graphName" "$SD_ACTIVITY_DIR/prov.ttl" - _SD_ACTIVITY_LOG "invalidated graph $graphName due error" - _SD_ACTIVITY_STATUS="noop" -} - diff --git a/scripts/archetypes/README.md b/scripts/archetypes/README.md deleted file mode 100644 index a45eb7f..0000000 --- a/scripts/archetypes/README.md +++ /dev/null @@ -1 +0,0 @@ -No archetipes available in community edition \ No newline at end of file diff --git a/scripts/asserting.include b/scripts/asserting.include deleted file mode 100644 index 18b23ce..0000000 --- a/scripts/asserting.include +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) 2019-2020 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! -z ${__module_asserting+x} ]; then return ; else __module_asserting=1 ; fi - -SD_VERBOSE=${SD_VERBOSE:=1} - -## -# @description: shows a string to std error if verbose > 0 -# @arg $1 a string -## -function SD_FATAL_ERROR { - if [ "$SD_VERBOSE" -gt 0 ]; then - >&2 echo "$1" - exit 1 - fi -} - - -## -# @description: tests that an enviroment variable exists -# @arg $1 var name -## -function SD_REQUIRES_VAR { - local var - for var in "$@"; do - local value=${!var} - if [ -z "$value" ]; then - SD_FATAL_ERROR "Mandatory environment variable $var not defined." - fi - done -} - -## -# @description: tests that a command exists -# @arg $1 command name -## -function SD_REQUIRES_CMD { - local cmd - for cmd in "$@" ; do - hash $cmd 2>/dev/null || SD_FATAL_ERROR "I require $cmd but it it's not installed." 
- done -} - - -## -# @description: includes a file only if it was not already include -# it try first to load the file -# than to load from local script directory addindg .include extension -# than from sdaas installation directory addindg .include extension -# -# @arg $1 include file -# @arg $2..$# paramether passed to source to drive include special behaviour -## -function SD_INCLUDE { - SD_REQUIRES_VAR _SDAAS_DIR - local -r file="$1" - shift - - # test if the module was already included - if eval "[ ! -z \${__module_${file}+x} ]" ; then return ; fi - - if [ -f "$file" ]; then - source "$file" "$@" - elif [ -f "$_SDAAS_DIR/$file.include" ] ; then - source "$_SDAAS_DIR/$file.include" "$@" "$@" - else - source "$(pwd)/scripts/$file.include" "$@" "$@" - fi -} diff --git a/scripts/axioms/uncompleted_graphs.sparq_select b/scripts/axioms/uncompleted_graphs.sparq_select deleted file mode 100644 index d19853d..0000000 --- a/scripts/axioms/uncompleted_graphs.sparq_select +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -# Finds all incompleted named graph -PREFIX dct: -PREFIX sd: -PREFIX prov: -SELECT DISTINCT ?g WHERE { - ?x sd:name ?g. - { ?x prov:invalidatedAtTime [] } - UNION - { FILTER NOT EXISTS { ?y sd:name ?g; dct:created [] }} - UNION - { FILTER NOT EXISTS { ?z sd:name ?g; dct:modified [] }} -} \ No newline at end of file diff --git a/scripts/bg_reasoning.include b/scripts/bg_reasoning.include deleted file mode 100644 index 4407bfd..0000000 --- a/scripts/bg_reasoning.include +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (C) 2019-2020 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! 
-z ${__module_bg_reasoning+x} ]; then return ; else __module_bg_reasoning=1 ; fi - -SD_INCLUDE curl_utils - -SD_REQUIRES_CMD mktemp dirname gzip cut tail - - -_SD_LOCAL_REASONER_STARTED=${_SD_LOCAL_REASONER_STARTED:-0} - -## -# @description creates an instance of blazegraph in http://localhost:8080/sdaas/sparql -# -# @arg $1 the memory footprints (default micro) -# -## -function SD_START_LOCAL_REASONING_ENGINE { - local memoryFootprint="${1:-micro}" - - if [ $_SD_LOCAL_REASONER_STARTED -eq 1 ]; then - SD_DEBUG_INFO "SD_START_LOCAL_REASONING_ENGINE already stared" - else - /sdaas-start --size $memoryFootprint > "$SD_CACHE/rdfstore.log" 2>&1 & - if [ $? -eq 0 ]; then - _SD_LOCAL_REASONER_STARTED=1 - SD_WARMUP_REASONING_ENGINE - else - SD_FATAL_ERROR "Error launcing local reasoner" - fi - fi - SD_LOG "$memoryFootprint SPARQL endpoint available at http://localhost:8080/sdaas/sparql (logs in $SD_CACHE/rdfstore.log)" -} - - -## -# @description test that the engine is running -## -function SD_WARMUP_REASONING_ENGINE { - local endpoint="${1:-"http://localhost:8080/sdaas"}" - if [ $_SD_LOCAL_REASONER_STARTED -eq 1 ]; then - local retry=1 - while ! curl -s -f -I "$endpoint/status" > /dev/null ; do - SD_LOG "Reasoning engine warming up $retry" - sleep $(($retry*2)) - let retry++ - [ $retry -le 6 ] || SD_FATAL_ERROR "Sorry... internal reasoner startup failed.." - done - fi -} - - - -## -# @description creates a reasoner based on blazegraph -## -function SD_STOP_LOCAL_REASONING_ENGINE { - SD_DEBUG_INFO "$(/sdaas-stop)" - _SD_LOCAL_REASONER_STARTED=0 -} - - -## -# @description create a reasoner based on blazegraph extensions -# each reasoner can have an archetype (ie. specific features pre-configured). e.g.: -# . **rdfs** implements an rdfs triplestore reasoner with both textual and geospatial features installed -# . **geo** is a sparql quadstore with [geospatial search](https://github.com/blazegraph/database/wiki/GeoSpatial) -# . 
**lexical** is a sparql quadstore with lucene [textual indices](https://github.com/blazegraph/database/wiki/FullTextSearch) to triplestore -# . **smart** is a quadstore with both textual and geospatial features installed -# the namespace a **kb** is reserved -# knowledge base archetipes are in the scripts/archetipes directory -# -# @arg $1 a reasoner archetype -# @arg $2 a blazegraph endpoint -# @arg $3 namespace suffix (optional for testing purpose) -# -# @return the sparql service endpoint fot reasoner -## -function SD_CREATE_REASONER { - SD_REQUIRES_VAR 1 - local archetype="$1" - local endpoint=${2:-"http://localhost:8080/sdaas"} - local namespace="${3:-"$1-$RANDOM"}" - - [ "$namespace" != "kb" ] || SD_FATAL_ERROR "SD_CREATE_REASONER kb namespace is reserved" - - SD_DEBUG_INFO "In SD_CREATE_REASONER archetype=$archetype namespace=$namespace" - - local archetypes="$_SDAAS_DIR/archetypes" - - [ -f "${archetypes}/${archetype}.txt" ] || SD_FATAL_ERROR "Invalid reasoner archetype $archetype." 
- - local description=$(cat "$archetypes/${archetype}.txt" | sed "s/%namespace/$namespace/g") - - local outputBuffer=$(SD_MK_DEBUG_TMP_FILE new_namespace) - _SD_CURL_CMD \ - "$endpoint/namespace" \ - "$outputBuffer" \ - -X POST \ - --data-binary "$description" \ - --header 'Content-Type:text/plain' \ - || SD_FATAL_ERROR "SD_CREATE_REASONER error creating namespace, see $outputBuffer" - SD_DEBUG_INFO "See create namespace output in $outputBuffer" - if [ $SD_DEBUG -eq 0 ] ; then rm -f "$outputBuffer" ; fi - - ## Load specific archetype resources - if [ -r "${archetypes}/${archetype}.ttl" ] ; then - SD_REASONER_LOAD $namespace "${archetypes}/${archetype}.ttl" turtle "urn:sdaas:archetype:${archetype}" - fi - echo "$endpoint/namespace/$namespace/sparql" -} - - -## -# @description destroy a previpus created endpoint -# -# @arg $1 a blazegraph namespace sparql endpoint -## -function SD_DESTROY_REASONER { - SD_REQUIRES_VAR 1 - local sparqlEndpoint="$1" - - SD_DEBUG_INFO "In SD_DESTROY_REASONER ${endpoint}" - - [[ $sparqlEndpoint =~ namespace/.+/sparql ]] || SD_FATAL_ERROR "SD_DESTROY_REASONER unable to destroy $endpoint" - - ## Ignore errors in deleting a reasoner - endpoint=$(dirname "$sparqlEndpoint") - SD_DEBUG_INFO "$(_SD_CURL_CMD "$endpoint" - -X DELETE)" -} - - -## -# @description a sparql query client output goes to stdout -# -# @example -# SD_REASONER_QUERY rdfs text/csv "select * where {?s ?p ?o} LIMIT 1" -# SD_REASONER_QUERY rdfs text/csv "@path/to/input_file_with_query" -# -# @arg $1 a sparql endpoint -# @arg $2 mime type for accepted result (i.e. 
text/csv or text/turtle) or some shortcuts: -# "csv" a shortcut for text/csv -# "csv-h" text/csv without headers -# "csv-1" return just first line (removing headers) in csv -# "csv-f1" return just first field in first line (removing headers) in csv -# "bool" return true or false if result contains true -# "xml" a shortcut for application/sparql-results+xml -# @arg $3 a string with a valid sparql query statement (with extension) or a filename prefixed by the @ character -# -# @exitcode 0 If successfull. -# @exitcode >0 On failure -## -function SD_REASONER_QUERY { - local endpoint="$1" - local request="$2" - local query="$3" - - SD_DEBUG_INFO "In SD_REASONER_QUERY with endpoint=$endpoint request='$request' query='${query:0:40}'" - - local accept - case "$request" in - csv*) accept="text/csv" ; ;; - bool|xml) accept="application/sparql-results+xml" ; ;; - *) accept="$request" ; ;; - esac - - local outputBuffer=$(SD_MK_DEBUG_TMP_FILE sparql_query_output) - _SD_CURL_CMD \ - "${endpoint}" \ - "$outputBuffer" \ - -X POST \ - --data-binary "$query" \ - --header "Content-Type: application/sparql-query" \ - --header "Accept: $accept" \ - || SD_FATAL_ERROR "SD_REASONER_QUERY failed, see $outputBuffer" - SD_DEBUG_INFO "See unfiltered output in $outputBuffer" - - # Post processing - case "$request" in - csv-h) tail -n +2 "$outputBuffer"; ;; - csv-1) head -2 "$outputBuffer" | tail -1; ;; - csv-f1) head -2 "$outputBuffer" | tail -1 | cut --delimiter=',' -f1; ;; - bool) if grep -q "true" "$outputBuffer" ; then echo true; else echo false; fi; ;; - *) cat "$outputBuffer" ; ;; - esac - - if [ $SD_DEBUG -eq 0 ] ; then rm -f "$outputBuffer" ; fi -} - - -# -# @description a sparql update client output goes to stdout -# -# @arg $1 a sparql endpoint -# @arg $2 a sparql update string or @filename -# -function SD_REASONER_UPDATE { - local endpoint="$1" - local data="$2" - - SD_DEBUG_INFO "In SD_REASONER_UPDATE with endpoint=$endpoint data='${data:0:40}'" - - local 
output=$(SD_MK_DEBUG_TMP_FILE SD_REASONER_UPDATE_output) - _SD_CURL_CMD \ - "${endpoint}" \ - "$output" \ - -X POST \ - --data-binary "$data" \ - --header "Content-Type: application/sparql-update" \ - || SD_FATAL_ERROR "SD_REASONER_UPDATE failed: see $output" - - if [ $SD_DEBUG -eq 0 ]; then rm -f "$output"; fi -} - - - -## -# @description Simple loading of a a file into a reasoner. An shared upload directory must exist -# -# @arg $1 a sparql endpoint -# @arg $2 a graph name if "default" file is added to default graph -# @arg $3 an input file -# @arg $4 the input file format Format can be: guess(default), 'turtle', 'ntriples', 'rdfxml' -# @ard $5 the upload directory (that must be referred -## -function SD_REASONER_LOAD { - local endpoint="$1" - local graphName="$2" - local inputFile="$3" - local format="${4:-guess}" - local uploadDir="${5:-/tmp/upload}" - - local extension - case $format in - turtle) extension='ttl'; ;; - ntriples) extension='nt'; ;; - rdfxml) extension='rdf'; ;; - *) extension="${inputFile##*.}"; ;; - esac - - mkdir -p "$uploadDir" || SD_FATAL_ERROR "SD_LOAD_RDF_FILE failed creating upload dir in $uploadDir" - - local sharedFile="$(mktemp --tmpdir="$uploadDir" "r-XXXXXXXXXX.$extension.gz")" - gzip -c "$inputFile" > "$sharedFile" - chmod +r "$sharedFile" - - if [ "$graphName" = "default" ]; then - local graphStatement="" - else - local graphStatement=" INTO GRAPH <$graphName>" - fi - - SD_REASONER_UPDATE "$endpoint" "LOAD ${graphStatement}" - if [ $SD_DEBUG -lt 10 ]; then rm -f "$sharedFile" ; fi -} - - diff --git a/scripts/caching.include b/scripts/caching.include deleted file mode 100644 index 7457b4f..0000000 --- a/scripts/caching.include +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! 
-z ${__module_caching+x} ]; then return ; else __module_caching=1 ; fi -SD_REQUIRES_CMD SD_FATAL_ERROR - -SD_CACHE=${SD_CACHE:="/tmp/sdaas"} - -function SD_CLEAN_CACHE { - if [ ! -d $SD_CACHE ]; then - mkdir -m 711 -p $SD_CACHE || SD_FATAL_ERROR "Can't create $SD_CACHE directory!" - else - rm -rf $SD_CACHE/* || SD_FATAL_ERROR "Can't delete $SD_CACHE directory!" - fi -} diff --git a/scripts/curl_utils.include b/scripts/curl_utils.include deleted file mode 100644 index 1561075..0000000 --- a/scripts/curl_utils.include +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! -z ${__module_curl_utils+x} ]; then return ; else __module_curl_utils=1 ; fi - -SD_REQUIRES_CMD SD_DEBUG_INFO curl - - -### default command used to get web resources -SD_DEFAULT_DOWNLOADER=${SD_DEFAULT_DOWNLOADER:-_SD_CURL_CMD} -_SD_DEFAULT_CURL_OPTIONS=${_SD_DEFAULT_CURL_OPTIONS:-"-L --compressed --retry 3 --retry-delay 3 --retry-max-time 30 --retry-connrefused"} - - -# A downloader must accept at least two parameters: -# $1 = the url to download -# $2 = the location of the output file -# Other parameters can be added -# -# It must return 0 if download succeded or > 0 otherwhise -# -# It sholud also to send download info to a debugging file -# -function _SD_CURL_CMD { - SD_REQUIRES_VAR 1 2 - local url=$1 - local outputPath=$2 - shift 2 - - local curlCmd=(curl $_SD_DEFAULT_CURL_OPTIONS ) - curlCmd+=("$@") - - curlCmd+=(-s -w "%{http_code}" -o "$outputPath" "$url") - local httpStatus=$("${curlCmd[@]}") - if [[ $httpStatus == 2* ]] ; then - return 0 - else - SD_DEBUG_INFO "access to $url failed with result $httpStatus, output in $outputPath" - return 1 - fi -} diff --git a/scripts/kb.include b/scripts/kb.include deleted file mode 100644 index 8ad5b11..0000000 --- a/scripts/kb.include +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (C) 2019-2020 LinkedData.Center - All Rights Reserved -# 
Permission to copy and modify is granted under the MIT license -if [ ! -z ${__module_kb+x} ]; then return ; else __module_kb=1 ; fi - -SD_INCLUDE bg_reasoning - - -SD_SPARQL_ENDPOINT=${SD_SPARQL_ENDPOINT:-"http://localhost:8080/sdaas/sparql"} -SD_UPLOAD_DIR=${SD_UPLOAD_DIR:-"/tmp/upload"} - - - -########################################################################## -### WARNING: for SD_ENABLE_LEXICAL_REASONING and SD_WARMUP -### SD_SPARQL_ENDPOINT must point to a sdaas rdfstore (local or remote) -### if remote, SD_UPLOAD_DIR must be shared -########################################################################## - - -## -# @description a test that the engine is ok and some init actions (if required) use -## -function SD_WARMUP { - SD_WARMUP_REASONING_ENGINE "$(dirname "$SD_SPARQL_ENDPOINT")" -} - - - -## -# @arg $1 mime type for accepted result (i.e. text/csv or text/turtle) or some shortcuts: -# "csv" a shortcut for text/csv -# "csv-h" text/csv without headers -# "csv-1" return just first line (removing headers) in csv -# "csv-f1" return just first field in first line (removing headers) in csv -# "bool" return true or false if result contains true -# "xml" a shortcut for application/sparql-results+xml -# @arg $2 a string with a valid sparql query statement (with extension) or a filename prefixed by the @ character -## -function SD_SPARQL_QUERY { - SD_REASONER_QUERY "$SD_SPARQL_ENDPOINT" "$1" "$2" -} - - -## -# @arg $1 a sparql update string or @filename -## -function SD_SPARQL_UPDATE { - SD_REASONER_UPDATE "$SD_SPARQL_ENDPOINT" "$1" -} - - -## -# @arg $1 a graph name if "default" file is added to default graph -# @arg $2 an input file -# @arg $3 the input file format Format can be: guess(default), 'turtle', 'ntriples', 'rdfxml' -## -function SD_LOAD_RDF_FILE { - SD_REASONER_LOAD "$SD_SPARQL_ENDPOINT" "$1" "$2" "$3" "$SD_UPLOAD_DIR" -} - diff --git a/scripts/learning.include b/scripts/learning.include deleted file mode 100644 index 0086f67..0000000 --- 
a/scripts/learning.include +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! -z ${__module_learning+x} ]; then return ; else __module_learning=1 ; fi - -SD_INCLUDE activity -SD_INCLUDE curl_utils - -SD_REQUIRES_VAR SD_CACHE -SD_REQUIRES_CMD SD_LOG find realpath tail - - -# @description -# downloads web resources into the activity "in" directoty -# It requires a list of download requests in _SD_SOURCES[@] variable. Download requests must be in the form: [ | [ | [ | downloader]]] where: -# . is a file or an url recognized by downloader. file is converted in url as "file://localhost$(realpath "$url")" -# . is an extension to be added to the downloaded file, ifi is equal to "auto" it tries to preserve the original extension (if present) or adds ".data" -# . extra options added to downloader command -# . the commad to be used to download. it must accept end write result on std out -# if not specified it uses the command contained in SD_DEFAULT_DOWNLOADER (curl) -# -# @arg $_SD_SOURCES [@] a list of download request -# @arg $SD_ACTIVITY_DIR -# @arg $SD_DEFAULT_DOWNLOADER a default download command (like curl) -# @arg $SD_DEBUG_FILE a file to store debug info -function _SD_EXTRACT { - _SD_LOG_PROV ":activity prov:wasInfluencedBy :extraction. :extraction a prov:Activity; prov:startedAtTime TIMESTAMP ." 
- - local downloadRequest - for downloadRequest in "${_SD_SOURCES[@]}"; do - ##### parse downloadRequest - local url extension downloaderOpts downloader - IFS='|' read url extension downloaderOpts downloader <<< "$downloadRequest" - extension=${extension:-auto} - - # if $url is a file, force cp instead of the default downloader - if [ -z "$downloader" ] ; then - if [ -f "$url" ] ; then - downloader="cp -f" - else - downloader="$SD_DEFAULT_DOWNLOADER" - fi - fi - - # save the extension of the input url - if [ "$extension" = 'auto' ]; then - ##### try to preseve extension, if any - local resourcePath="${url##*/}" - extension="${resourcePath##*.}" - if [ "$extension" = "$resourcePath" ]; then extension='data'; fi # i.e. if no extension in url than use .data - extension=".${extension}" - fi - local outPath="in/$(_SD_MK_UID "$url")${extension}" - - SD_DEBUG_INFO "in _SD_EXTRACT downloader='$downloader' url='$url' downloaderOpts='$downloaderOpts' outPath='$outPath'" - - if $downloader "$url" "$SD_ACTIVITY_DIR/$outPath" $downloaderOpts ; then - _SD_ACTIVITY_LOG "downloaded $url" - _SD_LOG_PROV ":extraction prov:generated ." - else - _SD_ACTIVITY_LOG "error downloading $url" - _SD_LOG_PROV ":extraction prov:invalidated ." - return 1 - fi - done - _SD_LOG_PROV ":extraction prov:endedAtTime TIMESTAMP ." -} - - -function _SD_TRANSFORMER { - local preprocessor="$1" - local inputFile="$2" - local pipeline="$3" - local outputPath="$4" - $preprocessor "$inputFile"| $pipeline 2>> "$SD_DEBUG_FILE" 1> "$outputPath" -} - -# @description -# apply a set of transformation steps to the activity input -# a transformation has this general format -# where: -# . a command to exttract data or an alias -# . is a input filter for find command relative to activity dir that must exists -# . is a directory relative to activity dir. If not existing it is created -# . changes last extension to be applied to all output filename (empty by default). If is equal to 'keep' extension is noth changed -# . 
it is a pipeline that will be applied to all inputs -# The ouput filenames are generated from input one -# -# @arg $1 a list of transformation -function _SD_TRANSFORM { - SD_REQUIRES_VAR SD_ACTIVITY_DIR - - # If no transformation, just move all files in input queue to the output queueu - # equivalent to _SD_TRANSFORMATIONS=('|in|out|keep|') but faster - if [ -z "$_SD_TRANSFORMATIONS" ] ; then - for file in $(find "$SD_ACTIVITY_DIR/in" -name "*" -type f -print); do - mv -f "$file" "$SD_ACTIVITY_DIR"/out/ || SD_FATAL_ERROR "error moving files from in to out in _SD_TRANSFORM" - done - return 0 - fi - - _SD_LOG_PROV ":activity prov:wasInfluencedBy :transformation. :transformation a prov:Activity; prov:wasInformedBy :extraction; prov:startedAtTime TIMESTAMP ." - - local transformationRequest - for transformationRequest in "${_SD_TRANSFORMATIONS[@]}"; do - #### parse transformation request - local preprocessor inputFilter outputDir postprocessor pipeline - IFS='|' read -r preprocessor inputFilter outputDir postprocessor pipeline <<< "$transformationRequest" - preprocessor=${preprocessor:-cat} - inputFilter=${inputFilter:-in} - outputDir=${outputDir:=out} - pipeline=${pipeline:-cat} - #note: postprocessor can be empty when you want to remove an exension (e.g. 
in gzip processing) - - #### extract input dir and file name filter to be used with find command - local inputDir="${inputFilter%/*}" - local fileNameFilter="${inputFilter##*/}" - if [ "$fileNameFilter" = "$inputFilter" ]; then fileNameFilter='*' ; fi ##no trailing / in input dir - - - #### ensure input and output dir exist - test -d "$SD_ACTIVITY_DIR/$inputDir" || SD_FATAL_ERROR "_SD_TRANSFORM input dir does not exists" - test -d "$SD_ACTIVITY_DIR/$outputDir" || mkdir "$SD_ACTIVITY_DIR/$outputDir" || SD_FATAL_ERROR "_SD_TRANSFORM cant create output dir $outputDir" - - local inputFile - for inputFile in $(cd "$SD_ACTIVITY_DIR"; find "$inputDir" -name "$fileNameFilter" -type f -print | sort); do - #### generate an output path - local outputFile - local outputFileName="$(basename "$inputFile")" - if [ "$postprocessor" = "keep" ]; then - outputFile="$outputDir/${outputFileName}" - else - outputFile="$outputDir/${outputFileName%.*}" # remove old extension - outputFile+="${postprocessor}" # add new estension (if set) - fi - SD_DEBUG_INFO "In _SD_TRANSFORM preprocessor='$preprocessor' inputFile='$SD_ACTIVITY_DIR/$inputFile' pipeline='$pipeline' output='$SD_ACTIVITY_DIR/$outputFile'" - $preprocessor "$SD_ACTIVITY_DIR/$inputFile" | eval $pipeline 2>> "$SD_DEBUG_FILE" 1> "$SD_ACTIVITY_DIR/$outputFile" - if [ $? -ne 0 ]; then SD_FATAL_ERROR "_SD_TRANSFORMER cant execute transformation" ; fi - _SD_LOG_PROV ":transformation prov:used ; prov:generated ." - - done - _SD_ACTIVITY_LOG "transformation pipeline: $inputDir/$fileNameFilter -> $preprocessor -> $pipeline -> $outputDir" - done - - _SD_LOG_PROV ":transformation prov:endedAtTime TIMESTAMP ." -} - - -# @description -# implements a named graph ingestion activity maging provenance info -# -# @arg $1 the name the graph to,load -# @arg $2 a source URI or an array of source URIs. 
If not provided defaults to $1 -# @arg $3 an optional transformation or an array transformation -# -# -# @example -# #loads https://www.w3.org/ns/prov web resource in graph named using as source -# #same as SD_LEARN https://www.w3.org/ns/prov https://www.w3.org/ns/prov -# SD_LEARN https://www.w3.org/ns/prov -# -function SD_LEARN { - SD_REQUIRES_VAR 1 - local g="$1" - - ########### PARSE ARGS - case $# in - 1) - _SD_SOURCES=("$g") - _SD_TRANSFORMATIONS='' - ;; - 2) - if [ ${2:${#2} - 3} = "[@]" ]; then _SD_SOURCES=("${!2}"); else _SD_SOURCES=("$2"); fi - _SD_TRANSFORMATIONS='' - ;; - 3) - if [ ${2:${#2} - 3} = "[@]" ]; then _SD_SOURCES=("${!2}"); else _SD_SOURCES=("$2"); fi - if [ ${3:${#3} - 3} = "[@]" ]; then _SD_TRANSFORMATIONS=("${!3}"); else _SD_TRANSFORMATIONS=("$3"); fi - ;; - *) - SD_FATAL_ERROR "Wrong argument number for SD_LEARN" - ;; - esac - - # start an activity in not yet started - if [ "$_SD_ACTIVITY_STATUS" != "running" ] ; then - _SD_START_ACTIVITY "learning of graph <$g>" - fi - - # ETL process - if _SD_EXTRACT ; then - _SD_TRANSFORM - _SD_COMMIT_ACTIVITY "$g" - else - _SD_INVALIDATE_GRAPH "$g" - return 1 - fi -} - diff --git a/scripts/logging.include b/scripts/logging.include deleted file mode 100644 index 98fec5a..0000000 --- a/scripts/logging.include +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! 
-z ${__module_logging+x} ]; then return ; else __module_logging=1 ; fi - -SD_REQUIRES_VAR SD_CACHE SD_VERBOSE -SD_REQUIRES_CMD date - -SD_DEBUG=${SD_DEBUG:=0} -SD_LOG_FILE=${SD_LOG_FILE:="$SD_CACHE/session_$$.log"} -SD_DEBUG_FILE=${SD_DEBUG_FILE:="$SD_CACHE/session_$$.debug"} - -# Shows a string to std output if verbose > 0 -# $1 a string -function SD_SHOW { - if [ "$SD_VERBOSE" -gt 0 ]; then - echo "$1" - fi -} - -# $1 a string -# if $2 exists log only to file -function SD_LOG { - local log_string="sdaas $(date) - $1" - if [ -z "$2" ]; then - SD_SHOW "$log_string" - fi - echo "$log_string" >> "$SD_LOG_FILE" -} - -# $1 a string or a file name -function SD_DEBUG_INFO { - if [ $SD_DEBUG -gt 0 ]; then - if [ -f "$1" ]; then - cat "$1" >> "$SD_DEBUG_FILE" - else - echo "$1" >> "$SD_DEBUG_FILE" - fi - fi -} - - -# $1 a string or a file name -function SD_MK_DEBUG_TMP_FILE { - local prefix=${1:-"debug"} - local tmpFile=$(mktemp --tmpdir="$SD_CACHE" "${prefix}.XXXXXXXXXXXX") - echo "$tmpFile" -} - - -function SD_START_LOGGING { - touch "$SD_LOG_FILE" || SD_FATAL_ERROR "Can't touch log file($SD_LOG_FILE). Is $SD_CACHE directory writable?" - touch "$SD_DEBUG_FILE" || SD_FATAL_ERROR "Can't touch debug file ($SD_DEBUG_FILE). Is $SD_CACHE directory writable?" - SD_LOG "SD_START_LOGGING logging $SD_LOG_FILE" - SD_LOG "SD_START_LOGGING debug info in $SD_DEBUG_FILE" -} diff --git a/scripts/platform.include b/scripts/platform.include deleted file mode 100644 index 2b50a44..0000000 --- a/scripts/platform.include +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (C) 2019-2020 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! 
-z ${__module_platform+x} ]; then return ; else __module_platform=1 ; fi -SD_QUADSTORE=${SD_QUADSTORE:-kb} -SD_DEBUG=${SD_DEBUG:-0} -SD_ACCRUAL_POLICY=${SD_ACCRUAL_POLICY:-pull} - -_SD_REBOOT=${_SD_REBOOT:-0} -_SD_RELEASE_NAME="Anassimene" - -_SD_START=$SECONDS -_SDAAS_DIR=${_SDAAS_DIR:-$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )} - -# READ SECRETS FROM .env file -if [ -f .env ]; then - if [ "$(stat -c "%a" .env)" != "600" ]; then - >&2 echo "WARNING: consider to set permission to .env file to 600 (i.e. for your eyes only)!" - fi - . .env -fi - - -######################################################################################## -# INCLUDING CORE COMPONENTS -######################################################################################## -source "$_SDAAS_DIR/asserting.include" -SD_INCLUDE caching -SD_INCLUDE logging -SD_INCLUDE bg_reasoning -SD_INCLUDE $SD_QUADSTORE -SD_INCLUDE learning -SD_INCLUDE reasoning -SD_INCLUDE teaching - - -function SD_BEGIN_INGESTION { - _SD_START=$SECONDS - # Cleanup the entire knowledge base - SD_CLEAN_CACHE - SD_START_LOGGING - - # Auto Warmup rdfstore - if [ $SD_NOWARMUP -eq 0 ]; then - SD_WARMUP - fi - - SD_LOG "LinkedData.Center SDaaS platform ($_SD_RELEASE_NAME) using '$SD_QUADSTORE' graph technology." - if [ $_SD_REBOOT -eq 1 ]; then - SD_LOG "Erasing the knowledge base... (it could take a while)" - SD_SPARQL_UPDATE "DROP ALL"; - fi -} - - -function SD_THATS_ALL_FOLKS { - #SD_LOG "Computing knowledge base configuration default axioms." - #SD_SPARQL_UPDATE @$_SDAAS_DIR/axioms/sdaas-axioms.sparq_update - local inconsistentGraphs="$(SD_SPARQL_QUERY csv-h "@$_SDAAS_DIR/axioms/uncompleted_graphs.sparq_select")" - if [ -z "$inconsistentGraphs" ] ; then - SD_LOG "Knowledge ingestion succesfully completed in $(( SECONDS - _SD_START )) seconds." 
- return 0 - else - SD_LOG "WARNING: the knowledge base contains following inconsistent graphs:" - echo "$inconsistentGraphs" - return 1 - fi -} - - - -function SD_STATUS { - cat <<-EOT - Smart Data as a Service (SDaaS) platform vocabulary - Copyright (C) 2018-2020 http://linkeddata.center/ - - Configuration variables: - ------------------- - - $(set | egrep "^SD_.+=") - - - Available functions: - ------------------- - - $(set | egrep "^SD_.+ ()") - - - Enabled modules: - ----------------- - - $(set | egrep "^__module_" | sed 's/__module_//' | sed 's/=1//') - - EOT -} - - -######################################################################################## -# EXECUTE SCRIPT INITIALIZATON -######################################################################################## - -SD_BEGIN_INGESTION \ No newline at end of file diff --git a/scripts/reasoning.include b/scripts/reasoning.include deleted file mode 100644 index 24f5a81..0000000 --- a/scripts/reasoning.include +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! 
-z ${__module_reasoning+x} ]; then return ; else __module_reasoning=1 ; fi - -SD_INCLUDE activity -SD_INCLUDE curl_utils - -SD_REQUIRES_CMD mktemp SD_SPARQL_QUERY - -## just for legacy compatibility -function SD_REASONING_BY { $* ; } - -function SD_EVAL_CONSTRUCTOR { - SD_REQUIRES_VAR 1 2 - local graphName="$1" - local constructor="$2" - - # start an activity in not yes started - if [ "$_SD_ACTIVITY_STATUS" != "running" ] ; then - _SD_START_ACTIVITY "reasoning on graph <$graphName>" - fi - - local queryResult=${3:-"${SD_ACTIVITY_DIR}/out/construction.ttl"} - - # If constructor is an URL, download it first - if [[ "${constructor:0:10}" =~ "://" ]]; then - local queryFile="${SD_ACTIVITY_DIR}/in/constructor.query" - _SD_CURL_CMD "$constructor" "$queryFile" || SD_FATAL "Unable to download ${constructor:0:40}" - constructor="@$queryFile" - fi - - _SD_ACTIVITY_LOG "evaluating axiom ${constructor:0:60}..." - if SD_SPARQL_QUERY "text/turtle" "$constructor" > "$queryResult" ; then - _SD_COMMIT_ACTIVITY "$graphName" - else - _SD_INVALIDATE_GRAPH "$graphName" - return 1 - fi -} - diff --git a/scripts/ruleset.include b/scripts/ruleset.include deleted file mode 100644 index 70543eb..0000000 --- a/scripts/ruleset.include +++ /dev/null @@ -1,110 +0,0 @@ -if [ ! -z ${__module_agent_rule+x} ]; then return ; else __module_agent_rule=1 ; fi - -SD_INCLUDE activity - -SD_REQUIRES_CMD SD_EVAL_CONSTRUCTOR SD_SPARQL_UPDATE SD_LEARN envsubst - - - -# Start a ruleset related activity adding some extra metadata -# retrived in the source file -function SD_START_RULESET_ACTIVITY { - SD_REQUIRES_VAR 1 2 3 - local ruleName="$1" - local title="$2" - local source="$3" - - local graphDescription - local graphTrust - - _SD_START_ACTIVITY "inferencing axiom $title from ruleset $ruleName" - _SD_LOG_PROV ":graph a kees:InferenceGraph; dct:title \"$ruleName\"@it ." - - graphDescription=$(grep -oP '(?<=##@ )[^"]*' "$source" | head -1) - if [ ! 
-z "$graphDescription" ]; then - _SD_LOG_PROV ":graph dct:description \"$graphDescription\"@it ." - fi - - graphTrust=$(grep -oP '(?<=#trust=)[0-1]\.\d\d' "$source" | head -1) - if [ ! -z "$graphTrust" ]; then - _SD_LOG_PROV " -:axiom_trust a ; - :graph ; - kees:trustGraphMetric; - $graphTrust ; - true . -" - fi -} - - -# Like _SD_COMMIT_ACTIVITY but without dropping the graph -function _SD_COMMIT_REASONING { - local graphName="$1" - - _SD_LOG_PROV " - :activity prov:endedAtTime TIMESTAMP ; prov:generated :graph . - :graph sd:name <$graphName>; - prov:wasGeneratedBy :activity ; - dct:created TIMESTAMP ; - dct:modified TIMESTAMP . - " - SD_LOAD_RDF_FILE "$graphName" "$SD_ACTIVITY_DIR/prov.ttl" - - if [ $SD_DEBUG -eq 0 ]; then rm -rf "$SD_ACTIVITY_DIR" ; fi - _SD_ACTIVITY_STATUS='noop' -} - - - -# $1 must be the graph base name space -# $2 must be the rule directory that contains axioms -# A rule can be: -# - a sparql query construct directive (if axiomExtension = construct) -# - a static linked data file (if axiomExtension = ttl) -# - a sparql query select directive, in this case the query is evaluated and the result -# passed to a php reasoner whose name is the same of the select but with axiomExtension .php -function SD_EVAL_RULESET { - SD_REQUIRES_VAR 1 2 - local graphNS="$1" - local rule="$2" - - local axiom - for axiom in $(ls -v "${rule}"); do - local axiomExtension="${axiom##*.}" - local axiomName="${axiom%.*}" - local ruleName=$(basename -- "$rule") - local graphName="${graphNS}${rule}/${axiom}" - - case "$axiomExtension" in - construct) - SD_START_RULESET_ACTIVITY "$ruleName" "$axiomName" "${rule}/$axiom" - SD_EVAL_CONSTRUCTOR "$graphName" "@${rule}/$axiom" - ;; - update) - ( - SD_START_RULESET_ACTIVITY "$ruleName" "$axiomName" "${rule}/$axiom" - export rule axiom graphNS axiomName ruleName graphName - cat "${rule}/$axiom" | envsubst | SD_SPARQL_UPDATE "@-" - if [ $? 
-eq 0 ]; then - _SD_COMMIT_REASONING "$graphName" - else - _SD_INVALIDATE_GRAPH "$graphName" - return 1 - fi - ) - ;; - reasoning) - SD_START_RULESET_ACTIVITY "$ruleName" "$axiomName" "${rule}/$axiom" - SD_SPARQL_UPDATE "DROP SILENT GRAPH <$graphName>" ; - . "${rule}/$axiom" - if [ $? -eq 0 ]; then - _SD_COMMIT_REASONING "$graphName" - else - _SD_INVALIDATE_GRAPH "$graphName" - return 1 - fi - ;; - esac - done -} \ No newline at end of file diff --git a/scripts/sdaas b/scripts/sdaas deleted file mode 100644 index 822b5fb..0000000 --- a/scripts/sdaas +++ /dev/null @@ -1,165 +0,0 @@ -#!/usr/bin/env bash -# Copyright (C) 2019-2020 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license - -_SDAAS_DIR="$(dirname "$( realpath "$0" )")" - -_SDAAS_VERSION="3" -_SDAAS_AGENT_NAME="Anassimene" - -_SD_MUST_EXIT=0 -_SD_REBOOT=0 -SD_VERBOSE=${SD_VERBOSE:-1} -SD_DEBUG=${SD_DEBUG:-0} -SD_QUADSTORE=${SD_QUADSTORE:-kb} -SD_NOWARMUP=${SD_NOWARMUP:-0} - -function _SDAAS_FATAL_ERROR { - >&2 cat <<-EOT - Smart Data as a Service (SDaaS) platform - '$_SDAAS_AGENT_NAME' edition - Copyright (C) 2018-2020 http://linkeddata.center/ - - valid options: - - -d, --debug - saves extra debug information without deleting cached info on termination - - This option is equivalent to declare SD_DEBUG=1 environment variable - - -f, --file - executes a build script - - -k, --graph-engine kb|ekb|blazegraph|neptune - defines the backed technology to be used. Supported technologies: - kb (default) : use the kb archetype of the sddas reasoner as rdfstore. - Requires a running local instance of blazegraph, forces "pull" policy in a one step (i.e. no chunks support). 
- This is the only option available in the sdaas community edition - blazegraph: use a remote blazegraph instance (you need setup env variables SD_BG_ENDPOINT and SD_BG_NAMESPACE) - ekb: use a LinkedData.Center GDaaS instance (you need setup env variables SD_KBUSER and SD_KBPASSWORD) - neptune: use a aws neptune instance (you need setup SD_NEPTUNE_INSTANCE, only works on same VPN as neptune host) - - N.B. each graph engine may require additional special setup procedures. - This option is equivalent to use SD_QUADSTORE environment variable - - -a, --accrual-policy pull|push - defines the policy used to store RDF statements in the graph engine: - .) with "pull" data is published to a shared resurce and - then the graph engine is requested to load it. - This is fast but it requires a bidirectional channel from sdaas and - the graph engine. It is the default. - .) with "push" data is pushed by sdaas to the graph db - Data is splitted in small chunks. By defauult the chunks size is 100000 triples - for the pull policy and 50000 triples for the push policy. - - This option is equivalent to use SD_ACCRUAL_POLICY environment variable - You can personalize the chunks size using SD_LOAD_CHUNK_SIZE environment variable. - - -q, --quiet - Do not show progress meter or error messages. - - -u, --urispace - use this namespace when creating new URIs. Default urn:sdaas:resource: - same as defining the SD_URISPACE= environment variable - - --reboot - WARNING!!! 
erase all knowledge base before executing the rebuild process - - --no-warmup skipp the reasoner warmup (usefull if reasoner in not loaded at startup) - same as defining the SD_NOWARMUP=1 environment variable - - --verbose - -v is equivalent to --verbose 1, --verbose 0 is an alias of -q - - --version - prints the platform version - EOT - exit 1 -} - - -### Parse command line options -__parsed=$(getopt --options=qvda:k:f:u: --longoptions=quiet,verbose:,debug,reboot,version,file:,graph-engine:,accrual-policy:,urispace:,no-warmup --name "$0" -- "$@") || _SDAAS_FATAL_ERROR -eval set -- "$__parsed" -unset __parsed - -while true; do - case "$1" in - -q|--quiet) - SD_VERBOSE=0 - shift - ;; - -v) - SD_VERBOSE=1 - shift - ;; - --verbose) - SD_VERBOSE=$2 - shift 2 - ;; - -d|--debug) - SD_DEBUG=1 - shift - ;; - --reboot) - _SD_REBOOT=1 - shift - ;; - --version) - echo "Smart Data as a Service Platform version $_SDAAS_VERSION providing the $_SDAAS_AGENT_NAME agent." - shift - ;; - -f|--file) - _SD_PROGRAMFILE="$2" - shift 2 - ;; - -k|--graph-engine) - SD_QUADSTORE="$2" - shift 2 - ;; - -a|--accrual-policy) - SD_ACCRUAL_POLICY="$2" - shift 2 - ;; - -u|--urispace) - SD_URISPACE="$2" - shift 2 - ;; - --no-warmup) - SD_NOWARMUP=1 - shift - ;; - --) - shift - break - ;; - *) - _SDAAS_FATAL_ERROR - ;; - esac -done - -if [ $_SD_MUST_EXIT -eq 1 ]; then exit; fi - -if [ -f "$_SD_PROGRAMFILE" ]; then - . "$_SDAAS_DIR/platform.include" - . "$_SD_PROGRAMFILE" -else - inifile=$(mktemp --tmpdir) - if [ -f ~/.bashrc ]; then - cat ~/.bashrc > "$inifile" - fi - cat <<-EOT >> "$inifile" - _SD_REBOOT=${_SD_REBOOT} - SD_VERBOSE=${SD_VERBOSE} - SD_DEBUG=${SD_DEBUG} - SD_QUADSTORE=${SD_QUADSTORE} - SD_ACCRUAL_POLICY=${SD_ACCRUAL_POLICY} - SD_URISPACE=${SD_URISPACE} - SD_NOWARMUP=${SD_NOWARMUP} - . 
"$_SDAAS_DIR/platform.include" - PS1="sdaas > " - EOT - bash --init-file $inifile - rm -f "$inifile" -fi - diff --git a/scripts/teaching.include b/scripts/teaching.include deleted file mode 100644 index ddf5eca..0000000 --- a/scripts/teaching.include +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license -if [ ! -z ${__module_teaching+x} ]; then return ; else __module_teaching=1 ; fi - - - -function SD_CREATE_DATA_DISTRIBUTION { - local distDir=${1:-"distrib/$(date +'%s')"} - local questionsDir=${2:-"questions"} - - mkdir -p "$distDir" || SD_FATAL_ERROR "Can't create $distDir" - local query - for query in "$questionsDir"/*.* ; do - local filename=$(basename "$query") - local extension="${filename##*.}" - local filename="${filename%.*}" - case "$extension" in - sparql|rq|select) - SD_LOG "Generating answers for $filename tabular question" - SD_SPARQL_QUERY "text/csv" "@$query" > "$distDir/$filename.csv" - ;; - construct) - SD_LOG "Generating answers for $filename graph question" - SD_SPARQL_QUERY "text/turtle" "@$query" > "$distDir/$filename.ttl" - ;; - script) - SD_LOG "Generating custom answers from script in $query" - . $query $distDir - ;; - md) - SD_LOG "Copying documentation file $query" - cp $query "$distDir" - esac - done - - SD_LOG "Distribution completed in $distDir:" -} - diff --git a/scripts/testing.include b/scripts/testing.include deleted file mode 100644 index 7604113..0000000 --- a/scripts/testing.include +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (C) 2019 LinkedData.Center - All Rights Reserved -if [ ! -z ${__module_testing+x} ]; then return ; else __module_testing=1 ; fi - - -# extra files to be copied in the distribution -function SD_DATA_TEST { - local testDir=${1:-"tests"} - local failMode=${2:-CONTINUE} - - local failedTests=0 - - if [ ! -d "$testDir" ]; then return 0; fi - - SD_LOG "Testing knowledge graph integrity..." 
- - - for testFile in $(find ${testDir} -name "*.ask"); do - echo -n " $(basename $testFile)..." - case "$(SD_SPARQL_QUERY xml @$testFile)" in - *true* ) echo "OK";; - *false* ) - echo "FAIL" - if [ "$failMode" = "FASTFAIL" ]; then - SD_FATAL_ERROR "test FASTFAIL" - else - ((failedTests++)) - fi - ;; - esac - done - - for testFile in $(find ${testDir} -name "*.select" ); do - echo -n " $(basename $testFile)..." - if [ -z "$(SD_SPARQL_QUERY csv-h @$testFile)" ] ; then - echo "OK" - else - echo "FAIL" - if [ "$failMode" = "FASTFAIL" ]; then - SD_FATAL_ERROR "test FASTFAIL" - else - ((failedTests++)) - fi - fi - done - - if [ "$failMode" = "FAILCHECK" -a $failedTests -gt 0 ] ; then - SD_FATAL_ERROR "abort on $failedTests test failed." - fi -} - diff --git a/sdaas-entrypoint.sh b/sdaas-entrypoint.sh deleted file mode 100644 index 881dcc3..0000000 --- a/sdaas-entrypoint.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash -# Copyright (C) 2020 LinkedData.Center - All Rights Reserved -# Permission to copy and modify is granted under the MIT license - -export _SD_LOCAL_REASONER_STARTED=0 - - -if [ ${SD_NOWARMUP:=0} -ne 1 ]; then - /sdaas-start -d --size ${SDAAS_SIZE:-micro} - _SD_LOCAL_REASONER_STARTED=1 -fi - - -# priority to local override of the sdaas scripts -if [ -d "${SDAAS_WORKSPACE:=/workspace}/scripts/sdaas" ]; then - echo "Running SDaaS from workspace" - exec "${SDAAS_WORKSPACE}/scripts/sdaas" "$@" -else - exec $SDAAS_BIN_DIR/sdaas "$@" -fi diff --git a/tests/data/ask-false.xml b/tests/data/ask-false.xml new file mode 100644 index 0000000..19e16d9 --- /dev/null +++ b/tests/data/ask-false.xml @@ -0,0 +1,6 @@ + + + + + false + \ No newline at end of file diff --git a/tests/data/ask-true.xml b/tests/data/ask-true.xml new file mode 100644 index 0000000..7edf9b5 --- /dev/null +++ b/tests/data/ask-true.xml @@ -0,0 +1,6 @@ + + + + + true + \ No newline at end of file diff --git a/tests/data/empty-select.xml b/tests/data/empty-select.xml new file mode 100644 index 
0000000..da8984c --- /dev/null +++ b/tests/data/empty-select.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/tests/data/empty-store.nt b/tests/data/empty-store.nt new file mode 100644 index 0000000..f2554da --- /dev/null +++ b/tests/data/empty-store.nt @@ -0,0 +1,41 @@ +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service . +_:service _:defaultDataset . +_:defaultDataset . +_:defaultDataset . +_:defaultDataset "kb"^^ . +_:defaultDataset "kb"^^ . +_:defaultDataset . +_:defaultDataset "^.*"^^ . +_:defaultDataset _:defaultGraph . +_:defaultGraph . +_:defaultGraph "0"^^ . +_:defaultGraph "3106"^^ . +_:defaultGraph "0"^^ . +_:defaultGraph "0"^^ . diff --git a/tests/data/not-empty-select.xml b/tests/data/not-empty-select.xml new file mode 100644 index 0000000..d366655 --- /dev/null +++ b/tests/data/not-empty-select.xml @@ -0,0 +1,13 @@ + + + + + + + + + 12342 + + + + \ No newline at end of file diff --git a/tests/data/sample1.ttl b/tests/data/sample1.ttl deleted file mode 100644 index f71399c..0000000 --- a/tests/data/sample1.ttl +++ /dev/null @@ -1,69 +0,0 @@ -@prefix txn: . -@prefix srv: . -@prefix log: . -@prefix xsd: . - -txn:123 a log:Transaction ; - log:processedBy srv:A ; - log:processedAt "2015-10-16T10:22:23"^^xsd:dateTime ; - log:statusCode 200 . - -txn:124 a log:Transaction ; - log:processedBy srv:B ; - log:processedAt "2015-10-16T10:22:24"^^xsd:dateTime ; - log:statusCode 200 . - -txn:125 a log:Transaction ; - log:processedBy srv:C ; - log:processedAt "2015-10-16T10:22:24"^^xsd:dateTime ; - log:statusCode 200 . 
- -txn:126 a log:Transaction ; - log:processedBy srv:A ; - log:processedAt "2015-10-16T10:22:25"^^xsd:dateTime ; - log:statusCode 200 . - -txn:127 a log:Transaction ; - log:processedBy srv:B ; - log:processedAt "2015-10-16T10:22:25"^^xsd:dateTime ; - log:statusCode 200 . - -txn:128 a log:Transaction ; - log:processedBy srv:C ; - log:processedAt "2015-10-16T10:22:26"^^xsd:dateTime ; - log:statusCode 200 . - -txn:129 a log:Transaction ; - log:processedBy srv:A ; - log:processedAt "2015-10-16T10:22:28"^^xsd:dateTime ; - log:statusCode 500 . - -txn:130 a log:Transaction ; - log:processedBy srv:B ; - log:processedAt "2015-10-16T10:22:31"^^xsd:dateTime ; - log:statusCode 200 . - -txn:131 a log:Transaction ; - log:processedBy srv:C ; - log:processedAt "2015-10-16T10:22:31"^^xsd:dateTime ; - log:statusCode 200 . - -txn:132 a log:Transaction ; - log:processedBy srv:A ; - log:processedAt "2015-10-16T10:22:32"^^xsd:dateTime ; - log:statusCode 500 . - -txn:133 a log:Transaction ; - log:processedBy srv:B ; - log:processedAt "2015-10-16T10:22:33"^^xsd:dateTime ; - log:statusCode 200 . - -txn:134 a log:Transaction ; - log:processedBy srv:C ; - log:processedAt "2015-10-16T10:22:33"^^xsd:dateTime ; - log:statusCode 200 . - -txn:135 a log:Transaction ; - log:processedBy srv:A ; - log:processedAt "2015-10-16T10:22:35"^^xsd:dateTime ; - log:statusCode 401 . \ No newline at end of file diff --git a/tests/functional/data/geo.ttl b/tests/functional/data/geo.ttl deleted file mode 100644 index 1c34324..0000000 --- a/tests/functional/data/geo.ttl +++ /dev/null @@ -1,52 +0,0 @@ -@prefix rdf: . -@prefix rdfs: . -@prefix geoliteral: . -@prefix example: . - -example:Oktoberfest-2013 - rdf:type example:Fair ; - rdfs:label "Oktoberfest 2013" ; - example:happened "48.13188#11.54965#1379714400"^^geoliteral:lat-lon-time ; - example:city example:Munich . 
- -example:RAR-2013 - rdf:type example:Festival ; - rdfs:label "Rock am Ring 2013" ; - example:happened "50.33406#6.94259#1370556000"^^geoliteral:lat-lon-time ; - example:city example:Nuerburg . - -example:Oktoberfest-2014 - rdf:type example:Fair ; - rdfs:label "Oktoberfest 2014" ; - example:happened "48.13188#11.54965#1411164000"^^geoliteral:lat-lon-time ; - example:city example:Munich . - -example:RAR-2014 - rdf:type example:Festival ; - rdfs:label "Rock am Ring 2014" ; - example:happened "50.33406#6.94259#1401919200"^^geoliteral:lat-lon-time ; - example:city example:Nuerburg . - -example:Oktoberfest-2015 - rdf:type example:Fair ; - rdfs:label "Oktoberfest 2015" ; - example:happened "48.13188#11.54965#1442613600"^^geoliteral:lat-lon-time ; - example:city example:Munich . - -example:RAR-2015 - rdf:type example:Festival ; - rdfs:label "Rock am Ring 2015" ; - example:happened "50.36780#7.31170#1464904800"^^geoliteral:lat-lon-time ; - example:city example:Mendig . - -example:Munich - rdf:type example:City ; - example:location "48.13743#11.57549"^^geoliteral:lat-lon . - -example:Nuerburg - rdf:type example:City ; - example:location "50.34188#6.95203"^^geoliteral:lat-lon . - -example:Mendig - rdf:type example:City ; - example:location "50.36667#7.28333"^^geoliteral:lat-lon . \ No newline at end of file diff --git a/tests/functional/kbQueryTest.bats b/tests/functional/kbQueryTest.bats deleted file mode 100644 index abee013..0000000 --- a/tests/functional/kbQueryTest.bats +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env bats - -function setup { - SD_CACHE='/tmp/ftest' - SD_LOG_FILE="$SD_CACHE/session.log" - SD_DEBUG_FILE="$SD_CACHE/session.debug" - . "$BATS_TEST_DIRNAME/../../scripts/platform.include" - - - SD_BEGIN_INGESTION - SD_SPARQL_UPDATE "DROP ALL" - SD_LOAD_RDF_FILE urn:graph:geoexample "$BATS_TEST_DIRNAME/data/geo.ttl" - SD_SPARQL_UPDATE ' - INSERT DATA { - GRAPH { - ,, . 
- } - } - ' -} - - -@test "KB query test 1" { - run SD_SPARQL_QUERY 'application/sparql-results+xml' 'ASK { GRAPH {?s ?p ?o}}' - [ $status -eq 0 ] - [[ $output =~ "true" ]] -} - - - -@test "KB query test 2" { - run SD_SPARQL_QUERY 'application/sparql-results+xml' 'ASK { GRAPH { }}' - [ $status -eq 0 ] - [[ $output =~ "true" ]] -} - - - -@test "KB query test 3" { - run SD_SPARQL_QUERY bool 'ASK { GRAPH { }}' - [ $status -eq 0 ] - [ $output = "true" ] -} - - - -@test "KB query test 4" { - run SD_SPARQL_QUERY bool 'ASK { GRAPH { }}' - [ $status -eq 0 ] - [ $output = "false" ] -} - - - -@test "KB query test 5" { - run SD_SPARQL_QUERY csv-f1 'SELECT ?s ?p {?s ?p }' - [ $status -eq 0 ] - [ $output = "urn:s:1" ] -} - diff --git a/tests/functional/platformBaseTest.bats b/tests/functional/platformBaseTest.bats deleted file mode 100644 index 1cd6e02..0000000 --- a/tests/functional/platformBaseTest.bats +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bats - - -@test "SDAAS platform inclusion" { - SD_CACHE='/tmp/ftest' - SD_LOG_FILE="$SD_CACHE/session.log" - SD_DEBUG_FILE="$SD_CACHE/session.debug" - . "$BATS_TEST_DIRNAME/../../scripts/platform.include" - run echo "OK" - [ ! -f .env ] - [ -f /tmp/ftest/session.log ] - [ -f /tmp/ftest/session.debug ] - [ ! 
-z "$SD_DEBUG" ] - [ $__module_caching -eq 1 ] - [ $__module_logging -eq 1 ] - [ $__module_bg_reasoning -eq 1 ] - [ $__module_kb -eq 1 ] - [ $__module_learning -eq 1 ] -} diff --git a/tests/functional/sparqlTest.bats b/tests/functional/sparqlTest.bats new file mode 100644 index 0000000..628747c --- /dev/null +++ b/tests/functional/sparqlTest.bats @@ -0,0 +1,75 @@ +#!/usr/bin/env bats + +function on_script_startup { + source "$SDAAS_INSTALL_DIR/core" NO_SPLASH + STORE="http://kb:8080/sdaas/sparql" + STORE_TYPE=w3c + INSERT_TEST_STATEMENT='INSERT DATA { GRAPH { } }' + sd_include sparql +} + +on_script_startup + + +function setup { + sd_driver_erase STORE +} + + +########## sd_sparql_update + +@test "sd_sparql_update insert data" { + run sd_sparql_update "$INSERT_TEST_STATEMENT" + [[ "$status" -eq 0 ]] + [[ "$(sd_driver_size STORE)" -eq 1 ]] +} + + +@test "sd_sparql_update wrong statement" { + run sd_sparql_update "NO UPDATE STATEMENT" + [[ "$status" -ne 0 ]] + [[ "${lines[0]}" =~ ^\[ERROR\] ]] + [[ "$(sd_driver_size STORE)" -eq 0 ]] +} + + +########## sd_sparql_query + + +@test "sd_sparql_query inserted data" { + sd_sparql_update "$INSERT_TEST_STATEMENT" + run sd_sparql_query -O "csv-h" "SELECT ?s ?p ?o { ?s ?p ?o }" + [[ "$status" -eq 0 ]] + [[ "${lines[0]}" == "urn:uri:s,urn:uri:p,urn:uri:o" ]] +} + + + +########## sd_sparql_graph + +@test "sd_sparql_graph from file " { + run sd_sparql_graph -f ntriples -a PUT -r "@tests/data/empty-store.nt" "urn:graph:store" + [[ "$status" -eq 0 ]] + [[ "$(sd_driver_size STORE)" -eq 41 ]] +} + + +@test "sd_sparql_graph from stream " { + cat "tests/data/empty-store.nt" | sd_sparql_graph -a PUT "urn:graph:store" + [[ "$(sd_driver_size STORE)" -eq 41 ]] +} + + +@test "sd_sparql_graph with put and guess" { + cat "tests/data/empty-store.nt" | sd_sparql_graph -a PUT "urn:graph:store" + [[ "$(sd_driver_size STORE)" -eq 41 ]] + cat "tests/data/empty-store.nt" | sd_sparql_graph -a PUT "urn:graph:store" + [[ "$(sd_driver_size STORE)" -eq 41 
]] +} + +@test "sd_sparql_graph with post and guess" { + cat "tests/data/empty-store.nt" | sd_sparql_graph "urn:graph:store" + [[ "$(sd_driver_size STORE)" -eq 41 ]] + cat "tests/data/empty-store.nt" | sd_sparql_graph "urn:graph:store" + [[ "$(sd_driver_size STORE)" -eq 82 ]] +} \ No newline at end of file diff --git a/tests/functional/w3cTest.bats b/tests/functional/w3cTest.bats new file mode 100644 index 0000000..c1dd046 --- /dev/null +++ b/tests/functional/w3cTest.bats @@ -0,0 +1,44 @@ +#!/usr/bin/env bats + +function on_script_startup { + source "$SDAAS_INSTALL_DIR/core" NO_SPLASH + TESTSTORE="http://kb:8080/sdaas/sparql" + INSERT_TEST_STATEMENT='INSERT DATA { GRAPH { } }' + sd_include w3c +} + +on_script_startup + +function setup { + sd_w3c_erase TESTSTORE +} + + + +@test "sd_w3c_update insert data" { + run sd_w3c_update TESTSTORE "$INSERT_TEST_STATEMENT" + [[ "$status" -eq 0 ]] + [[ "$(sd_w3c_size TESTSTORE)" -eq 1 ]] +} + + +@test "sd_w3c_update wrong statement" { + run sd_w3c_update TESTSTORE "NO UPDATE STATEMENT" + [[ "$status" -ne 0 ]] + [[ "${lines[0]}" =~ ERROR ]] + [[ "$(sd_w3c_size TESTSTORE)" -eq 0 ]] +} + + + +@test "sd_w3c_query inserted data" { + sd_w3c_update TESTSTORE "$INSERT_TEST_STATEMENT" + run sd_w3c_query TESTSTORE "text/csv" "SELECT ?s ?p ?o { ?s ?p ?o }" + [[ "$status" -eq 0 ]] + [[ "${lines[0]}" =~ "s,p,o" ]] + [[ "${lines[1]}" =~ "urn:uri:s,urn:uri:p,urn:uri:o" ]] +} + + + + diff --git a/tests/system/gettingStartedCETest.bats b/tests/system/gettingStartedCETest.bats new file mode 100644 index 0000000..79c8096 --- /dev/null +++ b/tests/system/gettingStartedCETest.bats @@ -0,0 +1,51 @@ +#!/usr/bin/env bats + + +function on_script_startup { + STORE="http://kb:8080/sdaas/sparql" + STORE_TYPE="w3c" + SD_LOG_PRIORITY=3 + source "$SDAAS_INSTALL_DIR/core" NO_SPLASH +} + +on_script_startup + + + +@test "step1: drop all" { + run sd sparql update "DROP ALL" + [[ "$status" -eq 0 ]] + [[ "$(sd driver size STORE)" -eq 0 ]] +} + +@test "step2: load 
from sparql update" { + run sd sparql update 'LOAD INTO GRAPH ' + [[ "$status" -eq 0 ]] + [[ "$(sd driver size STORE)" -eq 16389 ]] +} + +@test "step3: load from resource" { + run sd sparql graph -a PUT -r https://schema.org/version/latest/schemaorg-current-http.ttl "urn:graph:1" + [[ "$status" -eq 0 ]] + [[ "$(sd driver size STORE)" -eq 16389 ]] +} + + +@test "step4: query" { + run sd sparql query -O csv "SELECT ?g (COUNT (?s) AS ?subjects) WHERE {GRAPH ?g{?s?p ?o}} GROUP BY ?g" + [[ "$status" -eq 0 ]] + [[ "${lines[0]}" == "g,subjects" ]] + [[ "$(sd driver size STORE)" -eq 16389 ]] + [[ "${lines[1]}" == "urn:graph:0,16389" ]] + [[ "${lines[2]}" == "urn:graph:1,16389" ]] +} + +@test "step5: query by streamed command" { + run sd sparql query -O csv "SELECT DISTINCT ?class WHERE { ?s a ?class} LIMIT 10" + [[ "$status" -eq 0 ]] + [[ "${lines[0]}" == "class" ]] + [[ "${#lines[@]}" -eq 11 ]] +} + + + diff --git a/tests/system/platform/.gitignore b/tests/system/platform/.gitignore deleted file mode 100644 index 26348f7..0000000 --- a/tests/system/platform/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.cache/ -distrib/ -scripts/ diff --git a/tests/system/platform/axioms/calculated_trusts.construct b/tests/system/platform/axioms/calculated_trusts.construct deleted file mode 100644 index bc8e1ec..0000000 --- a/tests/system/platform/axioms/calculated_trusts.construct +++ /dev/null @@ -1,26 +0,0 @@ -# -# This axiom calulate the mean value for graph trust -# -# Copyright (c) 2017 by LinkedData.Center. 
Some right reserved, refer to project license -# -PREFIX kees: -PREFIX qb: -PREFIX daq: -PREFIX sd: - -CONSTRUCT { - ?graphUri sd:name ?g; kees:trust ?trust -} WHERE { - { - SELECT ?g (AVG(?value) AS ?trust) - WHERE { - GRAPH ?g {} - ?observation a qb:Observation ; - daq:metric kees:trustGraphMetric; - daq:computedOn ?g ; - daq:value ?value - } GROUP BY ?g - } - BIND ( IRI(CONCAT("urn:graph:",MD5(STR(?g)))) AS ?graphUri) - FILTER NOT EXISTS { [] sd:name ?g; kees:trust [] } -} \ No newline at end of file diff --git a/tests/system/platform/axioms/city_name_index.construct b/tests/system/platform/axioms/city_name_index.construct deleted file mode 100644 index a306bc9..0000000 --- a/tests/system/platform/axioms/city_name_index.construct +++ /dev/null @@ -1,5 +0,0 @@ -PREFIX schema: -CONSTRUCT { ?s ?text } -WHERE { - ?s a schema:City; schema:name ?text -} \ No newline at end of file diff --git a/tests/system/platform/axioms/city_names_starting_with_m.select b/tests/system/platform/axioms/city_names_starting_with_m.select deleted file mode 100644 index 7974da5..0000000 --- a/tests/system/platform/axioms/city_names_starting_with_m.select +++ /dev/null @@ -1,6 +0,0 @@ -#Just selects all cities whose name starts with the letter M -PREFIX schema: -SELECT ?s ?text WHERE { - ?s a schema:City; schema:name ?text - FILTER ( STRSTARTS( ?text , 'M')) -} \ No newline at end of file diff --git a/tests/system/platform/axioms/default_trustmap.construct b/tests/system/platform/axioms/default_trustmap.construct deleted file mode 100644 index c61382d..0000000 --- a/tests/system/platform/axioms/default_trustmap.construct +++ /dev/null @@ -1,21 +0,0 @@ -# -# This axiom calulate default for unspecified graph trust observation -# -# Copyright (c) 2017 by LinkedData.Center. 
Some right reserved, refer to project license -# -PREFIX kees: -PREFIX qb: -PREFIX daq: -PREFIX sdaas: -PREFIX sd: - -CONSTRUCT { - [] a qb:Observation ; - daq:computedOn ?g ; - daq:metric kees:trustGraphMetric; - daq:value 0.5 ; - daq:isEstimated true . -} WHERE { - GRAPH ?g {?s ?p ?o} - FILTER NOT EXISTS { ?observation daq:computedOn ?g } -} \ No newline at end of file diff --git a/tests/system/platform/build.sdaas b/tests/system/platform/build.sdaas deleted file mode 100644 index c0b0ad5..0000000 --- a/tests/system/platform/build.sdaas +++ /dev/null @@ -1,41 +0,0 @@ -############################################################################### -# This is an example sdaas platform script -# run it with the command: -# sdaas -f build.sdaas -############################################################################### - -# Stop on any error -set -a - -SD_INCLUDE testing - -# Define a bulk of RDF data dump files -KEES_SRCs=("data/kees.ttl" "data/introspection.ttl" "data/trustmap.ttl") - -# Define a transformation pipeline -ISTAT_GATEWAY=("|in|out|.ttl|iconv -f ISO88592 -t UTF-8|tr -d '\r' | awk -f gateways/istat.awk") - - -# Ingest data from a set of files -SD_LEARN "urn:kees:config" KEES_SRCs[@] - -# Ingest data from a simple RDF web resource -SD_LEARN http://schema.org/ https://schema.org/version/latest/schemaorg-current-https.ttl - -# Ingest data from a web resource through a gateway -SD_LEARN \ - "urn:graph:istat" \ - 'https://s3-eu-west-1.amazonaws.com/demo.hub1.linkeddata.center/data/comuni.csv' \ - ISTAT_GATEWAY[@] - -# Evaluate some reasonings: -SD_EVAL_CONSTRUCTOR "urn:graph:trustmap_default" @axioms/default_trustmap.construct -SD_EVAL_CONSTRUCTOR "urn:graph:calculated_trusts" @axioms/calculated_trusts.construct - -# Answer predefined questions -SD_CREATE_DATA_DISTRIBUTION /tmp/distrib - -# test knowledge base -SD_DATA_TEST -# Do final knowledge base consistence checks -SD_THATS_ALL_FOLKS \ No newline at end of file diff --git 
a/tests/system/platform/data/introspection.ttl b/tests/system/platform/data/introspection.ttl deleted file mode 100644 index 7261ec5..0000000 --- a/tests/system/platform/data/introspection.ttl +++ /dev/null @@ -1,67 +0,0 @@ -@prefix dcat: . -@prefix foaf: . -@prefix xsd: . -@prefix void: . -@prefix prov: . -@prefix rdfs: . -@prefix kees: . -@prefix sd: . -@prefix dct: . -@prefix : <#> . - -:table_introspection_catalogue a kees:Table ; - dct:identifier "introspection:catalogue" ; - rdfs:label "Knowledge base catalog"@en ; - rdfs:comment "All RDFtriples by graphs"@en ; - kees:queryText """# All RDFtriples by graphs -PREFIX sd: -PREFIX rdfs: -PREFIX dct: -PREFIX dcat: -PREFIX foaf: -PREFIX kees: - -SELECT ?type ?title ?publisher ?license ( ?graph AS ?graph_name) ?trust ?size -WHERE { - - { - SELECT ?graph (COUNT(*) AS ?size) - WHERE { - GRAPH ?graph {?s ?p ?o} - } GROUP BY ?graph - } - - OPTIONAL { [] sd:name ?graph; a ?ut} - BIND( COALESCE(?ut, sd:NamedGraph) AS ?uriType ) - VALUES (?uriType ?type) { - (kees:LinkedDataGraph "A - dataset") - (kees:InferredKnowledgeGraph "B - inference") - (kees:TBoxGraph "C - vocabulary") - (kees:KBConfigGraph "D - config") - (sd:NamedGraph "Generic named graph") - } - - OPTIONAL { [] sd:name ?graph; kees:trust ?trust } - - OPTIONAL { - [] sd:name ?graph; dct:source ?source. - ?ds dct:title ?dataset_title ; dcat:distribution ?dist. - ?dist dcat:accessURL ?source. - OPTIONAL{ ?dist dct:license ?licenseURL } - OPTIONAL{ ?licenseURL dct:title ?license_title } - OPTIONAL { ?ds dct:publisher [ foaf:name ?ds_publisher] } - } - OPTIONAL { [] a kees:KnowledgeBase; dct:publisher [ foaf:name ?kb_publisher] } - - OPTIONAL { - [] a kees:InferredKnowledgeGraph; sd:name ?graph; dct:title ?graph_title . - BIND ( "SDaaS Agent" as ?agent ) - BIND ( "N.A." 
as ?license_title ) - } - BIND( COALESCE(?graph_title, ?dataset_title) AS ?title ) - BIND( COALESCE(?license_title, ?licenseURL, "not provided") AS ?license ) - BIND( COALESCE(?ds_publisher, ?agent, ?kb_publisher) AS ?publisher ) - -} ORDER BY ?type ?publisher DESC(?size) -""" -. diff --git a/tests/system/platform/data/kees.ttl b/tests/system/platform/data/kees.ttl deleted file mode 100644 index 6bbd2fd..0000000 --- a/tests/system/platform/data/kees.ttl +++ /dev/null @@ -1,8 +0,0 @@ -@prefix rdfs: . -@prefix kees: . -@prefix sd: . -@prefix dct: . -@prefix foaf: . -@prefix dcat: . -@prefix xsd: . -@prefix : <#> . diff --git a/tests/system/platform/data/trustmap.ttl b/tests/system/platform/data/trustmap.ttl deleted file mode 100644 index 4bc1c82..0000000 --- a/tests/system/platform/data/trustmap.ttl +++ /dev/null @@ -1,40 +0,0 @@ -@prefix rdfs: . -@prefix kees: . -@prefix qb: . -@prefix daq: . - - -[] a qb:Observation ; - rdfs:comment "Subjective ranks could be biased"; - daq:computedOn ; - daq:metric kees:trustGraphMetric; - daq:value 0.98 ; - daq:isEstimated true . - -[] a qb:Observation ; - rdfs:comment "Official source"; - daq:computedOn ; - daq:metric kees:trustGraphMetric; - daq:value 1.00 ; - daq:isEstimated false . - -[] a qb:Observation ; - rdfs:comment "Official source"; - daq:computedOn ; - daq:metric kees:trustGraphMetric; - daq:value 1.00 ; - daq:isEstimated false . - -[] a qb:Observation ; - rdfs:comment "Deterministic algorithm"; - daq:computedOn ; - daq:metric kees:trustGraphMetric; - daq:value 1.00 ; - daq:isEstimated false . - -[] a qb:Observation ; - rdfs:comment "Deterministic algorithm"; - daq:computedOn ; - daq:metric kees:trustGraphMetric; - daq:value 1.00 ; - daq:isEstimated false . diff --git a/tests/system/platform/gateways/istat.awk b/tests/system/platform/gateways/istat.awk deleted file mode 100644 index 188c121..0000000 --- a/tests/system/platform/gateways/istat.awk +++ /dev/null @@ -1,2 +0,0 @@ -BEGIN { FS=";"; print "@prefix schema: ." 
} -NR>1 { printf " a schema:City; schema:name \"\"\"%s\"\"\".\n", $5, $6 } diff --git a/tests/system/platform/platformTest.bats b/tests/system/platform/platformTest.bats deleted file mode 100644 index cfb0054..0000000 --- a/tests/system/platform/platformTest.bats +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bats - -function doTest { - rm -rf "/tmp/distrib" - cd $BATS_TEST_DIRNAME - ../../../scripts/sdaas -f build.sdaas --reboot - cat /tmp/distrib/triplecounts.csv - } - - -@test "Ingestion platform acceptance test..." { - run doTest - echo "$output" > /tmp/x - [ $status -eq 0 ] - [[ "${lines[0]}" =~ "SD_START_LOGGING logging" ]] - [[ "${lines[1]}" =~ "SD_START_LOGGING debug info in /tmp/sdaas/session_" ]] - [[ "${lines[2]}" =~ "LinkedData.Center SDaaS platform (Anassimene) using 'kb' graph technology." ]] - [[ "${lines[3]}" =~ "Erasing the knowledge base... (it could take a while)" ]] - [[ "${lines[4]}" =~ "starded learning of graph " ]] - [[ "${lines[5]}" =~ "downloaded data/kees.ttl" ]] - [[ "${lines[6]}" =~ "downloaded data/introspection.ttl" ]] - [[ "${lines[7]}" =~ "downloaded data/trustmap.ttl" ]] - [[ "${lines[8]}" =~ "completed by replacing graph " ]] - [[ "${lines[9]}" =~ "starded learning of graph " ]] - [[ "${lines[10]}" =~ "downloaded https://schema.org/version/latest/schemaorg-current-https.ttl" ]] - [[ "${lines[11]}" =~ "completed by replacing graph " ]] - [[ "${lines[12]}" =~ "starded learning of graph " ]] - [[ "${lines[13]}" =~ "downloaded https://s3-eu-west-1.amazonaws.com/demo.hub1.linkeddata.center/data/comuni.csv" ]] - [[ "${lines[14]}" =~ "transformation pipeline: in/* -> cat -> iconv -f ISO88592 -t UTF-8|tr -d '\r' | awk -f gateways/istat.awk -> out" ]] - [[ "${lines[15]}" =~ "completed by replacing graph " ]] - [[ "${lines[16]}" =~ "starded reasoning on graph " ]] - [[ "${lines[17]}" =~ "evaluating axiom @axioms/default_trustmap.construct..." 
]] - [[ "${lines[18]}" =~ "completed by replacing graph " ]] - [[ "${lines[19]}" =~ "starded reasoning on graph " ]] - [[ "${lines[20]}" =~ "evaluating axiom @axioms/calculated_trusts.construct..." ]] - [[ "${lines[21]}" =~ "completed by replacing graph " ]] - [[ "${lines[22]}" =~ "Copying documentation file questions/README.md" ]] - [[ "${lines[23]}" =~ "Generating answers for triplecounts tabular question" ]] - [[ "${lines[24]}" =~ "Distribution completed in /tmp/distrib" ]] - [[ "${lines[25]}" =~ "Testing knowledge graph integrity..." ]] - [[ "${lines[26]}" =~ "1_istat_exists.ask...OK" ]] - [[ "${lines[27]}" =~ "2_empty.select...OK" ]] - [[ "${lines[28]}" =~ "Knowledge ingestion succesfully completed" ]] - [[ "${lines[29]}" =~ "graphName,RDF_statements" ]] - [[ "${lines[30]}" =~ "urn:graph:istat,15981" ]] - [[ "${lines[31]}" =~ "http://schema.org/" ]] - [[ "${lines[32]}" =~ "urn:kees:config,55" ]] - [[ "${lines[33]}" =~ "urn:graph:calculated_trusts,23" ]] - [[ "${lines[34]}" =~ "urn:graph:trustmap_default,13" ]] -} diff --git a/tests/system/platform/questions/README.md b/tests/system/platform/questions/README.md deleted file mode 100644 index ad249ee..0000000 --- a/tests/system/platform/questions/README.md +++ /dev/null @@ -1,5 +0,0 @@ -KNOWLEDGE BASE QUESTIONS -======================== - -Here add your build release note - diff --git a/tests/system/platform/questions/triplecounts.sparql b/tests/system/platform/questions/triplecounts.sparql deleted file mode 100644 index 3380a65..0000000 --- a/tests/system/platform/questions/triplecounts.sparql +++ /dev/null @@ -1,3 +0,0 @@ -SELECT ?graphName (count(*) as ?RDF_statements) WHERE { - Graph ?graphName { ?s ?p ?o } -} Group by ?graphName ORDER BY DESC(?RDF_statements) \ No newline at end of file diff --git a/tests/system/platform/tests/1_istat_exists.ask b/tests/system/platform/tests/1_istat_exists.ask deleted file mode 100644 index 137a129..0000000 --- a/tests/system/platform/tests/1_istat_exists.ask +++ /dev/null 
@@ -1,3 +0,0 @@ -ASK { - GRAPH {?s ?p ?o} -} \ No newline at end of file diff --git a/tests/system/platform/tests/2_empty.select b/tests/system/platform/tests/2_empty.select deleted file mode 100644 index 3ff3482..0000000 --- a/tests/system/platform/tests/2_empty.select +++ /dev/null @@ -1 +0,0 @@ -SELECT ?x ?y { ?x ?y} \ No newline at end of file diff --git a/tests/unit/activityTest.bats b/tests/unit/activityTest.bats deleted file mode 100644 index a2004aa..0000000 --- a/tests/unit/activityTest.bats +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env bats - -function SD_LOAD_RDF_FILE { : ; } - -function setup { - for stub in asserting caching date curl; do - . "$BATS_TEST_DIRNAME/stubs/${stub}_stub.include" - done - SD_INCLUDE logging - SD_INCLUDE activity - CREATE_STUB_CACHE - mkdir -p "$SD_CACHE/activity.test" -} - -function teardown { - if [ $SD_DEBUG -eq 0 ]; then DROP_STUB_CACHE ; fi -} - - -@test "_SD_START_ACTIVITY" { - _SD_ACTIVITY_STATUS="noop" - SD_URISPACE="urn:" - _SD_START_ACTIVITY "commento esplicito" "$SD_CACHE/activity.test" - [ -d "$SD_ACTIVITY_DIR" ] - [ -d "$SD_ACTIVITY_DIR/in" ] - [ -f "$SD_ACTIVITY_DIR/prov.ttl" ] - [ ! -z "$SD_ACTIVITY_URI" ] - [ "$_SD_ACTIVITY_STATUS" = "running" ] - #>&2 cat /tmp/stub_cache/activity.test/prov.ttl - run cat "$SD_ACTIVITY_DIR/prov.ttl" - [ "${lines[0]}" = "@prefix : ." ] - [ "${lines[1]}" = "@prefix prov: ." ] - [ "${lines[2]}" = "@prefix rdfs: ." ] - [ "${lines[3]}" = "@prefix xsd: ." ] - [ "${lines[4]}" = "@prefix dct: ." ] - [ "${lines[5]}" = "@prefix sd: ." ] - [ "${lines[6]}" = "@prefix kees: ." ] - [ "${lines[7]}" = ":activity a prov:Activity;" ] - [ "${lines[8]}" = " prov:qualifiedAssociation :activity_owner." ] - [ "${lines[9]}" = ":activity_owner a prov:Association ;" ] - [ "${lines[10]}" = " prov:agent ;" ] - [ "${lines[11]}" = " prov:hadRole kees:namedGraphGenerator ;" ] - [ "${lines[12]}" = " prov:hadPlan \"\"\"commento esplicito\"\"\"." 
] - [ "${lines[13]}" = ":activity prov:startedAtTime \"Sun Dec 24 00:00:00 UTC 2017\"^^xsd:dateTime ." ] -} - diff --git a/tests/unit/assertingTest.bats b/tests/unit/assertingTest.bats deleted file mode 100644 index 9eede45..0000000 --- a/tests/unit/assertingTest.bats +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bats - -function setup { - . scripts/asserting.include -} - -@test "SD_VERBOSE default is 1" { - [ "$SD_VERBOSE" -eq 1 ] -} - -@test "SD_FATAL_ERROR is ok" { - run SD_FATAL_ERROR "here error description" - [ "$status" -eq 1 ] - [ "$output" = "here error description" ] -} - - -@test "SD_REQUIRES_VAR with existing variable" { - testvar=1 - run SD_REQUIRES_VAR SD_VERBOSE testvar - [ $status -eq 0 ] -} - - -@test "SD_REQUIRES_VAR with not existing variable" { - run SD_REQUIRES_VAR not_exists - [ "$status" -gt 0 ] - [ "$output" = "Mandatory environment variable not_exists not defined." ] -} - - -@test "SD_REQUIRES_CMD with existing commands" { - run SD_REQUIRES_CMD ls dir grep - [ "$status" -eq 0 ] -} - - -@test "SD_REQUIRES_CMD with not existing command" { - run SD_REQUIRES_CMD ls dir not_exists grep - [ "$status" -eq 1 ] - [ "$output" = "I require not_exists but it it's not installed." ] -} \ No newline at end of file diff --git a/tests/unit/cachingTest.bats b/tests/unit/cachingTest.bats deleted file mode 100644 index c7dfbd5..0000000 --- a/tests/unit/cachingTest.bats +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bats - -function setup { - . "$BATS_TEST_DIRNAME/stubs/asserting_stub.include" - rm -rf /tmp/testcleancache - SD_INCLUDE caching -} - -@test "SD_CACHE default" { - [ "$SD_CACHE"=".cache" ] -} - - -@test "SD_CLEAN_CACHE in custom position" { - SD_CACHE=/tmp/testcleancache/dir - run SD_CLEAN_CACHE - [ "$status" -eq 0 ] - [ -d /tmp/testcleancache/dir ] - rm -rf /tmp/testcleancache -} - - -#@test "SD_CLEAN_CACHE unwritable" { -# SD_CACHE="/nonexistingdir" -# run SD_CLEAN_CACHE -# [ "$status" -eq 2 ] -# [ ! 
-f "/nonexistingdir" ] -#} - - -@test "SD_CLEAN_CACHE is cleaned" { - SD_CACHE=/tmp/testcleancache/dir - run SD_CLEAN_CACHE - [ "$status" -eq 0 ] - - touch $SD_CACHE/file1 - run SD_CLEAN_CACHE - [ "$status" -eq 0 ] - [ ! -f "$SD_CACHE/file1" ] - rm -rf /tmp/testcleancache -} - diff --git a/tests/unit/coreTest.bats b/tests/unit/coreTest.bats new file mode 100644 index 0000000..cbb796c --- /dev/null +++ b/tests/unit/coreTest.bats @@ -0,0 +1,119 @@ +#!/usr/bin/env bats + +function on_script_startup { + _SAVED_PRIORITY="$SD_LOG_PRIORITY" + SD_LOG_PRIORITY=2 + source "$SDAAS_INSTALL_DIR/core" NO_SPLASH +} + +on_script_startup + +function setup { + SD_LOG_PRIORITY=2 +} + + +function teardown { + SD_LOG_PRIORITY="$_SAVED_PRIORITY" +} + + +########## Test sd_log + +@test "core sd_log with default priority" { + SD_LOG_PRIORITY=7 + run sd_log "test message as NOTICE" + [ "$status" -eq 0 ] + [[ "${lines[0]}" =~ NOTICE.+test\ message\ as\ NOTICE$ ]] +} + + +@test "core sd_log with ERROR priority" { + SD_LOG_PRIORITY=7 + run sd_log -p ERROR "test message as ERROR" + [ "$status" -eq 0 ] + [[ "${lines[0]}" =~ ERROR.+test\ message\ as\ ERROR ]] +} + + +@test "core sd_log with 3 priority" { + SD_LOG_PRIORITY=7 + run sd_log -p 3 "test message as ERROR" + [ "$status" -eq 0 ] + [[ "${lines[0]}" =~ ERROR.+test\ message\ as\ ERROR$ ]] +} + + +@test "core sd_log with file" { + SD_LOG_PRIORITY=7 + echo "test line 1" > /tmp/sd_log_test_file.txt + run sd_log -p ERROR -f /tmp/sd_log_test_file.txt "file as ERROR" + [ "$status" -eq 0 ] + [[ "${lines[0]}" =~ file\ as\ ERROR$ ]] + [ "${lines[1]}" == 'test line 1' ] + run rm /tmp/sd_log_test_file.txt + [ "$status" -eq 0 ] +} + + + +########## Test sd_validate + +@test "core sd_validate ok" { + local MYSTORE="http://test:9090/xxx/sparql" + local MYSTORE_TYPE=w3c + local sid=MYSTORE + run sd_validate sid "^[a-zA-Z]+$" + [ "$status" -eq 0 ] + run sd_validate "$sid" "^http://" + [ "$status" -eq 0 ] + run sd_validate MYSTORE_TYPE "^w3c$" +} + + + 
+@test "core sd_validate ko" { + local MYSTORE="http://test:9090/xxx/sparql" + local sid=MYSTORE + run sd_validate "$sid" "^[a-zA-Z]+$" + [ "$status" -ne 0 ] +} + +########## Test sd_abort + +@test "core sd_abort" { + run sd_abort HALT + [ "$status" -eq 2 ] +} + + +########## Test sd + +@test "core sd with base params" { + run sd core version + [ "$status" -eq 0 ] + [ "${lines[0]}" == "$SDAAS_VERSION" ] +} + + + +@test "core sd or direct command must provide the same result" { + local res=$(sd core version) + run sd_core_version + [ "$status" -eq 0 ] + [ "${lines[0]}" == "$res" ] +} + + + +@test "core sd abort on failing" { + run sd -A NONESISTENT NONESISTENT + [ "$status" -ne 0 ] +} + + +@test "core sd help" { + run sd -h view modules + [ "$status" -eq 0 ] + [[ "${lines[0]}" == "https://sdaas.netlify.app/reference/command/sd_view_modules" ]] +} \ No newline at end of file diff --git a/tests/unit/data/01_ruleset/1_test.construct b/tests/unit/data/01_ruleset/1_test.construct deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/data/01_ruleset/2_test.update b/tests/unit/data/01_ruleset/2_test.update deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/data/01_ruleset/3_test.reasoning b/tests/unit/data/01_ruleset/3_test.reasoning deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/data/simple.csv b/tests/unit/data/simple.csv deleted file mode 100644 index 9c5961c..0000000 --- a/tests/unit/data/simple.csv +++ /dev/null @@ -1,5 +0,0 @@ -field1,field2 -a,b -c,d -yY,z -z,z \ No newline at end of file diff --git a/tests/unit/data/testing/01_test.ask b/tests/unit/data/testing/01_test.ask deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/data/testing/02_test.ask b/tests/unit/data/testing/02_test.ask deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/data/testing/03_test.select b/tests/unit/data/testing/03_test.select deleted file mode 100644 index e69de29..0000000 diff --git 
a/tests/unit/data/two_triples.nt b/tests/unit/data/two_triples.nt deleted file mode 100644 index f679247..0000000 --- a/tests/unit/data/two_triples.nt +++ /dev/null @@ -1,2 +0,0 @@ - . - . diff --git a/tests/unit/data/two_triples.ttl b/tests/unit/data/two_triples.ttl deleted file mode 100644 index c12a64d..0000000 --- a/tests/unit/data/two_triples.ttl +++ /dev/null @@ -1,4 +0,0 @@ -@prefix s: . -@prefix p: . -@prefix o: . -s:1 p:1 o:1, o:2. diff --git a/tests/unit/driverTest.bats b/tests/unit/driverTest.bats new file mode 100644 index 0000000..2518c9c --- /dev/null +++ b/tests/unit/driverTest.bats @@ -0,0 +1,94 @@ +#!/usr/bin/env bats + +function on_script_startup { + source "$SDAAS_INSTALL_DIR/core" NO_SPLASH + sd_include driver +} + +on_script_startup + + + +########## sd_driver_validate + + +@test "driver sd_driver_validate with missing endpoint" { + run sd_driver_validate MYSID + [[ "$status" -ne 0 ]] +} + + +@test "driver sd_driver_validate with invalid endpoint syntax" { + local MYSID="ftp://query.wikidata.org/sparql" + run sd_driver_validate MYSID + [[ "$status" -ne 0 ]] +} + + +@test "driver sd_driver_validate to a store without driver" { + local MYTEST="http://example.org/sparql" + [[ -z "$MYTEST_TYPE" ]] + sd_driver_validate MYTEST + [[ "$MYTEST_TYPE" == "w3c" ]] +} + + +@test "driver sd_driver_validate to a store with valid driver" { + local TEST="http://example/sparql" + local TEST_TYPE="testdriver" + run sd_driver_validate TEST + [[ "$status" -eq 0 ]] +} + + +@test "driver sd_driver_validate to an invalid driver" { + local TEST="http://example/sparql" + local TEST_TYPE="not exists" + run sd_driver_validate TEST + [[ "$status" -ne 0 ]] +} + + +@test "driver sd_driver_validate gsp without endpoint" { + local TEST="http://example/sparql" + local TEST_TYPE="gsp" + run sd_driver_validate TEST + [[ "$status" -ne 0 ]] +} + + +########## sd_driver_update + +@test "driver sd_driver_update" { + local TEST="http://example/sparql" + local TEST_TYPE="testdriver" + 
run sd_driver_update TEST "STATEMENT" + [[ "$status" -eq 0 ]] +} + +@test "driver sd_driver_update invalid sid" { + run sd_driver_update NOSTORE "STATEMENT" + [[ "$status" -eq 1 ]] +} + +########## sd_driver_query + +@test "driver sd_driver_query" { + local TEST="http://example/sparql" + local TEST_TYPE="testdriver" + run sd_driver_query TEST "text/csv" "STATEMENT" + [[ "$status" -eq 0 ]] +} + + +@test "driver sd_driver_query invalid sid" { + run sd_driver_query INVALIDSID "text/csv" "STATEMENT" + [[ "$status" -gt 0 ]] +} + + + +@test "driver sd_driver_query missing mimetype" { + run sd_driver_query STORE "STATEMENT" + [[ "$status" -gt 0 ]] +} \ No newline at end of file diff --git a/tests/unit/kbTest.bats b/tests/unit/kbTest.bats deleted file mode 100644 index 90fb6c8..0000000 --- a/tests/unit/kbTest.bats +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env bats - -function setup { - for stub in asserting caching gzip curl chmod ; do - . "$BATS_TEST_DIRNAME/stubs/${stub}_stub.include" - done - - SD_REASONER_ENDPOINT="http://localhost:8080/sdaas" - STUB_CURL="--cmd" - SD_INCLUDE logging - SD_INCLUDE bg_reasoning true - CREATE_STUB_CACHE -} - -function teardown { - DROP_STUB_CACHE -} - - -@test "No SD_CREATE_REASONER for kb" { - run SD_CREATE_REASONER kb - [ "$status" -ne 0 ] -} - - -@test "no SD_DESTROY_REASONER" { - run SD_DESTROY_REASONER "$SD_REASONER_ENDPOINT" - [ "$status" -ne 0 ] -} - - - -@test "SD_REASONER_QUERY" { - run SD_REASONER_QUERY "$SD_REASONER_ENDPOINT/sparql" 'text/csv' '@query' 'tracefile' - [[ ${lines[0]} =~ "--header Content-Type: application/sparql-query" ]] -} - - - -@test "SD_REASONER_LOAD" { - run SD_REASONER_LOAD "$SD_REASONER_ENDPOINT/sparql" graph "$BATS_TEST_DIRNAME/data/two_triples.nt" guess - #echo "$output" > /tmp/x.out - [[ ${lines[0]} = "gzip -c $BATS_TEST_DIRNAME/data/two_triples.nt" ]] - [[ ${lines[1]} = *"LOAD INTO GRAPH "* ]] -} - diff --git a/tests/unit/learnTest.bats b/tests/unit/learnTest.bats deleted file mode 100644 index 
af08e04..0000000 --- a/tests/unit/learnTest.bats +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env bats - -function SD_LOAD_RDF_FILE { : ; } - -function DOWNLOADER_STUB { : ; } - -function setup { - for stub in asserting caching date curl; do - . "$BATS_TEST_DIRNAME/stubs/${stub}_stub.include" - done - SD_INCLUDE logging - SD_INCLUDE learning - CREATE_STUB_CACHE - mkdir -p "$SD_CACHE/activity.test" - dd if=/dev/zero of=/tmp/file1.ttl bs=10K count=1 > /dev/null 2>&1 - dd if=/dev/zero of=/tmp/file2.ttl bs=10K count=1 > /dev/null 2>&1 -} - -function teardown { - if [ $SD_DEBUG -eq 0 ]; then DROP_STUB_CACHE ; fi - rm -f /tmp/file1.ttl /tmp/file2.ttl -} - - -@test "_SD_EXTRACT" { - SD_DEFAULT_DOWNLOADER=DOWNLOADER_STUB - _SD_START_ACTIVITY "commento esplicito" "$SD_CACHE/activity.test" - - filename1=$(_SD_MK_UID "${BATS_TEST_DIRNAME}/data/two_triples.nt") - filename2=$(_SD_MK_UID "${BATS_TEST_DIRNAME}/data/two_triples.ttl") - filename3=$(_SD_MK_UID "http://example.com/data") - _SD_SOURCES=("$BATS_TEST_DIRNAME/data/two_triples.nt" "$BATS_TEST_DIRNAME/data/two_triples.ttl" "http://example.com/data|.rdf|other content|echo") - run _SD_EXTRACT - [ $status -eq 0 ] - [ "${lines[0]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - activity activity.test downloaded ${BATS_TEST_DIRNAME}/data/two_triples.nt" ] - [ "${lines[1]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - activity activity.test downloaded ${BATS_TEST_DIRNAME}/data/two_triples.ttl" ] - [ "${lines[3]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - activity activity.test downloaded http://example.com/data" ] - - run cat "$SD_ACTIVITY_DIR/prov.ttl" - [ $status -eq 0 ] - [ "${lines[14]}" = ':activity prov:wasInfluencedBy :extraction. :extraction a prov:Activity; prov:startedAtTime "Sun Dec 24 00:00:00 UTC 2017"^^xsd:dateTime .' ] - [ "${lines[15]}" = ":extraction prov:generated ." ] - [ "${lines[16]}" = ":extraction prov:generated ." ] - [ "${lines[17]}" = ":extraction prov:generated ." 
] - [ "${lines[18]}" = ':extraction prov:endedAtTime "Sun Dec 24 00:00:00 UTC 2017"^^xsd:dateTime .' ] -} - - - -function TRANSFORMER_STUB { - echo "$@" -} - - -@test "_SD_TRANSFORM" { - _SD_START_ACTIVITY "commento esplicito" "$SD_CACHE/activity.test" - - # simulate two zipped file in input queue - cat $BATS_TEST_DIRNAME/data/two_triples.nt | gzip -c > $SD_ACTIVITY_DIR/in/file1.nt.gz - cat $BATS_TEST_DIRNAME/data/two_triples.ttl | gzip -c > $SD_ACTIVITY_DIR/in/file2.ttl.gz - cp $BATS_TEST_DIRNAME/data/simple.csv $SD_ACTIVITY_DIR/in/file3.csv - - _SD_TRANSFORMATIONS=('zcat|in/*.gz|unzipped' 'cat|unzipped|out|.rdf|' 'cat|in/*.csv|cutted|keep|tail -n +2' "cat|cutted|out|keep|tr -d 'Y'") - # Same as: - #_SD_TRANSFORMATIONS=('zcat|in/*.gz|unzipped||cat' 'cat|unzipped|out|.rdf|cat' 'cat|in/*.csv|out|keep|tr -d \'Y\' | tail -n +2') - - run _SD_TRANSFORM - [ $status -eq 0 ] - #echo "$output" > /tmp/x - [ "${lines[0]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - activity activity.test transformation pipeline: in/*.gz -> zcat -> cat -> unzipped" ] - [ "${lines[1]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - activity activity.test transformation pipeline: unzipped/* -> cat -> cat -> out" ] - [ "${lines[2]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - activity activity.test transformation pipeline: in/*.csv -> cat -> tail -n +2 -> cutted" ] - [ "${lines[3]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - activity activity.test transformation pipeline: cutted/* -> cat -> tr -d 'Y' -> out" ] - run cat "$SD_ACTIVITY_DIR/prov.ttl" - #cp "$SD_ACTIVITY_DIR/prov.ttl" /tmp/x - [ $status -eq 0 ] - [ "${lines[14]}" = ':activity prov:wasInfluencedBy :transformation. :transformation a prov:Activity; prov:wasInformedBy :extraction; prov:startedAtTime "Sun Dec 24 00:00:00 UTC 2017"^^xsd:dateTime .' ] - [ "${lines[15]}" = ':transformation prov:used ; prov:generated .' ] - [ "${lines[16]}" = ':transformation prov:used ; prov:generated .' ] - [ "${lines[17]}" = ':transformation prov:used ; prov:generated .' 
] - [ "${lines[18]}" = ':transformation prov:used ; prov:generated .' ] - [ "${lines[19]}" = ':transformation prov:used ; prov:generated .' ] - [ "${lines[20]}" = ':transformation prov:used ; prov:generated .' ] - [ "${lines[21]}" = ':transformation prov:endedAtTime "Sun Dec 24 00:00:00 UTC 2017"^^xsd:dateTime .' ] -} - diff --git a/tests/unit/loggingTest.bats b/tests/unit/loggingTest.bats deleted file mode 100644 index 4372b35..0000000 --- a/tests/unit/loggingTest.bats +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env bats - -function setup { - for stub in asserting caching date; do - . "$BATS_TEST_DIRNAME/stubs/${stub}_stub.include" - done - CREATE_STUB_CACHE - SD_INCLUDE logging -} - - -function teardown { - DROP_STUB_CACHE -} - - -@test "SD_START_LOGGING" { - SD_DEBUG_FILE="$SD_CACHE/session_1.debug" - SD_LOG_FILE="$SD_CACHE/session_1.log" - SD_START_LOGGING - [ -f "$SD_CACHE/session_1.log" ] - [ -f "$SD_CACHE/session_1.debug" ] -} - - - -@test "SD_SHOW" { - run SD_SHOW hello - [ "$output" = "hello" ] -} - - -@test "SD_SHOW silent" { - SD_VERBOSE=0 - run SD_SHOW hello - [ "$output" = "" ] -} - - -@test "SD_LOG" { - SD_LOG_FILE="$SD_CACHE/session_0.log" - run SD_LOG hello - [ "$output" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - hello" ] - run cat "$SD_CACHE/session_0.log" - [ "$output" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - hello" ] -} - - -@test "SD_LOG only on file" { - SD_LOG_FILE="$SD_CACHE/session_0.log" - run SD_LOG "hello1" silent - run SD_LOG "hello2" silent - [ "$output" = "" ] - run cat "$SD_CACHE/session_0.log" - [ "${lines[0]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - hello1" ] - [ "${lines[1]}" = "sdaas Sun Dec 24 00:00:00 UTC 2017 - hello2" ] -} - - -@test "SD_DEBUG_INFO" { - SD_DEBUG=1 - SD_DEBUG_FILE="$SD_CACHE/session_2.debug" - run SD_DEBUG_INFO "hello1" - run SD_DEBUG_INFO "hello2" - [ "$output" = "" ] - run cat "$SD_CACHE/session_2.debug" - [ "${lines[0]}" = "hello1" ] - [ "${lines[1]}" = "hello2" ] -} - - -@test "SD_MK_DEBUG_TMP_FILE" { - 
tmpFile=$(SD_MK_DEBUG_TMP_FILE test) - [[ $tmpFile =~ ^$SD_CACHE/test ]] -} \ No newline at end of file diff --git a/tests/unit/reasoningTest.bats b/tests/unit/reasoningTest.bats deleted file mode 100644 index e96cdf3..0000000 --- a/tests/unit/reasoningTest.bats +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bats - -function setup { - for stub in asserting caching kb date; do - . "$BATS_TEST_DIRNAME/stubs/${stub}_stub.include" - done - SD_INCLUDE logging - SD_INCLUDE reasoning - CREATE_STUB_CACHE -} - -function teardown { - if [ $SD_DEBUG -eq 0 ]; then DROP_STUB_CACHE ; fi -} - - -@test "SD_EVAL_CONSTRUCTOR" { - run SD_EVAL_CONSTRUCTOR graph constructor "$SD_CACHE/eval_constructor.ttl" - #echo "$output" > /tmp/x - [[ "${lines[0]}" =~ 'reasoning on graph ' ]] - [[ "${lines[1]}" =~ 'evaluating axiom constructor...' ]] - [ "${lines[2]}" = 'SD_SPARQL_QUERY text/turtle constructor' ] - [[ "${lines[3]}" =~ 'completed by replacing graph ' ]] - [ "${lines[4]}" = 'SD_SPARQL_UPDATE DROP SILENT GRAPH ' ] - [[ "${lines[5]}" =~ 'SD_LOAD_RDF_FILE graph' ]] -} diff --git a/tests/unit/rulesetTest.bats b/tests/unit/rulesetTest.bats deleted file mode 100644 index 9ff8cf3..0000000 --- a/tests/unit/rulesetTest.bats +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bats - -function setup { - for stub in asserting caching kb date; do - . "$BATS_TEST_DIRNAME/stubs/${stub}_stub.include" - done - SD_INCLUDE logging - SD_INCLUDE learning - SD_INCLUDE reasoning - SD_INCLUDE ruleset - CREATE_STUB_CACHE -} - -function teardown { - if [ $SD_DEBUG -eq 0 ]; then DROP_STUB_CACHE ; fi -} - - -@test "SD_EVAL_RULESET" { - run SD_EVAL_RULESET urn:graph: tests/unit/data/01_ruleset - #echo "$output" > /tmp/x - [[ "${lines[0]}" =~ 'starded inferencing axiom 1_test from ruleset 01_ruleset' ]] - [[ "${lines[1]}" =~ 'evaluating axiom @tests/unit/data/01_ruleset/1_test.construct...' 
]] - [[ "${lines[2]}" =~ 'SD_SPARQL_QUERY text/turtle @tests/unit/data/01_ruleset/1_test.construct' ]] - [[ "${lines[3]}" =~ 'completed by replacing graph ' ]] - [[ "${lines[4]}" =~ 'SD_SPARQL_UPDATE DROP SILENT GRAPH ' ]] - [[ "${lines[5]}" =~ 'SD_LOAD_RDF_FILE urn:graph:tests/unit/data/01_ruleset/1_test.construct' ]] - [[ "${lines[6]}" =~ 'SD_LOAD_RDF_FILE urn:graph:tests/unit/data/01_ruleset/1_test.construct' ]] - [[ "${lines[7]}" =~ 'starded inferencing axiom 2_test from ruleset 01_ruleset' ]] - [[ "${lines[8]}" =~ 'SD_SPARQL_UPDATE @-' ]] - [[ "${lines[9]}" =~ 'SD_LOAD_RDF_FILE urn:graph:tests/unit/data/01_ruleset/2_test.update /tmp/stub_cache/' ]] - [[ "${lines[10]}" =~ 'starded inferencing axiom 3_test from ruleset 01_ruleset' ]] - [[ "${lines[11]}" =~ 'SD_SPARQL_UPDATE DROP SILENT GRAPH ' ]] - [[ "${lines[12]}" =~ 'SD_LOAD_RDF_FILE urn:graph:tests/unit/data/01_ruleset/3_test.reasoning' ]] -} diff --git a/tests/unit/sparqlTest.bats b/tests/unit/sparqlTest.bats new file mode 100644 index 0000000..48c6b7e --- /dev/null +++ b/tests/unit/sparqlTest.bats @@ -0,0 +1,52 @@ +#!/usr/bin/env bats + +function on_script_startup { + source "$SDAAS_INSTALL_DIR/core" NO_SPLASH + STORE=http://dummy.example.org/sparql + STORE_TYPE=testdriver + sd_include sparql + load testsid.include +} + +on_script_startup + + +########## sd_sparql_update + + +@test "sd_sparql_update sid management" { + test_sid sd_sparql_update "STATEMENT" +} + + +########## sd_sparql_query + + +@test "sd_sparql_query sid management" { + test_sid sd_sparql_query "STATEMENT" +} + + + + + +########## sd_sparql_graph + + +@test "sd_sparql_graph sid management" { + test_sid sd_sparql_graph -f ntriples -a PUT -r "@tests/data/empty-store.nt" "urn:graph:store" +} + + + +@test "sd_sparql_graph from stream " { + cat "tests/data/empty-store.nt" | sd_sparql_graph -a PUT "urn:graph:store" + [[ "$status" -eq 0 ]] +} + + +@test "sd_sparql_graph with extra option" { + run sd_sparql_graph -x -f ntriples -a PUT -r 
"@tests/data/empty-store.nt" "urn:graph:store" + [[ "$status" -ne 0 ]] +} + diff --git a/tests/unit/stubs/asserting_stub.include b/tests/unit/stubs/asserting_stub.include deleted file mode 100644 index 9623809..0000000 --- a/tests/unit/stubs/asserting_stub.include +++ /dev/null @@ -1,38 +0,0 @@ -if [ ! -z ${__module_asserting+x} ]; then return ; else __module_asserting=1 ; fi - -SD_VERBOSE=${SD_VERBOSE:=1} -function SD_FATAL_ERROR { exit 2; } -function SD_REQUIRES_CMD { hash "$@" ; } -function SD_REQUIRES_VAR { : ; } -function SD_INCLUDE { . "scripts/$1.include" ; } -function SD_LOG { : ; } - - -function _note { - >&2 echo "---------------" - >&2 echo "$@" - >&2 echo "---------------" -} - -function _o { - >&2 echo "---------------" - >&2 echo "$output" - >&2 echo "---------------" -} - - -function _file { - >&2 echo "---------------" - >&2 cat "$1" - >&2 echo "---------------" -} - - - -function _log { - >&2 echo "-----LOG------" - if [ -f /tmp/stub_cache/session_$$.log ]; then >&2 cat /tmp/stub_cache/session_$$.log ; fi - >&2 echo "------DBG------" - if [ -f /tmp/stub_cache/session_$$.debug ]; then >&2 cat /tmp/stub_cache/session_$$.debug ; fi - -} diff --git a/tests/unit/stubs/bg_reasoning_stub.include b/tests/unit/stubs/bg_reasoning_stub.include deleted file mode 100644 index 1e7eadb..0000000 --- a/tests/unit/stubs/bg_reasoning_stub.include +++ /dev/null @@ -1,9 +0,0 @@ -if [ ! 
-z ${__module_bg_reasoning+x} ]; then return ; else __module_bg_reasoning=1 ; fi - -SD_REASONER_ENDPOINT="http://localhost:9999/blazegraph" - -function SD_CREATE_REASONER { >&2 echo "SD_CREATE_REASONER $@" ; echo "1" ;} -function SD_REASONER_QUERY { >&2 echo "SD_REASONER_QUERY" ; } -function SD_REASONER_UPDATE { >&2 echo "SD_REASONER_UPDATE" ; } -function SD_REASONER_LOAD { >&2 echo "SD_REASONER_LOAD" ; } -function SD_DESTROY_REASONER { >&2 echo "SD_DESTROY_REASONER" ; } \ No newline at end of file diff --git a/tests/unit/stubs/caching_stub.include b/tests/unit/stubs/caching_stub.include deleted file mode 100644 index 778bf8c..0000000 --- a/tests/unit/stubs/caching_stub.include +++ /dev/null @@ -1,5 +0,0 @@ -if [ ! -z ${__module_caching+x} ]; then return ; else __module_caching=1 ; fi -SD_CACHE="/tmp/stub_cache" -function CREATE_STUB_CACHE { mkdir -p "$SD_CACHE" ; } -function DROP_STUB_CACHE { rm -rf "$SD_CACHE" ; } -function SD_CLEAN_CACHE { rm -rf "$SD_CACHE/*" ; } diff --git a/tests/unit/stubs/chmod_stub.include b/tests/unit/stubs/chmod_stub.include deleted file mode 100644 index 14ddf48..0000000 --- a/tests/unit/stubs/chmod_stub.include +++ /dev/null @@ -1 +0,0 @@ -function chmod { : ; } diff --git a/tests/unit/stubs/curl_stub.include b/tests/unit/stubs/curl_stub.include deleted file mode 100644 index d547c18..0000000 --- a/tests/unit/stubs/curl_stub.include +++ /dev/null @@ -1,22 +0,0 @@ -STUB_CURL="" -function curl { - case $STUB_CURL in - @*) - >&2 cat ${STUB_CURL#"@"} - ;; - "--cmd") - >&2 echo "curl $@" - ;; - "--stdin") - >&2 cat - ;; - "--noop") - return - ;; - *) - >&2 echo "$STUB_CURL" - ;; - esac -} - - diff --git a/tests/unit/stubs/date_stub.include b/tests/unit/stubs/date_stub.include deleted file mode 100644 index ab54474..0000000 --- a/tests/unit/stubs/date_stub.include +++ /dev/null @@ -1 +0,0 @@ -function date { echo ${STUB_DATE:="Sun Dec 24 00:00:00 UTC 2017"} ; } diff --git a/tests/unit/stubs/gzip_stub.include 
b/tests/unit/stubs/gzip_stub.include deleted file mode 100644 index 27c33ca..0000000 --- a/tests/unit/stubs/gzip_stub.include +++ /dev/null @@ -1 +0,0 @@ -function gzip { >&2 echo ${STUB_GZIP:="gzip $@"} ; } diff --git a/tests/unit/stubs/kb_stub.include b/tests/unit/stubs/kb_stub.include deleted file mode 100644 index 1d7e551..0000000 --- a/tests/unit/stubs/kb_stub.include +++ /dev/null @@ -1,5 +0,0 @@ -if [ ! -z ${__module_kb+x} ]; then return ; else __module_kb=1 ; fi - -function SD_LOAD_RDF_FILE { >&2 echo "SD_LOAD_RDF_FILE $@" ; } -function SD_SPARQL_QUERY { >&2 echo "SD_SPARQL_QUERY $@" ; } -function SD_SPARQL_UPDATE { >&2 echo "SD_SPARQL_UPDATE $@" ; } diff --git a/tests/unit/testingTest.bats b/tests/unit/testingTest.bats deleted file mode 100644 index aa87d66..0000000 --- a/tests/unit/testingTest.bats +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bats - -function setup { - for stub in asserting caching kb date; do - . "$BATS_TEST_DIRNAME/stubs/${stub}_stub.include" - done - SD_INCLUDE testing - SD_INCLUDE logging - CREATE_STUB_CACHE -} - -function teardown { - if [ $SD_DEBUG -eq 0 ]; then DROP_STUB_CACHE ; fi -} - - -@test "SD_DATA_TEST" { - run SD_DATA_TEST tests/unit/data/testing - #echo "$output" > /tmp/x - [[ "${lines[0]}" =~ 'sdaas Sun Dec 24 00:00:00 UTC 2017 - Testing knowledge graph integrity...' 
]] - [[ "${lines[1]}" =~ '01_test.ask...SD_SPARQL_QUERY xml @tests/unit/data/testing/01_test.ask' ]] - [[ "${lines[2]}" =~ '02_test.ask...SD_SPARQL_QUERY xml @tests/unit/data/testing/02_test.ask' ]] - [[ "${lines[3]}" =~ '03_test.select...SD_SPARQL_QUERY csv-h @tests/unit/data/testing/03_test.select' ]] -} \ No newline at end of file diff --git a/tests/unit/testsid.include b/tests/unit/testsid.include new file mode 100644 index 0000000..e5320bc --- /dev/null +++ b/tests/unit/testsid.include @@ -0,0 +1,24 @@ + +function test_sid { + local cmd="$1" + shift 1 + + # test default STORE + run "$cmd" "$@" + [[ "$status" -eq 0 ]] + + # test with good extra store + local MYSTORE=http://dummy.example.org/sparql + local MYSTORE_TYPE=testdriver + run "$cmd" -s MYSTORE "$@" + [[ "$status" -eq 0 ]] + + + # test with bad store + run "$cmd" -s NOTEXIST "$@" + [[ "$status" -eq 1 ]] + + # test extra options + run "$cmd" -z "$@" + [[ "$status" -gt 0 ]] +} diff --git a/tests/unit/viewTest.bats b/tests/unit/viewTest.bats new file mode 100644 index 0000000..141c742 --- /dev/null +++ b/tests/unit/viewTest.bats @@ -0,0 +1,53 @@ +#!/usr/bin/env bats + +function on_script_startup { + source "$SDAAS_INSTALL_DIR/core" NO_SPLASH + sd_include view +} + +on_script_startup + + +@test "view modules" { + run sd view modules + [[ "$status" -eq 0 ]] + [[ ${#lines[@]} -ge 6 ]] +} + + + +@test "view modules extra argument" { + run sd view modules xxx + [[ "$status" -ne 0 ]] + [[ ${output} =~ ERROR ]] +} + + + +@test "view module" { + run sd view module view + [ "$status" -eq 0 ] + [ ${#lines[@]} -ge 3 ] +} + + + +@test "view module with missing module" { + run sd view module + [ "$status" -ne 0 ] + [[ ${output} =~ ERROR ]] +} + + +@test "view config" { + run sd view config + [ "$status" -eq 0 ] + [ ${#lines[@]} -gt 1 ] +} + +@test "view config with extra param" { + run sd view config xxx + [ "$status" -ne 0 ] + [[ ${output} =~ ERROR ]] +} +