diff --git a/helm/airgap-deployment/prepare-airgap.bash b/helm/airgap-deployment/prepare-airgap.bash index 10db6c9d..c80612f5 100644 --- a/helm/airgap-deployment/prepare-airgap.bash +++ b/helm/airgap-deployment/prepare-airgap.bash @@ -117,3 +117,4 @@ wget -O- https://github.com/strimzi/strimzi-kafka-operator/releases/download/${S ( cd ${OFFLINE_DIR} && rm -rf emqx-operator && git clone https://github.com/emqx/emqx-operator.git && cd emqx-operator && git checkout ${EMQX_OPERATOR_VERSION} ) ( cd ${OFFLINE_DIR} && rm -rf postgres-operator && git clone https://github.com/zalando/postgres-operator.git && cd postgres-operator && git checkout ${POSTGRES_OPERATOR_VERSION} ) ( cd ${OFFLINE_DIR} && rm -rf helm-charts && git clone https://github.com/vmware-tanzu/helm-charts.git && cd helm-charts && git checkout ${VELERO_HELM_VERSION}) +( cd ${OFFLINE_DIR} && rm -rf Reloader && git clone https://github.com/stakater/Reloader.git && cd Reloader && git checkout ${RELOADER_HELM_VERSION}) diff --git a/helm/charts/kafka-bridges/kms b/helm/charts/kafka-bridges/kms deleted file mode 120000 index 83db3131..00000000 --- a/helm/charts/kafka-bridges/kms +++ /dev/null @@ -1 +0,0 @@ -../../../semantic-model/kms/ \ No newline at end of file diff --git a/helm/charts/kafka-bridges/templates/alerta-bridge-deployment.yaml b/helm/charts/kafka-bridges/templates/alerta-bridge-deployment.yaml index f5da7727..3a074cf6 100644 --- a/helm/charts/kafka-bridges/templates/alerta-bridge-deployment.yaml +++ b/helm/charts/kafka-bridges/templates/alerta-bridge-deployment.yaml @@ -58,7 +58,5 @@ spec: items: - key: "config.json" path: "config.json" - - key: "knowledge.ttl" - path: "knowledge.ttl" imagePullSecrets: - name: {{ .Values.pullSecretCredentials }} \ No newline at end of file diff --git a/helm/charts/kafka-bridges/templates/bridge-configmap.yaml b/helm/charts/kafka-bridges/templates/bridge-configmap.yaml index 025e9419..eb6a2e43 100644 --- a/helm/charts/kafka-bridges/templates/bridge-configmap.yaml +++ b/helm/charts/kafka-bridges/templates/bridge-configmap.yaml @@ -114,5 +114,4 @@ data: "password": "POSTGRES_PASSWORD" } } - knowledge.ttl: | -{{ .Files.Get "kms/knowledge.ttl" | indent 4 }} + diff --git a/helm/charts/kafka-bridges/templates/debezium-bridge-deployment.yaml b/helm/charts/kafka-bridges/templates/debezium-bridge-deployment.yaml index 8677d8db..26e8f832 100644 --- a/helm/charts/kafka-bridges/templates/debezium-bridge-deployment.yaml +++ b/helm/charts/kafka-bridges/templates/debezium-bridge-deployment.yaml @@ -8,6 +8,8 @@ metadata: namespace: {{ .Release.Namespace }} labels: app: debezium-bridge + annotations: + configmap.reloader.stakater.com/reload: "knowledge" spec: replicas: {{ .Values.kafkaBridge.debezium.replicaCount }} selector: @@ -45,19 +47,20 @@ spec: initialDelaySeconds: 5 volumeMounts: - name: config - mountPath: /opt/config + mountPath: /opt/config/config.json + readOnly: true + subPath: config.json + - name: knowledge-config + mountPath: /opt/config/knowledge.ttl + subPath: knowledge.ttl readOnly: true resources: volumes: - name: config configMap: - # Provide the name of the ConfigMap you want to mount. 
name: bridge-configmap - # An array of keys from the ConfigMap to create as files - items: - - key: "config.json" - path: "config.json" - - key: "knowledge.ttl" - path: "knowledge.ttl" + - name: knowledge-config + configMap: + name: knowledge imagePullSecrets: - name: {{ .Values.pullSecretCredentials }} \ No newline at end of file diff --git a/helm/charts/kafka-bridges/templates/knowledge-configmap.yaml b/helm/charts/kafka-bridges/templates/knowledge-configmap.yaml new file mode 100644 index 00000000..ee69a37f --- /dev/null +++ b/helm/charts/kafka-bridges/templates/knowledge-configmap.yaml @@ -0,0 +1,30 @@ +--- +# yamllint disable rule:line-length +# yamllint disable rule:braces +apiVersion: v1 +kind: ConfigMap +metadata: + name: knowledge + namespace: {{ .Release.Namespace }} + labels: + app: knowledge +data: + {{- $configmap := (lookup "v1" "ConfigMap" .Release.Namespace "knowledge") }} + {{- if $configmap }} + {{- $knowledge := get $configmap.data "knowledge.ttl" }} + knowledge.ttl: {{ $knowledge | quote }} + {{- else }} + knowledge.ttl: | + PREFIX rdf: + PREFIX rdfs: + PREFIX iff: + PREFIX oee: + PREFIX owl: + + ### Test classes + iff:cutter_test rdfs:subClassOf iff:machine_test ; + a rdfs:Class . + iff:plasmacutter_test rdfs:subClassOf iff:cutter_test ; + a rdfs:Class . + ### End of Test Content + {{- end }} diff --git a/helm/charts/kafka-bridges/templates/ngsild-updates-bridge-deployment.yaml b/helm/charts/kafka-bridges/templates/ngsild-updates-bridge-deployment.yaml index 5d62c1c3..2209a0a4 100644 --- a/helm/charts/kafka-bridges/templates/ngsild-updates-bridge-deployment.yaml +++ b/helm/charts/kafka-bridges/templates/ngsild-updates-bridge-deployment.yaml @@ -62,7 +62,5 @@ spec: items: - key: "config.json" path: "config.json" - - key: "knowledge.ttl" - path: "knowledge.ttl" imagePullSecrets: - name: {{ .Values.pullSecretCredentials }} \ No newline at end of file diff --git a/helm/charts/kafka-bridges/templates/timescaledb-bridge-deployment.yaml b/helm/charts/kafka-bridges/templates/timescaledb-bridge-deployment.yaml index 88b5bde6..85f132c2 100644 --- a/helm/charts/kafka-bridges/templates/timescaledb-bridge-deployment.yaml +++ b/helm/charts/kafka-bridges/templates/timescaledb-bridge-deployment.yaml @@ -64,7 +64,5 @@ spec: items: - key: "config.json" path: "config.json" - - key: "knowledge.ttl" - path: "knowledge.ttl" imagePullSecrets: - name: {{ .Values.pullSecretCredentials }} \ No newline at end of file diff --git a/helm/common.yaml b/helm/common.yaml index 1b0019b4..d1593bfa 100644 --- a/helm/common.yaml +++ b/helm/common.yaml @@ -13,4 +13,8 @@ db: clusterSvcPostfix: cluster svcPort: "5432" dbUser: "ngb" - oispdbUser: "oisp" \ No newline at end of file + oispdbUser: "oisp" + +ontology: + baseUri: https://industryfusion.github.io/contexts/staging/ontology/v0.1/ + \ No newline at end of file diff --git a/helm/env.sh b/helm/env.sh index e30ad15c..f1d6d1f5 100644 --- a/helm/env.sh +++ b/helm/env.sh @@ -14,6 +14,7 @@ export STRIMZI_VERSION="0.32.0" export OFFLINE=${OFFLINE:-false} export OFFLINE_DIR=$(cd $DIRNAME/airgap-deployment; pwd) export KEYCLOAK_VERSION=21.1.2 +export RELOADER_HELM_VERSION=v1.0.67 COMMON_MAIN_REGISTRY=$(yq ".mainRegistry" < $DIRNAME/common.yaml) COMMON_EXTERNAL_REGISTRY=$(yq ".externalRegistry" < $DIRNAME/common.yaml) COMMON_EXTERNAL_REGISTRY2=$(yq ".externalRegistry2" < $DIRNAME/common.yaml) diff --git a/helm/install_operators.sh b/helm/install_operators.sh index ad6f19b7..ec48e405 100755 --- a/helm/install_operators.sh +++ b/helm/install_operators.sh @@ -118,6 
+118,19 @@ fi sleep 10 done + +printf "\n" +printf "\033[1mInstalling Reloader Operator\n" +printf -- "------------------------\033[0m\n" +if [ "$OFFLINE" = "true" ]; then + ( cd ${OFFLINE_DIR}/Reloader && helm upgrade --install --atomic reloader ./deployments/kubernetes/chart/reloader \ + --set reloader.image.name=${REGISTRY}/stakater/reloader --set reloader.reloadOnCreate=true) +else + helm repo add stakater https://stakater.github.io/stakater-charts + helm repo update + helm upgrade --install reloader stakater/reloader --version ${RELOADER_HELM_VERSION} --set reloader.reloadOnCreate=true +fi + printf "\n" printf "\033[1mPrepare Velero Helm Chart Repo\n" printf -- "------------------------\033[0m\n" @@ -125,4 +138,5 @@ printf -- "------------------------\033[0m\n" if [ ! "$OFFLINE" = "true" ]; then ( cd ${OFFLINE_DIR} && rm -rf helm-charts && git clone https://github.com/vmware-tanzu/helm-charts.git && cd helm-charts && git checkout ${VELERO_HELM_VERSION} ) fi + printf -- "\033[1mOperators installed successfully.\033[0m\n" diff --git a/semantic-model/kms/knowledge.ttl b/semantic-model/kms/knowledge.ttl index 241436b2..f8d09d17 100644 --- a/semantic-model/kms/knowledge.ttl +++ b/semantic-model/kms/knowledge.ttl @@ -1,102 +1,453 @@ -PREFIX rdf: -PREFIX rdfs: -PREFIX iff: -PREFIX oee: -PREFIX owl: - -### Test classes -iff:cutter_test rdfs:subClassOf iff:machine_test ; - a iff:class . -iff:plasmacutter_test rdfs:subClassOf iff:cutter_test ; - a iff:class . -### End of Test Content - -iff:entity a iff:class ; - a rdfs:class . -iff:machine a iff:class ; - a rdfs:class . -iff:filter rdfs:subClassOf iff:machine ; - a iff:class . -iff:plasmacutter rdfs:subClassOf iff:cutter ; - a iff:class . -iff:lasercutter rdfs:subClassOf iff:cutter; - a iff:class . -iff:cutter rdfs:subClassOf iff:machine ; - a iff:class . -iff:workpiece rdfs:subClassOf iff:entity ; - a iff:class . -iff:filterCartridge rdfs:subClassOf iff:entity ; - a iff:class . - - -iff:scheduleEntity a iff:class . -iff:oeeTemplate rdfs:subClassOf iff:scheduleEntity . - -### Machine states -iff:machineState a rdfs:class . -iff:state_OFF a iff:machineState . -iff:state_OFF iff:stateValidFor iff:filter, iff:cutter . -iff:state_ON a iff:machineState . -iff:state_ON iff:stateValidFor iff:filter, iff:cutter . -iff:state_PREPARING a iff:machineState . -iff:state_PREPARING iff:stateValidFor iff:cutter . -iff:state_PROCESSING a iff:machineState . -iff:state_PROCESSING iff:stateValidFor iff:cutter . -iff:state_CLEARING a iff:machineState . -iff:state_CLEARING iff:stateValidFor iff:cutter . -iff:state_CLEANING a iff:machineState . -iff:state_CLEANING iff:stateValidFor iff:filter . -iff:state_ERROR a iff:machineState . -iff:state_ERROR iff:stateValidFor iff:filter, iff:cutter . - -### OEE definitions: What state defines availability -iff:state_PROCESSING oee:availability "1" . -iff:state_PREPARING oee:availability "0" . - -# labels for states -iff:state_ON rdfs:label "ON" . -iff:state_OFF rdfs:label "OFF" . -iff:state_PREPARING rdfs:label "PREPARING" . -iff:state_PROCESSING rdfs:label "PROCESSING" . -iff:state_CLEARING rdfs:label "ON" . -iff:state_ERROR rdfs:label "ERROR" . - -iff:WC0 a iff:WC . -iff:WC1 a iff:WC . -iff:WC2 a iff:WC . -iff:WC3 a iff:WC . -iff:higherHazardLevel a owl:TransitiveProperty . -iff:WC3 iff:higherHazardLevel iff:WC2 . -iff:WC2 iff:higherHazardLevel iff:WC1 . -iff:WC1 iff:higherHazardLevel iff:WC0 . -#iff:WC1 a rdf:Bag . 
-iff:WC1 iff:containsMaterialNumber [ - a rdf:Bag ; - rdf:_1 "1.4301" ; - rdf:_2 "1.4302" ; - rdf:_3 "1.4303" ; - rdf:_4 "1.4304" ; - rdf:_5 "1.4305" ; -] . -iff:WC2 iff:containsMaterialNumber [ - a rdf:Bag ; - rdf:_1 "1.3301" ; - rdf:_2 "1.3302" ; - rdf:_3 "1.3303" ; - rdf:_4 "1.3304" ; - rdf:_5 "1.3305" ; - ] . -iff:WC3 iff:containsMaterialNumber [ - a rdf:Bag ; - rdf:_1 "1.5301" ; - rdf:_2 "1.5302" ; - rdf:_3 "1.5303" ; - rdf:_4 "1.5304" ; - rdf:_5 "1.5305" ; - ] . - -# Severity -iff:severityCritical a iff:severityClass . -iff:SeverityWarning a iff:severityClass . -iff:severityCritical iff:severityCode "critical" . -iff:severityWarning iff:severityCode "warning" . \ No newline at end of file +@prefix : . +@prefix default1: . +@prefix default2: . +@prefix default3: . +@prefix default4: . +@prefix default5: . +@prefix default6: . +@prefix default7: . +@prefix owl: . +@prefix rdfs: . +@prefix xsd: . + +default7:error a default7:ProcessStep, + owl:NamedIndividual . + +default7:hasNextStep a owl:ObjectProperty ; + rdfs:domain default7:ProcessStep ; + rdfs:range default7:ProcessStep . + +default7:isProcessType a owl:ObjectProperty ; + rdfs:domain default7:ProcessStep ; + rdfs:range default7:StepType . + +default7:step1 a default7:ProcessStep, + owl:NamedIndividual ; + default7:hasNextStep default7:step2 ; + default7:isProcessType default7:initial . + +default7:undefined a default7:ProcessStep, + owl:NamedIndividual . + +default6:hasCurrentStep a owl:ObjectProperty ; + rdfs:domain default6:Process ; + rdfs:range default5:Property . + +default6:hasTimeInCurrentStep a owl:ObjectProperty ; + rdfs:domain default6:Process . + +: a owl:Ontology ; + owl:imports default5: ; + owl:versionIRI . + +:Lasercutter a owl:Class, + owl:NamedIndividual ; + rdfs:subClassOf :Cutter . + +:Plasmacutter a owl:Class, + owl:NamedIndividual ; + rdfs:subClassOf :Cutter . + +:hasCartridge a owl:ObjectProperty ; + rdfs:domain :Filter ; + rdfs:range default5:Relationship . + +:hasFilter a owl:ObjectProperty ; + rdfs:domain :Cutter ; + rdfs:range default5:Relationship . + +:hasHeight a owl:ObjectProperty ; + rdfs:domain :Workpiece ; + rdfs:range default5:Property . + +:hasInWorkpiece a owl:ObjectProperty ; + rdfs:domain :Cutter ; + rdfs:range default5:Relationship . + +:hasLength a owl:ObjectProperty ; + rdfs:domain :Workpiece ; + rdfs:range default5:Property . + +:hasMaterial a owl:ObjectProperty ; + rdfs:domain :Workpiece ; + rdfs:range default5:Property . + +:hasOutWorkpiece a owl:ObjectProperty ; + rdfs:domain :Cutter ; + rdfs:range default5:Relationship . + +:hasState a owl:ObjectProperty ; + rdfs:domain :Machine ; + rdfs:range default5:Property . + +:hasStrength a owl:ObjectProperty ; + rdfs:domain :Filter ; + rdfs:range default5:Property . + +:hasWidth a owl:ObjectProperty ; + rdfs:domain :Workpiece ; + rdfs:range default5:Property . + +:isUsedFrom a owl:ObjectProperty ; + rdfs:domain :FilterCartridge ; + rdfs:range default5:Property . + +:isUsedUntil a owl:ObjectProperty ; + rdfs:domain :FilterCartridge ; + rdfs:range default5:Property . + +default1: a owl:Ontology ; + owl:versionIRI . + +default1:isValidFor a owl:ObjectProperty ; + rdfs:domain default1:MachineState . + +default1:severityCritical a owl:NamedIndividual, + default1:SeverityClass ; + rdfs:label "critical" . + +default1:severityWarning a owl:NamedIndividual, + default1:SeverityClass ; + rdfs:label "warning" . + +default1:state_CLEANING a owl:NamedIndividual, + default1:MachineState ; + rdfs:label "CLEANING" ; + default1:isValidFor :Filter . 
+ +default1:state_CLEARING a owl:NamedIndividual, + default1:MachineState ; + rdfs:label "CLEARING" ; + default1:isValidFor :Cutter . + +default1:state_ERROR a owl:NamedIndividual, + default1:MachineState ; + rdfs:label "ERROR" ; + default1:isValidFor :Machine . + +default1:state_OFF a owl:NamedIndividual, + default1:MachineState ; + rdfs:label "OFF" ; + default1:isValidFor :Machine . + +default1:state_ON a owl:NamedIndividual, + default1:MachineState ; + rdfs:label "ON" ; + default1:isValidFor :Machine . + +default1:state_PREPARING a owl:NamedIndividual, + default1:MachineState ; + rdfs:label "PREPARING" ; + default1:isValidFor :Cutter . + +default1:state_PROCESSING a owl:NamedIndividual, + default1:MachineState ; + rdfs:label "PROCESSING" ; + default1:isValidFor :Cutter . + +default2: a owl:Ontology ; + owl:imports , + , + default5: ; + owl:versionIRI . + +default2:hasWasteclass a owl:ObjectProperty ; + rdfs:domain :FilterCartridge ; + rdfs:range default5:Property . + +default3: a owl:Ontology ; + owl:imports ; + owl:versionIRI . + +default3:hasWasteclass a owl:ObjectProperty ; + rdfs:domain default4:Material ; + rdfs:range default3:Wasteclass . + +default3:higherHazardLevel a owl:ObjectProperty, + owl:TransitiveProperty ; + rdfs:domain default3:Wasteclass ; + rdfs:range default3:Wasteclass . + +default4: a owl:Ontology ; + owl:versionIRI . + +default4:EN_1.3401 a owl:NamedIndividual, + default4:Material ; + default3:hasWasteclass default3:WC1 ; + default4:contains default4:_17, + default4:_18, + default4:_19, + default4:_20, + default4:_21, + default4:_22 . + +default4:EN_1.4301 a owl:NamedIndividual, + default4:Material ; + default3:hasWasteclass default3:WC2 ; + default4:contains default4:_10, + default4:_11, + default4:_12, + default4:_13, + default4:_14, + default4:_15, + default4:_16, + default4:_9 . + +default4:EN_1.5301 a owl:NamedIndividual, + default4:Material ; + default3:hasWasteclass default3:WC3 ; + default4:contains default4:_1, + default4:_2, + default4:_3, + default4:_4, + default4:_5, + default4:_6, + default4:_7, + default4:_8 ; + default4:hasEvaporationTemperature "1200" . + +default4:contains a owl:ObjectProperty ; + rdfs:domain default4:Material ; + rdfs:range default4:_ElementOccurance . + +default4:hasChemicalSymbol a owl:DatatypeProperty ; + rdfs:domain default4:ChemicalElement ; + rdfs:range xsd:string . + +default4:hasEvaporationTemperature a owl:DatatypeProperty ; + rdfs:domain default4:Material ; + rdfs:range xsd:string . + +default4:hasMaxPercent a owl:DatatypeProperty ; + rdfs:domain default4:_ElementOccurance ; + rdfs:range xsd:string . + +default4:hasMinPercent a owl:DatatypeProperty ; + rdfs:domain default4:_ElementOccurance ; + rdfs:range xsd:string . + +default4:isChemicalElement a owl:ObjectProperty ; + rdfs:domain default4:_ElementOccurance ; + rdfs:range default4:ChemicalElement . + + a owl:Ontology ; + owl:imports default5: ; + owl:versionIRI . + + a owl:Ontology ; + owl:versionIRI . + +default7:end a default7:StepType, + owl:NamedIndividual . + +default7:initial a default7:StepType, + owl:NamedIndividual . + +default7:step2 a default7:ProcessStep, + owl:NamedIndividual ; + default7:hasNextStep default7:step3 . + +default7:step3 a default7:ProcessStep, + owl:NamedIndividual ; + default7:isProcessType default7:end . + +default3:WC0 a owl:NamedIndividual, + default3:Wasteclass . + +default3:WC3 a owl:NamedIndividual, + default3:Wasteclass ; + default3:higherHazardLevel default3:WC2 . 
+ +default4:_1 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "0.08" ; + default4:isChemicalElement default4:carbon . + +default4:_10 a owl:NamedIndividual ; + default4:hasMaxPercent "2.0" ; + default4:isChemicalElement default4:manganese . + +default4:_11 a owl:NamedIndividual ; + default4:hasMaxPercent "1.0" ; + default4:isChemicalElement default4:silicon . + +default4:_12 a owl:NamedIndividual ; + default4:hasMaxPercent "0.045" ; + default4:isChemicalElement default4:phosphorus . + +default4:_13 a owl:NamedIndividual ; + default4:hasMaxPercent "0.03" ; + default4:isChemicalElement default4:sulfur . + +default4:_14 a owl:NamedIndividual ; + default4:hasMaxPercent "19.5" ; + default4:hasMinPercent "17.5" ; + default4:isChemicalElement default4:chromium . + +default4:_15 a owl:NamedIndividual ; + default4:hasMaxPercent "10.5" ; + default4:hasMinPercent "8.0" ; + default4:isChemicalElement default4:nickel . + +default4:_16 a owl:NamedIndividual ; + default4:hasMaxPercent "0.1" ; + default4:isChemicalElement default4:nitrogen . + +default4:_17 a owl:NamedIndividual ; + default4:hasMaxPercent "1.3" ; + default4:hasMinPercent "1.1" ; + default4:isChemicalElement default4:carbon . + +default4:_18 a owl:NamedIndividual ; + default4:hasMaxPercent "13" ; + default4:hasMinPercent "12" ; + default4:isChemicalElement default4:manganese . + +default4:_19 a owl:NamedIndividual ; + default4:hasMaxPercent "0.50" ; + default4:hasMinPercent "0.30" ; + default4:isChemicalElement default4:silicon . + +default4:_2 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "2.0" ; + default4:isChemicalElement default4:manganese . + +default4:_20 a owl:NamedIndividual ; + default4:hasMaxPercent "0.1" ; + default4:isChemicalElement default4:phosphorus . + +default4:_21 a owl:NamedIndividual ; + default4:hasMaxPercent "0.04" ; + default4:isChemicalElement default4:sulfur . + +default4:_22 a owl:NamedIndividual ; + default4:hasMaxPercent "1.5" ; + default4:isChemicalElement default4:chromium . + +default4:_3 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasChemicalSymbol "Cr" ; + default4:hasMaxPercent "1.0" ; + default4:isChemicalElement default4:silicon . + +default4:_4 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "0.045" ; + default4:isChemicalElement default4:phosphorus . + +default4:_5 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "0.03" ; + default4:isChemicalElement default4:sulfur . + +default4:_6 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "19.0" ; + default4:hasMinPercent "17.0" ; + default4:isChemicalElement default4:chromium . + +default4:_7 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "12.0" ; + default4:hasMinPercent "12.0" ; + default4:isChemicalElement default4:nickel . + +default4:_8 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "0.7" ; + default4:hasMinPercent "0.17" ; + default4:isChemicalElement default4:titan . + +default4:_9 a owl:NamedIndividual, + default4:_ElementOccurance ; + default4:hasMaxPercent "0.07" ; + default4:isChemicalElement default4:carbon . + +default4:nitrogen a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "N" . + +default4:titan a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "Ti" . + +default6:Process a owl:Class . + +:Entity a owl:Class . 
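Editor's note: the wasteclass data above relies on owl:TransitiveProperty (default3:higherHazardLevel), so inferred links such as WC3 -> WC1 only exist after a reasoning step. A minimal sketch of how that closure can be materialized locally, assuming the repository root as working directory and using the create_knowledge_closure.py helper added later in this change:

  # Illustrative sketch: expand the knowledge graph with OWL-RL; the result is written
  # next to the input file as knowledge_closure.ttl (the script's default output name).
  cd semantic-model/shacl2flink
  python3 create_knowledge_closure.py ../kms/knowledge.ttl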
+ +default1:SeverityClass a owl:Class . + +default3:WC1 a owl:NamedIndividual, + default3:Wasteclass ; + default3:higherHazardLevel default3:WC0 . + +default3:WC2 a owl:NamedIndividual, + default3:Wasteclass ; + default3:higherHazardLevel default3:WC1 . + +default4:nickel a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "Ni" . + +default7:StepType a owl:Class . + +:Filter a owl:Class, + owl:NamedIndividual ; + rdfs:subClassOf :Machine . + +:FilterCartridge a owl:Class ; + rdfs:subClassOf :Entity . + +default4:carbon a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "C" . + +default4:chromium a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "Cr" . + +default4:manganese a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "Mn" . + +default4:phosphorus a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "P" . + +default4:silicon a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "Si" . + +default4:sulfur a owl:NamedIndividual, + default4:ChemicalElement ; + default4:hasChemicalSymbol "S" . + +default5: a owl:Ontology . + +:Workpiece a owl:Class ; + rdfs:subClassOf :Entity . + +default5:Relationship a owl:Class . + +:Machine a owl:Class, + owl:NamedIndividual . + +default4:Material a owl:Class . + +default3:Wasteclass a owl:Class . + +default7:ProcessStep a owl:Class . + +:Cutter a owl:Class, + owl:NamedIndividual ; + rdfs:subClassOf :Machine . + +default1:MachineState a owl:Class . + +default5:Property a owl:Class . + +default4:ChemicalElement a owl:Class . + +default4:_ElementOccurance a owl:Class . + diff --git a/semantic-model/kms/model-example.jsonld b/semantic-model/kms/model-example.jsonld index 3f4fd6e3..e8c47a6b 100644 --- a/semantic-model/kms/model-example.jsonld +++ b/semantic-model/kms/model-example.jsonld @@ -1,241 +1,131 @@ [ { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:plasmacutter:1", - "type": "https://industry-fusion.com/types/v0.9/cutter", - "https://industry-fusion.com/types/v0.9/state": [ + "@context": "https://industryfusion.github.io/contexts/v0.1/context.jsonld", + "id": "urn:cutter:1", + "type": "iffBaseEntities:Machine", + "iffBaseEntities:hasState": [ { "type": "Property", - "observedAt": "2023-03-24 13:42:32.0", "value": { - "@id": "https://industry-fusion.com/types/v0.9/state_PROCESSING" - }}, - { - "type": "Property", - "observedAt": "2023-03-24 13:52:32.0", - "value": { - "@id": "https://industry-fusion.com/types/v0.9/state_PROCESSING" - } - }], - "https://industry-fusion.com/types/v0.9/hasOutWorkpiece": [{ - "type": "Relationship", - "object": "urn:workpiece:1", - "observedAt": "2023-03-24 13:42:32.0" - - }, - { - "type": "Relationship", - "object": "urn:workpiece:2", - "observedAt": "2023-03-24 13:52:32.0" - - }], - "https://industry-fusion.com/types/v0.9/hasInWorkpiece": { - "type": "Relationship", - "object": "urn:workpiece:1" - }, - "https://industry-fusion.com/types/v0.9/hasFilter": { - "type": "Relationship", - "object": "urn:filter:1" - }, - "https://industry-fusion.com/oee/v0.9/availabilityState":[ - { - "type": "Property", - "value": "1", - "observedAt": "2023-03-24 13:42:32.0" - }, - { - "type": "Property", - "value": "1", - "observedAt": "2023-03-24 13:52:32.0" - } - ] + "@id": "iffBaseKnowledge:state_ON" + }}] }, { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:filter:1", - "type": 
"https://industry-fusion.com/types/v0.9/filter", - "https://industry-fusion.com/types/v0.9/state": [ + "@context": "https://industryfusion.github.io/contexts/v0.1/context.jsonld", + "id": "urn:plasmacutter:1", + "type": "iffBaseEntities:Cutter", + "iffBaseEntities:hasState": [ { - "type": "Property", - "value": { - "@id": "https://industry-fusion.com/types/v0.9/state_ON" + "type": "Property", + "value": { + "@id": "iffBaseKnowledge:state_PROCESSING" + }}], + "iffBaseEntities:hasFilter": [ + { + "type": "Relationship", + "object": "urn:filter:1" } - }, - { - "type": "Property", - "value": { - "@id": "https://industry-fusion.com/types/v0.9/state_ON" + ], + "iffBaseEntities:hasInWorkpiece": [ + { + "type": "Relationship", + "object": "urn:workpiece:1" } - } - ], - "https://industry-fusion.com/types/v0.9/strength": { - "type": "Property", - "value": "0.9" - }, - "https://industry-fusion.com/types/v0.9/hasCartridge": { - "type": "Relationship", - "object": "urn:filterCartridge:1" - } + ] }, { - - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:workpiece:1", - "type": "https://industry-fusion.com/types/v0.9/workpiece", - "https://industry-fusion.com/types/v0.9/material": { + "@context": "https://industryfusion.github.io/contexts/v0.1/context.jsonld", + "id": "urn:filter:1", + "type": "iffBaseEntities:Filter", + "iffBaseEntities:hasState": [ + { "type": "Property", "value": { - "@type": "https://industry-fusion.com/types/v0.9/steelGrade", - "@value": "1.4301" + "@id": "iffBaseKnowledge:state_ON" } - }, - "https://schema.org/depth": { - "type": "Property", - "unitCode": "MMT", - "value": "550" - }, - "https://schema.org/height": { - "type": "Property", - "value": "100", - "unitCode": "MMT" - }, - "https://schema.org/weight": { - "type": "Property", - "value": "10000", - "unitCode": "GRM" - }, - "https://schema.org/width": { - "type": "Property", - "value": "100", - "unitCode": "MMT" - } - }, - { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:workpiece:2", - "type": "https://industry-fusion.com/types/v0.9/workpiece", - "https://industry-fusion.com/types/v0.9/material": { - "type": "Property", - "value": { - "@type": "https://industry-fusion.com/types/v0.9/steelGrade", - "@value": "1.4301" + }], + "iffBaseEntities:hasCartridge": [ + { + "type": "Relationship", + "object": "urn:cartridge:1" } - }, - "https://industry-fusion.com/types/v0.9/qualityCheck": [ + ], + "iffBaseEntities:hasStrength": [ { - "value": "1", "type": "Property", - "datasetId": "0", - "observedAt": "2023-03-24 13:42:32.0" + "value": 0.9, + "observedAt": "2024-02-28 13:52:32.0" }, { - "value": "0", "type": "Property", - "datasetId": "0", - "observedAt": "2023-03-24 13:52:32.0" - } - ], - "https://schema.org/depth": { - "type": "Property", - "unitCode": "MMT", - "value": "550" - }, - "https://schema.org/height": { - "type": "Property", - "value": "100", - "unitCode": "MMT" - }, - "https://schema.org/weight": { - "type": "Property", - "value": "10000", - "unitCode": "GRM" - }, - "https://schema.org/width": { - "type": "Property", - "value": "100", - "unitCode": "MMT" - } - }, - { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:lasercutter:1", - "type": "https://industry-fusion.com/types/v0.9/lasercutter", - "https://industry-fusion.com/types/v0.9/state": { - "type": "Property", - "value": { - "@id": "https://industry-fusion.com/types/v0.9/state_OFF" - } - }, - 
"https://industry-fusion.com/types/v0.9/hasWorkpiece": { - "type": "Relationship", - "object": "urn:workpiece:1" - }, - "https://industry-fusion.com/types/v0.9/hasFilter": { - "type": "Relationship", - "object": "urn:filter:2" - } - }, - { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:filter:2", - "type": "https://industry-fusion.com/types/v0.9/filter", - "https://industry-fusion.com/types/v0.9/state": { - "type": "Property", - "value": { - "@id": "https://industry-fusion.com/types/v0.9/state_OFF" - } - }, - "https://industry-fusion.com/types/v0.9/strength": { - "type": "Property", - "value": "0.1" - }, - "https://industry-fusion.com/types/v0.9/hasCartridge": { - "type": "Relationship", - "object": "urn:filterCartridge:2" - } - }, - { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:filterCartridge:1", - "type": "https://industry-fusion.com/types/v0.9/filterCartridge", - "https://industry-fusion.com/types/v0.9/wasteClass": { - "type": "Property", - "value": { - "@id": "https://industry-fusion.com/types/v0.9/WC0" + "value": 0.8, + "observedAt": "2024-02-28 13:52:33.0" + }, + { + "type": "Property", + "value": 0.7, + "observedAt": "2024-02-28 13:52:34.0" + }, + { + "type": "Property", + "value": 0.6, + "observedAt": "2024-02-28 13:52:35.0" } - } + ] }, { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:filterCartridge:2", - "type": "https://industry-fusion.com/types/v0.9/filterCartridge", - "https://industry-fusion.com/types/v0.9/inUseUntil": { - "type": "Property", - "value": "2021-10-25 13:54:55.4" - }, - "https://industry-fusion.com/types/v0.9/wasteClass": { - "type": "Property", - "value": { - "@id": "https://industry-fusion.com/types/v0.9/WC1" - } - } + "@context": "https://industryfusion.github.io/contexts/v0.1/context.jsonld", + "id": "urn:cartridge:1", + "type": "iffBaseEntities:FilterCartridge", + + "iffBaseEntities:isUsedUntil": [ + { + "type": "Property", + "value": "2024-02-27 13:54:55.4" + }], + "iffBaseEntities:isUsedFrom": [ + { + "type": "Property", + "value": "2024-02-27 13:54:55.4" + }], + "iffFilterEntities:hasWasteclass":[ + { + "type": "Property", + "value": { + "@id": "iffFilterKnowledge:WC1" + } + } + ] }, { - "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", - "id": "urn:oee:1", - "type": "https://industry-fusion.com/types/v0.9/oeeTemplate", - "https://industry-fusion.com/types/v0.9/startTime": { - "type": "Property", - "value": "2000-03-23 13:42:31" - }, - "https://industry-fusion.com/types/v0.9/endTime": { - "type": "Property", - "value": "2099-03-25 14:42:31" - }, - - "https://industry-fusion.com/oee/v0.9/hasReferenceMachine": { - "type": "Relationship", - "object": "urn:plasmacutter:1" - } + "@context": "https://industryfusion.github.io/contexts/v0.1/context.jsonld", + "id": "urn:workpiece:1", + "type": "iffBaseEntities:Workpiece", + + "iffBaseEntities:hasMaterial": [ + { + "type": "Property", + "value": { + "@id": "material:EN_1.4301" + } + }], + "iffBaseEntities:hasHeight": [ + { + "type": "Property", + "value": 5 + } + ], + "iffBaseEntities:hasLength": [ + { + "type": "Property", + "value": 100 + } + ], + "iffBaseEntities:hasWidth": [ + { + "type": "Property", + "value": 100 + } + ] } ] diff --git a/semantic-model/kms/shacl.ttl b/semantic-model/kms/shacl.ttl index 4aa6ffcf..8db9a521 100644 --- a/semantic-model/kms/shacl.ttl +++ b/semantic-model/kms/shacl.ttl @@ -1,863 +1,335 @@ -@prefix dash: . -@prefix rdf: . 
-@prefix rdfs: . -@prefix schema: . +@prefix : . +@prefix default1: . +@prefix iffBaseEntities: . +@prefix iffBaseKnowledge: . +@prefix iffFilterEntities: . +@prefix iffFilterKnowledge: . +@prefix material: . +@prefix ngsild: . @prefix sh: . @prefix xsd: . -@prefix iff: . -PREFIX ex: - -iff:MachineShape - a sh:NodeShape ; - sh:targetClass iff:machine ; - sh:property [ - sh:path ; - sh:order 1 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:nodeKind sh:IRI; - sh:class iff:machineState; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; -. - - -iff:CutterShape - a sh:NodeShape ; - sh:targetClass iff:cutter ; - sh:property [ - sh:path ; - sh:order 10 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:nodeKind sh:IRI; - sh:class iff:machineState; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 20 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:class iff:filter ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 30 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:class iff:workpiece ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 40 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:class iff:workpiece ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 50 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - . - - -iff:WorkpieceShape - a sh:NodeShape ; - sh:targetClass iff:workpiece ; - sh:property [ - sh:path ; - sh:order 1 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:nodeKind sh:Literal; - sh:minLength 6 ; - sh:maxLength 7 ; - sh:pattern "^1\\.\\d{4,5}" ; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:datatype - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 2 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:nodeKind sh:Literal ; - sh:maxInclusive 600.0 ; - sh:minExclusive 0.0 ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 3 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:nodeKind sh:Literal ; - sh:maxInclusive 1 ; - sh:minInclusive 0 ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - . 
- - -iff:FilterShape - a sh:NodeShape ; - sh:targetClass iff:filter ; - sh:property [ - sh:path ; - sh:order 1 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:IRI; - sh:class iff:machineState; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 2 ; - sh:nodeKind sh:BlankNode; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minInclusive 0.0 ; - sh:maxInclusive 1.0 ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 3 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - - sh:property [ - sh:path ; - sh:class iff:filterCartridge ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - . - -iff:FilterCartridgeShape - a sh:NodeShape ; - sh:targetClass iff:filterCartridge ; - sh:property [ - sh:path ; - sh:order 1 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:IRI; - sh:class iff:WC; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 2 ; - sh:nodeKind sh:BlankNode; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - - sh:property [ - sh:path ; - sh:order 3 ; - sh:nodeKind sh:BlankNode; - sh:maxCount 1 ; - - sh:property [ - sh:path ; - sh:nodeKind sh:Literal ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - sh:minCount 1 ; - ] ; - ] ; - . - -iff:ScheduleEntityShape - a sh:NodeShape ; - sh:targetClass iff:scheduleEntity ; - sh:property [ - sh:path ; - sh:order 1 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 2 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; -. 
- - -iff:OeeTemplateShape - a sh:NodeShape ; - sh:targetClass iff:oeeTemplate ; - sh:property [ - sh:path ; - sh:order 3 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 4 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - - sh:property [ - sh:path ; - sh:order 6 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 7 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 8 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - - sh:property [ - sh:path ; - sh:order 10 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 11 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - sh:property [ - sh:path ; - sh:order 12 ; - sh:nodeKind sh:BlankNode; - sh:minCount 1 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:IRI; - sh:class iff:cutter ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ]; - sh:property [ - sh:path ; - sh:order 14 ; - sh:nodeKind sh:BlankNode; - sh:minCount 0 ; - sh:maxCount 1 ; - sh:property [ - - sh:path ; - sh:nodeKind sh:Literal; - sh:minInclusive 0 ; - sh:maxInclusive 1 ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - sh:property [ - sh:path rdf:type ; - sh:hasValue ; - ] ; - ] ; - . - - - iff:AggregateWorkpiecesShape - a sh:NodeShape ; - sh:targetClass iff:oeeTemplate ; - rdfs:comment "Aggregate the Workpieces to OEE object" ; - sh:rule [ - a sh:SPARQLRule ; - sh:construct """ - PREFIX iff: - PREFIX oee: - PREFIX rdfs: - PREFIX rdf: - CONSTRUCT { - $this oee:totalCount [ ?workpieces ] . - } - WHERE { - $this oee:hasReferenceMachine [ ?machinex ] . - $this iff:startTime [ ?startTime ] . - $this iff:endTime [ ?endTime ] . - ?machinex iff:hasOutWorkpiece [ ?workpiece ] . - ?machinex iff:hasOutWorkpiece [ ?workpieceTs ] . - bind(COUNT(DISTINCT ?workpiece) as ?workpieces) - FILTER( ?workpieceTs > xsd:dateTime(?startTime) && ?workpieceTs < xsd:dateTime(?endTime) ) - } - GROUP BY ?machinex - """ ; - ] ; -. 
- - - iff:AggregateWorkpiecesShape - a sh:NodeShape ; - sh:targetClass iff:oeeTemplate ; - rdfs:comment "Aggregate the good Workpieces" ; - sh:rule [ - a sh:SPARQLRule ; - sh:construct """ - PREFIX iff: - PREFIX oee: - PREFIX rdfs: - PREFIX rdf: - CONSTRUCT { - $this oee:goodCount [ ?qualityCount ] . - } - WHERE { - $this oee:hasReferenceMachine [ ?machinex ] . - $this iff:startTime [ ?startTime ] . - $this iff:endTime [ ?endTime ] . - ?machinex iff:hasOutWorkpiece [ ?workpiece ] . - ?machinex iff:hasOutWorkpiece [ ?workpieceTs ] . - OPTIONAL{?workpiece iff:qualityCheck [ ?qualityCheck ] .} . - bind(COUNT(DISTINCT ?workpiece) as ?workpieces) - bind(IF(?qualityCheck = "1" || (!bound(?qualityCheck) && bound(?workpiece)), ?workpiece, "NULL" ) as ?qc) - bind(COUNT(DISTINCT ?qc ) as ?qualityCount) - FILTER( (?qualityCheck = "1" || (!bound(?qualityCheck) && bound(?workpiece))) && ?workpieceTs > xsd:dateTime(?startTime) && ?workpieceTs < xsd:dateTime(?endTime) ) - } - GROUP BY ?machinex - """ ; - ] ; -. - - - iff:ScheduleTimeValidationShape - a sh:NodeShape ; - rdfs:comment "Check if a schedule is valid" ; - sh:targetClass iff:scheduleEntity ; - sh:sparql [ - a sh:SPARQLConstraints ; - sh:message "Invalid Schedule: End of Schedule is before start of schedule." ; - sh:select """ - PREFIX iff: - PREFIX xsd: - PREFIX rdfs: - PREFIX schema: - - SELECT $this - where { - $this iff:startTime [ ?startTime ] . - $this iff:endTime [ ?endTime ] . - FILTER(?startTime >= ?endTime) . - } - """ ; - ] . - - - - iff:OEEAvailabilityStateShape - a sh:NodeShape ; - rdfs:comment "Create AvailailityState" ; - sh:targetClass iff:cutter ; - sh:rule [ - a sh:SPARQLRule ; - sh:construct """ - PREFIX iff: - PREFIX oee: - PREFIX xsd: - PREFIX rdfs: - PREFIX rdf: - CONSTRUCT { - $this [ ?availabilityState ] . - } - WHERE { - $this iff:state [ ?state ] . - OPTIONAL{?state oee:availability ?availabilityVal .} . - bind(IF(bound(?availabilityVal), ?availabilityVal, "0") as ?availabilityState) - } - """ ; - ] ; -. - - - iff:OEEUpdateAvailabilityTimeAggShape - a sh:NodeShape ; - rdfs:comment "Update availabilityTimeAgg" ; - sh:targetClass iff:oeeTemplate ; - sh:rule [ - a sh:SPARQLRule ; - sh:construct """ - PREFIX iff: - PREFIX oee: - PREFIX rdfs: - PREFIX ifa: - PREFIX rdf: - CONSTRUCT { - $this oee:availabilityTimeAgg [ ?availabilityStateAgg ] . - } - WHERE { - $this iff:startTime [ ?startTime ] . - $this iff:endTime [ ?endTime ] . - $this oee:hasReferenceMachine [ ?machine ] . - ?machine oee:availabilityState [ ?state ] . - ?machine oee:availabilityState [ ?stateTs ] . - bind(ifa:statetime(xsd:integer(?state), ?stateTs) as ?availabilityStateAgg) . - #bind(SUM(xsd:integer(?state))*5 as ?availabilityStateAgg) . - FILTER(?stateTs > xsd:dateTime(?startTime) && ?stateTs < xsd:dateTime(?endTime)) - } - GROUP BY $this ?machine - """ ; - ] ; -. 
- - - iff:FilterStrengthShape - a sh:NodeShape ; - sh:targetClass iff:filter ; - sh:sparql [ - a sh:SPARQLConstraints ; - sh:message "Check Filter strength as function of workpiece: {?value}" ; - sh:select """ -PREFIX iff: +:CartridgeShape a sh:NodeShape ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 10 ; + sh:path iffBaseEntities:isUsedFrom ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path ngsild:hasValue ] ], + [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 20 ; + sh:path iffBaseEntities:isUsedUntil ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path ngsild:hasValue ] ] ; + sh:targetClass iffBaseEntities:FilterCartridge . + +:CutterShape a sh:NodeShape ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 1 ; + sh:path iffBaseEntities:hasFilter ; + sh:property [ sh:class iffBaseEntities:Filter ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path ngsild:hasObject ] ], + [ sh:maxCount 1 ; + sh:minCount 0 ; + sh:nodeKind sh:BlankNode ; + sh:order 20 ; + sh:path iffBaseEntities:hasInWorkpiece ; + sh:property [ sh:class iffBaseEntities:Workpiece ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path ngsild:hasObject ] ], + [ sh:maxCount 1 ; + sh:minCount 0 ; + sh:nodeKind sh:BlankNode ; + sh:order 30 ; + sh:path iffBaseEntities:hasOutWorkpiece ; + sh:property [ sh:class iffBaseEntities:Workpiece ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path ngsild:hasObject ] ] ; + sh:targetClass iffBaseEntities:Cutter . + +:FilterShape a sh:NodeShape ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 10 ; + sh:path iffBaseEntities:hasStrength ; + sh:property [ sh:maxCount 1 ; + sh:maxInclusive 100.0 ; + sh:minCount 1 ; + sh:minInclusive 0.0 ; + sh:nodeKind sh:Literal ; + sh:path ngsild:hasValue ] ], + [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 1 ; + sh:path iffBaseEntities:hasCartridge ; + sh:property [ sh:class iffBaseEntities:FilterCartridge ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path ngsild:hasObject ] ] ; + sh:targetClass iffBaseEntities:Filter . + +:FilterStrengthShape a sh:NodeShape ; + sh:sparql [ a sh:SPARQLConstraints ; + sh:message "Check Filter strength as function of workpiece: {?value}" ; + sh:select """ +PREFIX iffBaseEntities: +PREFIX iffBaseKnowledge: PREFIX xsd: PREFIX rdfs: PREFIX schema: +PREFIX ngsild: SELECT ?this ?value where { - $this iff:strength [ ?strength ] . - ?pc a iff:cutter . - ?pc iff:hasFilter [ $this ] . - ?pc iff:hasInWorkpiece [ ?wp ] . - ?wp schema:depth [ ?depth ] . - ?pc iff:state [ ?pcstate ] . - $this iff:state [ ?filterstate ] . + $this iffBaseEntities:hasStrength [ ngsild:hasValue ?strength ] . + ?pc a iffBaseEntities:Cutter . + ?pc iffBaseEntities:hasFilter [ ngsild:hasObject $this ] . + ?pc iffBaseEntities:hasInWorkpiece [ ngsild:hasObject ?wp ] . + ?wp iffBaseEntities:hasHeight [ ngsild:hasValue ?height ] . + ?pc iffBaseEntities:hasState [ ngsild:hasValue ?pcstate ] . + $this iffBaseEntities:hasState [ ngsild:hasValue ?filterstate ] . 
BIND( - IF(xsd:integer(?depth) > 500 && xsd:float(?strength) < 1.0, \"Filter strength too low, should be 1.0\", - IF(xsd:integer(?depth) > 250 && xsd:float(?strength) < 0.75, \"Filter strength too low, should be at least 0.75\", - IF(xsd:float(?strength) < 0.5, \"Filter strength too low, should be at least 0.5\", \"ok\") + IF(xsd:integer(?height) > 500 && xsd:float(?strength) < 1.0, "Filter strength too low, should be 1.0", + IF(xsd:integer(?height) > 250 && xsd:float(?strength) < 0.75, "Filter strength too low, should be at least 0.75", + IF(xsd:float(?strength) < 0.5, "Filter strength too low, should be at least 0.5", "ok") ) ) as ?value) . - FILTER(?pcstate = && ?filterstate = && ?value != \"ok\") . + FILTER(?pcstate = iffBaseKnowledge:state_PROCESSING && ?filterstate = iffBaseKnowledge:state_ON && ?value != "ok") . } - """ ; - ] . - - -iff:StateOnCutterShape - a sh:NodeShape ; - sh:targetClass iff:cutter ; - sh:sparql [ - a sh:SPARQLConstraints; - sh:severity iff:severityCritical ; - sh:message "Cutter running without running filter" ; - sh:select """ -PREFIX iff: + """ ] ; + sh:targetClass iffBaseEntities:Filter . + +:MachineShape a sh:NodeShape ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 1 ; + sh:path iffBaseEntities:hasState ; + sh:property [ sh:class iffBaseKnowledge:MachineState ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path ngsild:hasValue ] ] ; + sh:targetClass iffBaseEntities:Machine . + +:StateOnCutterShape a sh:NodeShape ; + sh:sparql [ a sh:SPARQLConstraints ; + sh:message "Cutter running without running filter" ; + sh:select """ +PREFIX iffBaseEntities: +PREFIX iffBaseKnowledge: +PREFIX xsd: PREFIX rdfs: +PREFIX ngsild: + SELECT $this ?v1 ?f ?v2 where { - $this iff:state [ ?v1 ] . - $this iff:hasFilter [ ?f ] . - ?f iff:state [ ?v2 ] . - FILTER(?v1 = && ?v2 != ) + $this iffBaseEntities:hasState [ ngsild:hasValue ?v1 ] . + $this iffBaseEntities:hasFilter [ ngsild:hasObject ?f ] . + ?f iffBaseEntities:hasState [ ngsild:hasValue ?v2 ] . + FILTER(?v1 = iffBaseKnowledge:state_PROCESSING && ?v2 != iffBaseKnowledge:state_ON) } """ ; - ] . - - -iff:StateOnFilterShape - a sh:NodeShape ; - sh:targetClass iff:filter ; - sh:sparql [ - a sh:SPARQLConstraints; - sh:severity iff:severityWarning ; - sh:message "Filter running without running assigned machine" ; - sh:select """ -PREFIX iff: + sh:severity iffBaseKnowledge:severityCritical ] ; + sh:targetClass iffBaseEntities:Cutter . + +:StateOnFilterShape a sh:NodeShape ; + sh:sparql [ a sh:SPARQLConstraints ; + sh:message "Filter running without running assigned machine" ; + sh:select """ +PREFIX iffBaseEntities: +PREFIX iffBaseKnowledge: +PREFIX xsd: PREFIX rdfs: -PREFIX rdf: +PREFIX ngsild: + SELECT $this ?v1 ?pc ?v2 where { - $this iff:state [ ?v1 ] . - ?pc iff:hasFilter [ $this ] . - ?pc iff:state [ ?v2 ] . - FILTER(?v1 = && ?v2 != ) + $this iffBaseEntities:hasState [ ngsild:hasValue ?v1 ] . + ?pc rdf:type iffBaseEntities:Plasmacutter . + ?pc iffBaseEntities:hasFilter [ ngsild:hasObject $this ] . + ?pc iffBaseEntities:hasState [ ngsild:hasValue ?v2 ] . + FILTER(?v1 = iffBaseKnowledge:state_ON && ?v2 != iffBaseEntities:state_PROCESSING) } """ ; - ] . + sh:severity iffBaseKnowledge:severityWarning ] ; + sh:targetClass iffBaseEntities:Filter . 
+ +:StateValueShape a sh:NodeShape ; + sh:sparql [ a sh:SPARQLConstraints ; + sh:message "State value {?value} are not a valid machineState for machine {$this} of type {?type}" ; + sh:select """ +PREFIX iffBaseEntities: +PREFIX iffBaseKnowledge: +PREFIX xsd: +PREFIX rdfs: +PREFIX ngsild: -iff:StateValueShape - a sh:NodeShape ; - sh:targetClass iff:machine ; - sh:sparql [ - - sh:message "State value {?value} are not a valid machineState for machine {$this} of type {?type}" ; - sh:select """ - PREFIX iff: - PREFIX rdfs: - SELECT $this ?value ?type +SELECT $this ?value ?type where { - ?this a iff:machine . - ?this iff:state [ ?value ] . - ?this a ?type . - OPTIONAL{?type rdfs:subClassOf ?basetype .} - ?x iff:stateValidFor ?basetype . - FILTER( ?type != ) - FILTER NOT EXISTS { - - ?value iff:stateValidFor ?basetype . - + ?this a ?type . + ?this iffBaseEntities:hasState [ ngsild:hasValue ?value ] . + FILTER NOT EXISTS{ + ?value iffBaseKnowledge:isValidFor ?subtype . + ?type rdfs:subClassOf* ?subtype . } } - """ ; - ] . - -iff:ChangeWasteClassRulesShape - a sh:NodeShape ; - sh:targetClass iff:filterCartridge ; - sh:rule [ - a sh:SPARQLRule ; - sh:construct """ - PREFIX iff: - PREFIX rdfs: - PREFIX rdf: - CONSTRUCT { - $this iff:wasteClass [ ?wc ] . - } - WHERE { - ?filter iff:hasCartridge [ $this ] . - ?cutter iff:hasFilter [ ?filter ] . - ?cutter iff:state [ ?cutter_state ] . - ?filter iff:state [ ?filter_state ] . - ?cutter iff:hasInWorkpiece [ ?workpiece ] . - ?workpiece iff:material [ ?materialnr ] . - $this iff:wasteClass [ ?cur_wasteclass ] . - ?wc iff:containsMaterialNumber [ rdfs:member ?materialnr ] . - FILTER(?filter_state = iff:state_ON && ?cutter_state = iff:state_PROCESSING && ?cur_wasteclass != ?wc) - FILTER NOT EXISTS { - ?cur_wasteclass iff:higherHazardLevel ?wc . - } - - } - """ ; - ] . - - - -iff:TimestampCartridgeFromRulesShape - a sh:NodeShape ; - sh:targetClass iff:filterCartridge ; - sh:rule [ - a sh:SPARQLRule ; - sh:construct """ - PREFIX iff: - PREFIX rdfs: - PREFIX rdf: - CONSTRUCT { - $this iff:inUseFrom [ ?ts ] . - } - WHERE { - - ?filter iff:hasCartridge [ $this ] . - ?cutter iff:hasFilter [ ?filter ] . - ?cutter iff:state [ ?cstate ] . - ?cutter iff:state [ ?cstateTs ] . - ?filter iff:state [ ?fstate ] . - #OPTIONAL{$this iff:inUseFrom [ ?olderTs ]} . - bind(?cstateTs as ?ts) - #FILTER(!bound(?olderTs)) - #FILTER(?cstate = iff:state_PROCESSING && ?fstate = iff:state_ON) - FILTER NOT EXISTS { - $this iff:inUseFrom [ ?oldertimestamp ] . - } - } - """ ; - ] ; -. - - + """ ] ; + sh:targetClass iffBaseEntities:Machine . + +:TimestampCartridgeFromRulesShape a sh:NodeShape ; + sh:rule [ a sh:SPARQLRule ; + sh:construct """ +PREFIX iffBaseEntities: +PREFIX iffBaseKnowledge: +PREFIX rdfs: +PREFIX rdf: +PREFIX ngsild: +CONSTRUCT { + $this iffBaseEntities:isUsedFrom [ ngsild:hasValue ?ts ] . +} +WHERE { + ?filter iffBaseEntities:hasCartridge [ ngsild:hasObject $this ] . + ?filter iffBaseEntities:hasState [ ngsild:observedAt ?fstateTs ] . + ?filter iffBaseEntities:hasState [ ngsild:hasValue ?fstate ] . + bind(?fstateTs as ?ts) + FILTER NOT EXISTS { + $this iffBaseEntities:isUsedFrom [ ngsild:hasValue ?oldertimestamp ] . + } + FILTER(?fstate = iffBaseKnowledge:state_ON) +} +""" ] ; + sh:targetClass iffBaseEntities:FilterCartridge . 
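Editor's note: the SPARQL bodies of the shapes above can be dry-run outside Flink before they are compiled into SQL jobs. A possible invocation, assuming the repository root as working directory and a hypothetical query file /tmp/rule.rq holding a copy of one of the WHERE patterns (with $this treated as an ordinary SPARQL variable), using the check_sparql_expression.py helper added later in this change:

  # Illustrative sketch: the helper OWL-RL-expands the knowledge graph, merges it with
  # the SHACL graph and the optional entity file, runs the query and prints each row.
  cd semantic-model/shacl2flink
  python3 check_sparql_expression.py /tmp/rule.rq ../kms/shacl.ttl ../kms/knowledge.ttl \
      -e ../kms/model-example.jsonld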
+ +:TimestampCartridgeUntilRulesShape a sh:NodeShape ; + sh:rule [ a sh:SPARQLRule ; + sh:construct """ +PREFIX iffBaseEntities: +PREFIX iffBaseKnowledge: +PREFIX rdfs: +PREFIX rdf: +PREFIX ngsild: +CONSTRUCT { + $this iffBaseEntities:isUsedUntil [ ngsild:hasValue ?ts ] . +} +WHERE { + ?filter iffBaseEntities:hasCartridge [ ngsild:hasObject $this ] . + ?filter iffBaseEntities:hasState [ ngsild:hasValue ?fstate ] . + ?filter iffBaseEntities:hasState [ ngsild:observedAt ?fstateTs ] . + bind(?fstateTs as ?ts) + FILTER(?fstate = iffBaseKnowledge:state_ON) +} +""" ] ; + sh:targetClass iffBaseEntities:FilterCartridge . + +:WorkpieceShape a sh:NodeShape ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 10 ; + sh:path iffBaseEntities:hasWidth ; + sh:property [ sh:maxCount 1 ; + sh:maxInclusive 100.0 ; + sh:minCount 1 ; + sh:minInclusive 0.0 ; + sh:nodeKind sh:Literal ; + sh:path ngsild:hasValue ] ], + [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 20 ; + sh:path iffBaseEntities:hasHeight ; + sh:property [ sh:maxCount 1 ; + sh:maxInclusive 5.0 ; + sh:minCount 1 ; + sh:minInclusive 0.0 ; + sh:nodeKind sh:Literal ; + sh:path ngsild:hasValue ] ], + [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 30 ; + sh:path iffBaseEntities:hasLength ; + sh:property [ sh:maxCount 1 ; + sh:maxInclusive 200.0 ; + sh:minCount 1 ; + sh:minInclusive 0.0 ; + sh:nodeKind sh:Literal ; + sh:path ngsild:hasValue ] ], + [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 40 ; + sh:path iffBaseEntities:hasMaterial ; + sh:property [ sh:class material:Material ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path ngsild:hasValue ] ] ; + sh:targetClass iffBaseEntities:Workpiece . + +default1:CartridgeShape a sh:NodeShape ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNode ; + sh:order 10 ; + sh:path iffFilterEntities:hasWasteclass ; + sh:property [ sh:class iffFilterKnowledge:Wasteclass ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path ngsild:hasValue ] ] ; + sh:targetClass iffBaseEntities:FilterCartridge . + +default1:ChangeWasteClassRulesShape a sh:NodeShape ; + sh:rule [ a sh:SPARQLRule ; + sh:construct """ +PREFIX iffBaseEntities: +PREFIX iffBaseKnowledge: +PREFIX iffFilterEntities: +PREFIX iffFilterKnowledge: +PREFIX rdfs: +PREFIX rdf: +PREFIX ngsild: +PREFIX material: +CONSTRUCT { + $this iffFilterEntities:hasWasteclass [ ngsild:hasValue ?new_wasteclass ] . +} +WHERE { + ?filter iffBaseEntities:hasCartridge [ ngsild:hasObject $this ] . + ?cutter iffBaseEntities:hasFilter [ ngsild:hasObject ?filter ] . + ?cutter iffBaseEntities:hasState [ ngsild:hasValue ?cutter_state ] . + ?filter iffBaseEntities:hasState [ ngsild:hasValue ?filter_state ] . + ?workpiece iffBaseEntities:hasMaterial [ ngsild:hasValue ?material ] . + ?cutter iffBaseEntities:hasInWorkpiece [ ngsild:hasObject ?workpiece ] . + OPTIONAL{$this iffFilterEntities:hasWasteclass [ ngsild:hasValue ?cur_wasteclass ] .} + ?material iffFilterKnowledge:hasWasteclass ?new_wasteclass . + #?cur_wasteclass iffFilterKnowledge:higherHazardLevel ?new_wasteclass . + BIND(IF(bound(?cur_wasteclass), ?cur_wasteclass, iffFilterKnowledge:WC0) as ?wasteclass) + FILTER(?filter_state = iffBaseKnowledge:state_ON && ?cutter_state = iffBaseKnowledge:state_PROCESSING && ?wasteclass != ?new_wasteclass) + FILTER NOT EXISTS { + ?wasteclass iffFilterKnowledge:higherHazardLevel ?new_wasteclass . 
+ } +} +""" ] ; + sh:targetClass iffBaseEntities:FilterCartridge . -iff:TimestampCartridgeUntilRulesShape - a sh:NodeShape ; - sh:targetClass iff:filterCartridge ; - sh:rule [ - a sh:SPARQLRule ; - sh:construct """ - PREFIX iff: - PREFIX rdfs: - PREFIX rdf: - CONSTRUCT { - $this iff:inUseUntil [ ?ts ] . - } - WHERE { - - ?filter iff:hasCartridge [ $this ] . - ?cutter iff:hasFilter [ ?filter ] . - ?cutter iff:state [ ?cstate ] . - ?filter iff:state [ ?fstate ] . - bind(xsd:dateTime(now()) as ?ts) - FILTER(?cstate = iff:state_PROCESSING && ?fstate = iff:state_ON) - } - """ ; - ] ; -. \ No newline at end of file diff --git a/semantic-model/shacl2flink/Makefile b/semantic-model/shacl2flink/Makefile index 7c34b4a8..612a7b88 100644 --- a/semantic-model/shacl2flink/Makefile +++ b/semantic-model/shacl2flink/Makefile @@ -9,6 +9,7 @@ SQLITEDB := $(OUTPUTDIR)/database.db SQLITE3 := sqlite3 HELM_DIR := ../../helm/charts/shacl NAMESPACE := iff +ONTDIR := ../kms/ontology sqlite_files = $(OUTPUTDIR)/core.sqlite $(OUTPUTDIR)/ngsild.sqlite $(OUTPUTDIR)/rdf.sqlite $(OUTPUTDIR)/ngsild-models.sqlite $(OUTPUTDIR)/shacl-validation.sqlite @@ -18,7 +19,7 @@ build: $(SHACL) $(KNOWLEDGE $(MODEL) $(PYTHON) create_rdf_table.py $(KNOWLEDGE) $(KAFKA_TOPICS) $(PYTHON) create_core_tables.py ${PYTHON} create_udfs.py - $(PYTHON) create_ngsild_tables.py $(SHACL) $(KAFKA_TOPICS) + $(PYTHON) create_ngsild_tables.py $(SHACL) $(KNOWLEDGE) $(KAFKA_TOPICS) $(PYTHON) create_ngsild_models.py $(SHACL) $(KNOWLEDGE) $(MODEL) $(PYTHON) create_sql_checks_from_shacl.py $(SHACL) $(KNOWLEDGE) @@ -27,7 +28,7 @@ helm: build mkdir -p ${HELM_DIR} rm -rf ${HELM_DIR}/templates mv ${OUTPUTDIR} ${HELM_DIR}/templates - rm -f ${HELM_DIR}/templates/*.sqlite ${HELM_DIR}/templates/core.yaml + rm -f ${HELM_DIR}/templates/*.sqlite ${HELM_DIR}/templates/core.yaml ${HELM_DIR}/templates/knowledge.yaml cp Chart.yaml ${HELM_DIR} test: requirements-dev.txt @@ -99,6 +100,7 @@ flink-deploy: clean make build || make enable-strimzi kubectl -n $(NAMESPACE) delete -f output/ngsild-kafka.yaml --ignore-not-found || make enable-strimzi kubectl -n $(NAMESPACE) delete -f output/rdf-kafka.yaml --ignore-not-found || make enable-strimzi + kubectl -n $(NAMESPACE) delete -f output/knowledge-configmap.yaml --ignore-not-found cd ../../helm && ./helmfile -f helmfile-shacl.yaml apply || make enable-strimzi make enable-strimzi make test-flink-is-deployed @@ -140,6 +142,23 @@ else @echo No KMS_DIR defined, doing nothing endif + +ontology2kms: + BASEURI=$$(cat ../../helm/common.yaml | yq '.ontology.baseUri') && \ + if [ -z "$(ONTDEBUG)" ]; then rm -rf $(ONTDIR); wget -r --no-parent --directory-prefix=$(ONTDIR) -nd -A ttl,jsonld $${BASEURI}; else \ + echo Not updating local ontolgoy directory; fi + @for ontology in $(ONTOLOGIES) base; do \ + echo "Processing $${ontology}"; \ + entitiesfile=$(ONTDIR)/$${ontology}_entities.ttl; \ + knowledgefile=$(ONTDIR)/$${ontology}_knowledge.ttl; \ + shaclfile=$(ONTDIR)/$${ontology}_shacl.ttl; \ + if [ -f "$${entitiesfile}" ]; then ontpattern=$${ontpattern}" $${entitiesfile}"; fi; \ + if [ -f "$${knowledgefile}" ]; then ontpattern=$${ontpattern}" $${knowledgefile}"; fi; \ + if [ -f "$${shaclfile}" ]; then shaclpattern=$${shaclpattern}" $${shaclfile}"; fi; \ + done; \ + echo creating knowledge.ttl with $${ontpattern}; /bin/bash -O extglob -c 'rdfpipe -i ttl -o ttl $${ontpattern} $(ONTDIR)/!(*_shacl.ttl|*.html|*.jsonld|*.txt) > ../kms/knowledge.ttl'; \ + echo creating shacl.ttl with $${shaclpattern}; rdfpipe -i ttl -o ttl $${shaclpattern} > 
../kms/shacl.ttl
+
 clean:
 	@rm -rf $(OUTPUTDIR)
diff --git a/semantic-model/shacl2flink/check_sparql_expression.py b/semantic-model/shacl2flink/check_sparql_expression.py
new file mode 100644
index 00000000..a1a2b327
--- /dev/null
+++ b/semantic-model/shacl2flink/check_sparql_expression.py
@@ -0,0 +1,61 @@
+#
+# Copyright (c) 2024 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+import rdflib
+import owlrl
+import argparse
+
+
+def parse_args(args=sys.argv[1:]):
+    parser = argparse.ArgumentParser(description='Evaluate SPARQL expression on knowledge/shacl/entity files.')
+    parser.add_argument('queryfile', help='Path to a file which contains a SPARQL query')
+    parser.add_argument('shaclfile', help='Path to the SHACL file')
+    parser.add_argument('knowledgefile', help='Path to the knowledge file')
+    parser.add_argument('-e', '--entities', help='Path to the jsonld model/entity file')
+    parsed_args = parser.parse_args(args)
+    return parsed_args
+
+
+def main(queryfile, shaclfile, knowledgefile, entityfile, output_folder='output'):
+
+    with open(queryfile, "r") as f:
+        query = f.read()
+
+    g = rdflib.Graph()
+    g.parse(shaclfile)
+    h = rdflib.Graph()
+    h.parse(knowledgefile)
+    owlrl.DeductiveClosure(owlrl.OWLRL_Extension, rdfs_closure=True,
+                           axiomatic_triples=True, datatype_axioms=True).expand(h)
+    if entityfile is not None:
+        i = rdflib.Graph()
+        i.parse(entityfile)
+        g += i
+    g += h
+
+    qres = g.query(query)
+    for row in qres:
+        print(f'{row}')
+
+
+if __name__ == '__main__':
+    args = parse_args()
+    queryfile = args.queryfile
+    shaclfile = args.shaclfile
+    knowledgefile = args.knowledgefile
+    entityfile = args.entities
+    main(queryfile, shaclfile, knowledgefile, entityfile)
diff --git a/semantic-model/shacl2flink/create_knowledge_closure.py b/semantic-model/shacl2flink/create_knowledge_closure.py
new file mode 100644
index 00000000..bc956366
--- /dev/null
+++ b/semantic-model/shacl2flink/create_knowledge_closure.py
@@ -0,0 +1,46 @@
+#
+# Copyright (c) 2024 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import sys +import os +import rdflib +import owlrl +import argparse + + +def parse_args(args=sys.argv[1:]): + parser = argparse.ArgumentParser(description='Create deductive closure of knowledge file.') + parser.add_argument('knowledgefile', help='Path to the knowledge file') + parser.add_argument('-o', '--outputfile', default='knowledge_closure.ttl', + help='Name of the result knowledge file.') + parsed_args = parser.parse_args(args) + return parsed_args + + +def main(knowledgefile, outputfile): + h = rdflib.Graph() + h.parse(knowledgefile) + owlrl.DeductiveClosure(owlrl.OWLRL_Extension, rdfs_closure=True, + axiomatic_triples=True, datatype_axioms=True).expand(h) + filename = os.path.dirname(os.path.abspath(knowledgefile)) + '/' + outputfile + h.serialize(destination=filename, format='turtle') + + +if __name__ == '__main__': + args = parse_args() + knowledgefile = args.knowledgefile + outputfile = args.outputfile + main(knowledgefile, outputfile) diff --git a/semantic-model/shacl2flink/create_ngsild_models.py b/semantic-model/shacl2flink/create_ngsild_models.py index 455373bb..b381de0a 100644 --- a/semantic-model/shacl2flink/create_ngsild_models.py +++ b/semantic-model/shacl2flink/create_ngsild_models.py @@ -20,6 +20,7 @@ import argparse import lib.utils as utils import lib.configs as configs +import owlrl def parse_args(args=sys.argv[1:]): @@ -82,16 +83,20 @@ def parse_args(args=sys.argv[1:]): PREFIX rdfs: PREFIX ngsild: PREFIX sh: +PREFIX owl: -SELECT DISTINCT ?id ?type ?field ?ord ?shacltype +SELECT DISTINCT ?id ?type ?field ?tabletype where { ?nodeshape a sh:NodeShape . - ?nodeshape sh:targetClass ?shacltype . + ?nodeshape sh:targetClass ?basetype . ?id a ?type . - ?type rdfs:subClassOf* ?shacltype . - ?nodeshape sh:property [ sh:path ?field ; sh:order ?ord ] . + ?type rdfs:subClassOf* ?basetype . + ?tabletype rdfs:subClassOf* ?basetype . + ?type rdfs:subClassOf* ?tabletype . + ?nodeshape sh:property [ sh:path ?field ;] . 
+ FILTER(?tabletype != rdfs:Resource && ?tabletype != owl:Thing && ?tabletype != owl:Nothing ) } - ORDER BY ?id ?ord + ORDER BY ?id STR(?field) """ @@ -113,9 +118,9 @@ def main(shaclfile, knowledgefile, modelfile, output_folder='output'): model.parse(modelfile) knowledge = Graph() knowledge.parse(knowledgefile) - model += g + knowledge + attributes_model = model + g + knowledge - qres = model.query(attributes_query) + qres = attributes_model.query(attributes_query) first = True if len(qres) > 0: print(f'INSERT INTO `{configs.attributes_table_name}` VALUES', @@ -155,11 +160,14 @@ def main(shaclfile, knowledgefile, modelfile, output_folder='output'): print(";", file=sqlitef) # Create ngsild tables by sparql - qres = model.query(ngsild_tables_query_noinference) + owlrl.DeductiveClosure(owlrl.OWLRL_Extension, rdfs_closure=True, axiomatic_triples=True, + datatype_axioms=True).expand(knowledge) + table_model = model + knowledge + g + qres = table_model.query(ngsild_tables_query_noinference) tables = {} # Now create the entity tables - for id, type, field, ord, tabletype in qres: + for id, type, field, tabletype in qres: key = utils.camelcase_to_snake_case(utils.strip_class(tabletype.toPython())) if key not in tables: table = {} @@ -170,12 +178,9 @@ def main(shaclfile, knowledgefile, modelfile, output_folder='output'): tables[key][idstr] = [] tables[key][idstr].append(idstr) tables[key][idstr].append(type.toPython()) + tables[key][idstr].append('CURRENT_TIMESTAMP') tables[key][idstr].append(idstr + "\\\\" + field.toPython()) - for type, ids in tables.items(): - for id, table in ids.items(): - table.append('CURRENT_TIMESTAMP') - for type, ids in tables.items(): for id, table in ids.items(): print(f'INSERT INTO `{type}` VALUES', diff --git a/semantic-model/shacl2flink/create_ngsild_tables.py b/semantic-model/shacl2flink/create_ngsild_tables.py index 5ac386a7..9fae8478 100644 --- a/semantic-model/shacl2flink/create_ngsild_tables.py +++ b/semantic-model/shacl2flink/create_ngsild_tables.py @@ -14,8 +14,7 @@ # limitations under the License. # -from rdflib import Graph, Namespace, Variable -from rdflib.namespace import RDF +from rdflib import Graph import os import sys import argparse @@ -23,60 +22,61 @@ import lib.utils as utils import lib.configs as configs from ruamel.yaml.scalarstring import (SingleQuotedScalarString as sq) +import owlrl field_query = """ -PREFIX iff: PREFIX rdfs: PREFIX ngsild: PREFIX sh: -SELECT DISTINCT ?path +SELECT DISTINCT ?path ?shacltype where { ?nodeshape a sh:NodeShape . - ?nodeshape sh:targetClass ?shacltype . - ?nodeshape sh:property [ sh:path ?path ; sh:order ?ord ] . + ?nodeshape sh:targetClass ?shacltypex . + ?shacltype rdfs:subClassOf* ?shacltypex . + ?nodeshape sh:property [ sh:path ?path ; ] . 
} - ORDER BY ?ord + ORDER BY STR(?path) """ def parse_args(args=sys.argv[1:]): parser = argparse.ArgumentParser(description='create_ngsild_tables.py \ - ') + ') parser.add_argument('shaclfile', help='Path to the SHACL file') + parser.add_argument('knowledgefile', help='Path to the Knowledge file') parsed_args = parser.parse_args(args) return parsed_args -def main(shaclfile, output_folder='output'): +def main(shaclfile, knowledgefile, output_folder='output'): yaml = ruamel.yaml.YAML() utils.create_output_folder(output_folder) g = Graph() g.parse(shaclfile) - sh = Namespace("http://www.w3.org/ns/shacl#") + h = Graph() + h.parse(knowledgefile) + owlrl.DeductiveClosure(owlrl.OWLRL_Extension, rdfs_closure=True, axiomatic_triples=True, + datatype_axioms=True).expand(h) + g += h tables = {} - for s, p, o in g.triples((None, RDF.type, sh.NodeShape)): - for _, _, target_class in g.triples((s, sh.targetClass, None)): - if (s, sh.property, None) not in g: - break - stripped_class = utils.camelcase_to_snake_case(utils.strip_class(utils.strip_class( - target_class.toPython()))) - if stripped_class not in tables: - table = [] - tables[stripped_class] = table - else: - table = tables[stripped_class] + qres = g.query(field_query) + for row in qres: + target_class = row.shacltype + stripped_class = utils.camelcase_to_snake_case(utils.strip_class(utils.strip_class( + target_class.toPython()))) + if stripped_class not in tables: + table = [] + tables[stripped_class] = table table.append({sq("id"): "STRING"}) table.append({sq("type"): "STRING"}) - # Query the fields in order - bindings = {Variable("shacltype"): target_class} - qres = g.query(field_query, initBindings=bindings) - for row in qres: - target_path = row.path - table.append({sq(f'{target_path}'): "STRING"}) table.append({sq("ts"): "TIMESTAMP(3) METADATA FROM 'timestamp'"}) table.append({"watermark": "FOR `ts` AS `ts`"}) + else: + table = tables[stripped_class] + target_path = row.path + table.append({sq(f'{target_path}'): "STRING"}) # Kafka topic object for RDF config = {} @@ -118,4 +118,5 @@ def main(shaclfile, output_folder='output'): if __name__ == '__main__': args = parse_args() shaclfile = args.shaclfile - main(shaclfile) + knowledgefile = args.knowledgefile + main(shaclfile, knowledgefile) diff --git a/semantic-model/shacl2flink/create_rdf_table.py b/semantic-model/shacl2flink/create_rdf_table.py index 38c0490c..f42781c9 100644 --- a/semantic-model/shacl2flink/create_rdf_table.py +++ b/semantic-model/shacl2flink/create_rdf_table.py @@ -26,6 +26,25 @@ from lib import configs +filter_out = """ +PREFIX iff: +PREFIX rdfs: +PREFIX ngsild: +PREFIX sh: +PREFIX owl: + +CONSTRUCT {?s ?p ?o} +where { + ?s ?p ?o . 
+ FILTER((?o = rdfs:Class || ?p = rdfs:subClassOf) + && ?o != rdfs:Resource + && ?o != rdfs:Thing + && ?o != owl:Thing + && ?o != owl:Class) +} +""" + + def parse_args(args=sys.argv[1:]): parser = argparse.ArgumentParser(description='create_rdf_table.py --namespace \ ') @@ -101,7 +120,8 @@ def main(knowledgefile, namespace, output_folder='output'): # Create RDF statements to insert data g = rdflib.Graph() g.parse(knowledgefile) - owlrl.OWLRLExtras.OWLRL_Extension(g, axioms=True, daxioms=True, rdfs=True).closure() + owlrl.DeductiveClosure(owlrl.OWLRL_Extension, rdfs_closure=True, axiomatic_triples=True, + datatype_axioms=True).expand(g) statementsets = create_statementset(g) sqlstatements = '' @@ -142,6 +162,12 @@ def main(knowledgefile, namespace, output_folder='output'): configs.rdf_topic, configs.kafka_topic_object_label, config), fk) + with open(os.path.join(output_folder, "knowledge-configmap.yaml"), "w") as fp: + qres = g.query(filter_out) + class_ttl = {} + class_ttl['knowledge.ttl'] = qres.serialize(format='turtle').decode("utf-8") + fp.write("---\n") + yaml.dump(utils.create_configmap_generic('knowledge', class_ttl), fp) if __name__ == '__main__': diff --git a/semantic-model/shacl2flink/lib/bgp_translation_utils.py b/semantic-model/shacl2flink/lib/bgp_translation_utils.py index fb3167be..d29b21ba 100644 --- a/semantic-model/shacl2flink/lib/bgp_translation_utils.py +++ b/semantic-model/shacl2flink/lib/bgp_translation_utils.py @@ -18,12 +18,11 @@ import re import random from rdflib import Namespace, URIRef, Variable, BNode, Literal -from rdflib.namespace import RDF, RDFS +from rdflib.namespace import RDF import copy basequery = """ -PREFIX iff: PREFIX rdfs: PREFIX ngsild: PREFIX sh: @@ -137,9 +136,9 @@ def get_rdf_join_condition(r, property_variables, entity_variables, time_variabl variables = property_variables if r in property_variables else time_variables if var in selectvars: if variables[r]: - return "'<' ||" + selectvars[var] + "|| '>'" + return "'<'||" + selectvars[var] + "||'>'" else: - return """'"' ||""" + selectvars[var] + """|| '"'""" + return """'"'||""" + selectvars[var] + """||'"'""" else: raise utils.SparqlValidationFailed(f'Could not resolve variable \ ?{var} at this point. 
You might want to rearrange the query to hint to \ @@ -170,6 +169,7 @@ def sort_triples(ctx, bounds, triples, graph): searchable with RDFlib """ def sort_key(triple): + key = '' if not isinstance(triple[0], BNode): key += triple[0].toPython() @@ -221,6 +221,9 @@ def select_candidates(bounds, triples, graph): elif isinstance(o, Variable) and utils.create_varname(s) in bounds: count += 1 bounds[utils.create_varname(o)] = '' + elif isinstance(o, Variable) and utils.create_varname(o) in bounds: + count += 1 + bounds[utils.create_varname(s)] = '' elif not isinstance(s, BNode) or (p != ngsild['hasValue'] and p != ngsild['hasObject'] and p != ngsild['observedAt'] and p != RDF['type']): # (2) @@ -232,6 +235,9 @@ def select_candidates(bounds, triples, graph): count += 1 if isinstance(s, Variable): bounds[utils.create_varname(s)] = '' + elif isinstance(s, Variable) and isinstance(o, URIRef): + count += 1 + bounds[utils.create_varname(s)] = '' elif isinstance(o, BNode): # rdf with blank nodes blanktriples = graph.triples((o, None, None)) for (bs, bp, bo) in blanktriples: @@ -349,6 +355,7 @@ def create_ngsild_mappings(ctx, sorted_graph): sparqlvalidationquery += f'?{key} rdfs:subClassOf <{value.toPython()}> .\n' sparqlvalidationquery += f'<{value.toPython()}> rdfs:subClassOf ?{key} .\n' for entity in entity_variables.keys(): + sparqlvalidationquery += f'?{entity}shapex sh:targetClass/rdfs:subClassOf* ?{entity} .\n' sparqlvalidationquery += f'?{entity}shape sh:targetClass ?{entity} .\n' variables.append(entity) for s, p, o in sorted_graph.triples((entity, None, None)): @@ -358,22 +365,24 @@ def create_ngsild_mappings(ctx, sorted_graph): [ sh:path ngsild:hasObject; sh:class ?{property_class} ] ] .\n' for property in property_variables: variables.append(property) + sparqlvalidationquery += f'?{property}shapex sh:targetClass/rdfs:subClassOf* ?{property} .\n' sparqlvalidationquery += f'?{property}shape sh:targetClass ?{property} .\n' for s, p, o in sorted_graph.triples((None, ngsild['hasValue'], property)): for p in sorted_graph.predicates(object=s): sparqlvalidationquery += f'?{property}shape sh:property [ sh:path <{p}> ; ] .\n' for subj in sorted_graph.subjects(predicate=p, object=s): if isinstance(subj, Variable): - equivalence.append(f'{subj.toPython()}shape = ?{property}shape') + sparqlvalidationquery += f'{subj.toPython()} rdfs:subClassOf* ?{property} .\n' for property in time_variables: variables.append(property) + sparqlvalidationquery += f'?{property}shapex sh:targetClass/rdfs:subClassOf* ?{property} .\n' sparqlvalidationquery += f'?{property}shape sh:targetClass ?{property} .\n' for s, p, o in sorted_graph.triples((None, ngsild['observedAt'], property)): for p in sorted_graph.predicates(object=s): sparqlvalidationquery += f'?{property}shape sh:property [ sh:path <{p}> ; ] .\n' for subj in sorted_graph.subjects(predicate=p, object=s): if isinstance(subj, Variable): - equivalence.append(f'{subj.toPython()}shape = ?{property}shape') + sparqlvalidationquery += f'{subj.toPython()} rdfs:subClassOf ?{property}' query = basequery for variable in variables: @@ -390,7 +399,7 @@ def create_ngsild_mappings(ctx, sorted_graph): if not first: query += ')\n}' else: - query += '}' + query += '} LIMIT 1' # Now execute the validation query # For the time being only unique resolutions of classes are allowed. @@ -400,8 +409,12 @@ def create_ngsild_mappings(ctx, sorted_graph): raise utils.SparqlValidationFailed(f"Validation of BGP failed. Variable types cannot be uniquely \ determined! 
Check expression {ctx['query']}") elif len(qres) == 0: + print('---------------------\nProblem identified in following BGP:') + for s, p, o in sorted_graph: + print(s, p, o) + print('---------------------') raise utils.SparqlValidationFailed(f"Validation of BGP failed. No solution found for ngsild variable \ -mappings! Check expression {ctx['query']}") +mappings! Check BGP in expression {ctx['query']}") else: # no ngsi-ld variables found, so do not try to infer the types qres = [] @@ -566,21 +579,32 @@ def process_rdf_spo(ctx, local_ctx, s, p, o): # special case p == rdf-type if p == RDF['type']: entity = local_ctx['bounds'].get(utils.create_varname(s)) - if entity is None and isinstance(o, URIRef): # create entity table based on type definition + if entity is None: # create entity table based on type definition subject_tablename = f'{s.toPython().upper()}TABLE'[1:] subject_varname = f'{s.toPython()}'[1:] subject_sqltable = utils.camelcase_to_snake_case(utils.strip_class(local_ctx['row'][subject_varname])) + join_condition = '' + if isinstance(o, URIRef): + join_condition = f"'<'||{subject_tablename}.`type`||'>' = '<{o.toPython()}>'" + else: + object_join_bound = get_rdf_join_condition(o, + ctx['property_variables'], + ctx['entity_variables'], + ctx['time_variables'], + local_ctx['bounds']) + join_condition = f"'<'||{subject_tablename}.`type`||'>' = {object_join_bound}" local_ctx['bgp_sql_expression'].append({'statement': f'{subject_sqltable}_view AS {subject_tablename}', - 'join_condition': ''}) + 'join_condition': join_condition}) ctx['sql_tables'].append(subject_sqltable) local_ctx['bounds'][subject_varname] = f'{subject_tablename}.`id`' local_ctx['bgp_tables'][subject_tablename] = [] - predicate_join_condition = f"{rdftable_name}.predicate = '<" + RDFS['subClassOf'].toPython() + ">'" - object_join_condition = f"{rdftable_name}.object = '<{o.toPython()}>'" - subject_join_condition = f"{rdftable_name}.subject = '<' || {subject_tablename}.`type` || '>'" - join_condition = f"{subject_join_condition} and {predicate_join_condition} and {object_join_condition}" - statement = f"{configs.rdf_table_name} as {rdftable_name}" - local_ctx['bgp_sql_expression'].append({'statement': statement, 'join_condition': join_condition}) + # predicate_join_condition = f"{rdftable_name}.predicate = '<" + RDFS['subClassOf'].toPython() + ">'" + # object_join_condition = f"{rdftable_name}.object = '<{o.toPython()}>'" + # subject_join_condition = f"{rdftable_name}.subject = '<' || {subject_tablename}.`type` || '>'" + # join_condition = f"{subject_join_condition} and {predicate_join_condition} and + # {object_join_condition}" + # statement = f"{configs.rdf_table_name} as {rdftable_name}" + # local_ctx['bgp_sql_expression'].append({'statement': statement, 'join_condition': join_condition}) return else: entity = entity.replace('.`id`', '.id') # Normalize cases when id is quoted @@ -607,18 +631,19 @@ def process_rdf_spo(ctx, local_ctx, s, p, o): # variable is bound, so get it and link it with bound value objvar = local_ctx['bounds'][utils.create_varname(o)] local_ctx['where'] = merge_where_expression(local_ctx['where'], - f"'<' || {entity_column} || '>' = {objvar}") + f"'<'||{entity_column}||'>' = {objvar}") return else: # subject entity variable but object is no variable local_ctx['where'] = merge_where_expression(local_ctx['where'], - f"'<' || {entity_column} || '>' \ + f"'<'||{entity_column}||'>' \ = {utils.format_node_type(o)}") return else: raise utils.SparqlValidationFailed("Cannot query generic RDF term with 
NGSI-LD entity subject.") else: - # No special case. Check if subject is non bound and if non bound whether it can be bound + # No special case. + # Check if subject is non bound and if non bound whether it can be bound subject_join_bound = get_rdf_join_condition(s, ctx['property_variables'], ctx['entity_variables'], ctx['time_variables'], local_ctx['bounds']) if subject_join_bound is None: diff --git a/semantic-model/shacl2flink/lib/sparql_to_sql.py b/semantic-model/shacl2flink/lib/sparql_to_sql.py index be4f5fe2..d8cf7cf9 100644 --- a/semantic-model/shacl2flink/lib/sparql_to_sql.py +++ b/semantic-model/shacl2flink/lib/sparql_to_sql.py @@ -18,12 +18,14 @@ import os import re from rdflib import Graph, Namespace, URIRef, Variable, BNode, Literal -from rdflib.namespace import RDF, XSD +from rdflib.namespace import RDF, XSD, RDFS +from rdflib.paths import MulPath from rdflib.plugins.sparql.parser import parseQuery from rdflib.plugins.sparql.algebra import translateQuery from functools import reduce import copy + file_dir = os.path.dirname(__file__) sys.path.append(file_dir) import utils # noqa: E402 @@ -139,6 +141,7 @@ def translate_query(query, target_class, orig_query): query: parsed sparql object """ algebra = query.algebra + randomstr = 'this' + bgp_translation_utils.get_random_string(16) ctx = { 'namespace_manager': query.prologue.namespace_manager, 'relationships': relationships, @@ -152,7 +155,10 @@ def translate_query(query, target_class, orig_query): 'target_sql': '', 'target_where': '', 'target_modifiers': [], - 'add_triples': [(Variable('this'), RDF['type'], target_class)], + 'add_triples': [(Variable('this'), RDF['type'], Variable(randomstr)), + (Variable(randomstr), + RDFS['subClassOf'], + target_class)], 'query': orig_query } if algebra.name == 'SelectQuery' or algebra.name == 'ConstructQuery': @@ -787,13 +793,17 @@ def translate_BGP(ctx, bgp): bgp['target_sql'] = '' return h = Graph() + filtered_triples = [] for s, p, o in bgp.triples: + if isinstance(p, MulPath): + p = p.path h.add((s, p, o)) + filtered_triples.append((s, p, o)) property_variables, entity_variables, time_variables, row = bgp_translation_utils.create_ngsild_mappings(ctx, h) # before translating, sort the bgp order to allow easier binds - bgp.triples = bgp_translation_utils.sort_triples(ctx, ctx['bounds'], bgp.triples, h) + bgp.triples = bgp_translation_utils.sort_triples(ctx, ctx['bounds'], filtered_triples, h) bgp_translation_utils.merge_vartypes(ctx, property_variables, entity_variables, time_variables) local_ctx = {} diff --git a/semantic-model/shacl2flink/lib/utils.py b/semantic-model/shacl2flink/lib/utils.py index 066a86b0..e1503af3 100644 --- a/semantic-model/shacl2flink/lib/utils.py +++ b/semantic-model/shacl2flink/lib/utils.py @@ -275,17 +275,27 @@ def create_sql_view(table_name, table, primary_key=['id'], def create_configmap(object_name, sqlstatementset): + # yaml_cm = {} + # yaml_cm['apiVersion'] = 'v1' + # yaml_cm['kind'] = 'ConfigMap' + # metadata = {} + # yaml_cm['metadata'] = metadata + # metadata['name'] = object_name + data = {} + # yaml_cm['data'] = data + for index, value in enumerate(sqlstatementset): + data[index] = value + return create_configmap_generic(object_name, data) + + +def create_configmap_generic(object_name, data): yaml_cm = {} yaml_cm['apiVersion'] = 'v1' yaml_cm['kind'] = 'ConfigMap' metadata = {} yaml_cm['metadata'] = metadata metadata['name'] = object_name - - data = {} yaml_cm['data'] = data - for index, value in enumerate(sqlstatementset): - data[index] = value return 
yaml_cm diff --git a/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld b/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld index f9205e58..e5049305 100644 --- a/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld +++ b/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld @@ -15,7 +15,7 @@ }, "https://industry-fusion.com/types/v0.9/hasWorkpiece": { "type": "Relationship", - "https://uri.etsi.org/ngsi-ld/hasObject": "urn:workpiece:1" + "object": "urn:workpiece:1" }, "https://industry-fusion.com/types/v0.9/hasFilter": { "type": "Relationship", @@ -42,7 +42,7 @@ }, "https://industry-fusion.com/types/v0.9/hasCartridge": { "type": "Relationship", - "https://uri.etsi.org/ngsi-ld/hasObject": "urn:filterCartridge:1" + "object": "urn:filterCartridge:1" } } ] \ No newline at end of file diff --git a/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld_result b/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld_result index cb488abb..de079a90 100644 --- a/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld_result +++ b/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test1/model10.jsonld_result @@ -6,7 +6,7 @@ 'urn:filter:1','InConstraintComponent(https://industry-fusion.com/types/v0.9/stringState[0])','ok' 'urn:filter:1','MaxInclusiveConstraintComponent(https://industry-fusion.com/types/v0.9/strength[0])','ok' 'urn:filter:1','MinInclusiveConstraintComponent(https://industry-fusion.com/types/v0.9/strength[0])','ok' -'urn:filter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/hasCartridge)','warning' +'urn:filter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/hasCartridge)','ok' 'urn:filter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/state[0])','ok' 'urn:filter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/strength[0])','ok' 'urn:filter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/stringState[0])','ok' @@ -19,6 +19,6 @@ 'urn:plasmacutter:1','DatatypeConstraintComponent(https://industry-fusion.com/types/v0.9/state[0])','ok' 'urn:plasmacutter:1','InConstraintComponent(https://industry-fusion.com/types/v0.9/stringState[0])','warning' 'urn:plasmacutter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/hasFilter)','ok' -'urn:plasmacutter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/hasWorkpiece)','warning' +'urn:plasmacutter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/hasWorkpiece)','ok' 'urn:plasmacutter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/state[0])','ok' 'urn:plasmacutter:1','NodeKindConstraintComponent(https://industry-fusion.com/types/v0.9/stringState[0])','ok' diff --git a/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test2/shacl.ttl b/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test2/shacl.ttl index 1c5a413a..390bca0d 100644 --- a/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test2/shacl.ttl +++ b/semantic-model/shacl2flink/tests/sql-tests/kms-constraints/test2/shacl.ttl @@ -346,17 +346,13 @@ iff:StateValueShape PREFIX iff: PREFIX rdfs: SELECT $this ?value ?type - where { - ?this a iff:machine . - ?this iff:state [ ?value ] . - ?this a ?type . - OPTIONAL{?type rdfs:subClassOf ?basetype .} - ?x iff:stateValidFor ?basetype . 
- FILTER( ?type != ) - FILTER NOT EXISTS { - - ?value iff:stateValidFor ?basetype . - + where { + ?this a ?type . + ?type rdfs:subClassOf iff:machine . + ?this iff:state [ ?value ] . + FILTER NOT EXISTS{ + ?value iff:stateValidFor ?subtype . + ?type rdfs:subClassOf* ?subtype . } } """ ; diff --git a/semantic-model/shacl2flink/tests/sql-tests/tests.sh b/semantic-model/shacl2flink/tests/sql-tests/tests.sh index 53034cfe..c8597cce 100644 --- a/semantic-model/shacl2flink/tests/sql-tests/tests.sh +++ b/semantic-model/shacl2flink/tests/sql-tests/tests.sh @@ -27,7 +27,7 @@ for testdir in ${testdirs_constraints}; do rm -f ${DATABASE} echo -n "Test with model ${MODEL} in dir ${testdir} ..." python3 $TOOLDIR/create_ngsild_models.py ${SHACL} ${KNOWLEDGE} ${MODEL} - python3 $TOOLDIR/create_ngsild_tables.py ${SHACL} + python3 $TOOLDIR/create_ngsild_tables.py ${SHACL} ${KNOWLEDGE} # Test logic sqlite3 ${DATABASE} < $OUTPUTDIR/rdf.sqlite sqlite3 ${DATABASE} < $OUTPUTDIR/core.sqlite @@ -61,7 +61,7 @@ for testdir in ${testdirs_rules}; do rm -f ${DATABASE} echo -n "Test with model ${MODEL} in dir ${testdir} ..." python3 $TOOLDIR/create_ngsild_models.py ${SHACL} ${KNOWLEDGE} ${MODEL} - python3 $TOOLDIR/create_ngsild_tables.py ${SHACL} + python3 $TOOLDIR/create_ngsild_tables.py ${SHACL} ${KNOWLEDGE} # Test logic sqlite3 ${DATABASE} < $OUTPUTDIR/rdf.sqlite sqlite3 ${DATABASE} < $OUTPUTDIR/core.sqlite @@ -93,7 +93,7 @@ for testdir in ${testdirs_udf}; do rm -f ${DATABASE} echo -n "Test with model ${MODEL} in dir ${testdir} ..." python3 $TOOLDIR/create_ngsild_models.py ${SHACL} ${KNOWLEDGE} ${MODEL} - python3 $TOOLDIR/create_ngsild_tables.py ${SHACL} + python3 $TOOLDIR/create_ngsild_tables.py ${SHACL} ${KNOWLEDGE} # Test logic sqlite3 ${DATABASE} < $OUTPUTDIR/rdf.sqlite sqlite3 ${DATABASE} < $OUTPUTDIR/core.sqlite diff --git a/semantic-model/shacl2flink/tests/test_check_sparql_expression.py b/semantic-model/shacl2flink/tests/test_check_sparql_expression.py new file mode 100644 index 00000000..925068e2 --- /dev/null +++ b/semantic-model/shacl2flink/tests/test_check_sparql_expression.py @@ -0,0 +1,26 @@ +# +# Copyright (c) 2024 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from unittest.mock import patch, mock_open +import check_sparql_expression + + +@patch('check_sparql_expression.rdflib') +@patch('check_sparql_expression.owlrl') +def test_main(mock_owlrl, mock_rdflib): + with patch("builtins.open", mock_open(read_data="data")) as mock_file: + check_sparql_expression.main('queryfile.txt', 'kms/shacl.ttl', 'kms/knowledge.ttl', 'kms/model.jsonld') + mock_file.assert_called_with("queryfile.txt", 'r') diff --git a/semantic-model/shacl2flink/tests/test_create_knowledge_closure.py b/semantic-model/shacl2flink/tests/test_create_knowledge_closure.py new file mode 100644 index 00000000..db2110a5 --- /dev/null +++ b/semantic-model/shacl2flink/tests/test_create_knowledge_closure.py @@ -0,0 +1,24 @@ +# +# Copyright (c) 2024 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from unittest.mock import patch +import create_knowledge_closure + + +@patch('create_knowledge_closure.rdflib') +@patch('create_knowledge_closure.owlrl') +def test_main(mock_owlrl, mock_rdflib, tmp_path): + create_knowledge_closure.main('kms/knowledge.ttl', 'knowledge_closure.ttl') diff --git a/semantic-model/shacl2flink/tests/test_create_ngsild_tables.py b/semantic-model/shacl2flink/tests/test_create_ngsild_tables.py index 4e67228e..b33e2f74 100644 --- a/semantic-model/shacl2flink/tests/test_create_ngsild_tables.py +++ b/semantic-model/shacl2flink/tests/test_create_ngsild_tables.py @@ -14,7 +14,7 @@ # limitations under the License. 
# -from unittest.mock import patch +from unittest.mock import patch, MagicMock from rdflib import Namespace from rdflib.namespace import RDF import create_ngsild_tables @@ -24,11 +24,18 @@ sh = Namespace("http://www.w3.org/ns/shacl#") +class dotdict(dict): + __getattr__ = dict.get + __setattr__ = dict.__setitem__ + __delattr__ = dict.__delitem__ + + @patch('create_ngsild_tables.ruamel.yaml') @patch('create_ngsild_tables.Graph') @patch('create_ngsild_tables.configs') @patch('create_ngsild_tables.utils') -def test_main(mock_utils, mock_configs, mock_graph, +@patch('create_ngsild_tables.owlrl') +def test_main(mock_owlrl, mock_utils, mock_configs, mock_graph, mock_yaml, tmp_path): mock_configs.kafka_topic_ngsi_prefix = 'ngsild_prefix' mock_configs.kafka_bootstrap = 'bootstrap' @@ -36,14 +43,22 @@ def test_main(mock_utils, mock_configs, mock_graph, mock_utils.create_yaml_table.return_value = "yamltable" mock_utils.create_sql_view.return_value = "sqlview" mock_utils.create_yaml_view.return_value = "yamlview" + mock_utils.camelcase_to_snake_case.return_value = 'shacltype' mock_yaml.dump.return_value = "dump" g = mock_graph.return_value g.__contains__.return_value = True + g.__iadd__.return_value = g g.triples.return_value = [(ex.test, RDF.type, sh.NodeShape)] g.value.return_value = [(ex.test, sh.targetClass, ex.test2)] g.return_value = [(ex.test, sh.property, None)] - - create_ngsild_tables.main('kms/shacl.ttl', tmp_path) + shacltype = MagicMock() + shacltype.toPython.return_value = 'shacltype' + row = {'shacltype': shacltype, 'path': 'path'} + row = dotdict(row) + g.query.return_value = [row] + mock_owlrl.DeductiveClosure.expand.return_value = True + create_ngsild_tables.main('kms/shacl.ttl', 'kms/knowledge.ttl', tmp_path) assert os.path.exists(os.path.join(tmp_path, 'ngsild.yaml')) is True assert os.path.exists(os.path.join(tmp_path, 'ngsild.sqlite')) is True + assert os.path.exists(os.path.join(tmp_path, 'ngsild-kafka.yaml')) is True diff --git a/semantic-model/shacl2flink/tests/test_lib_bgp_translation_utils.py b/semantic-model/shacl2flink/tests/test_lib_bgp_translation_utils.py index a9960817..98d2aef9 100644 --- a/semantic-model/shacl2flink/tests/test_lib_bgp_translation_utils.py +++ b/semantic-model/shacl2flink/tests/test_lib_bgp_translation_utils.py @@ -32,7 +32,7 @@ def test_create_ngsild_mappings(monkeypatch): class Graph: def query(self, sparql): - assert "?this rdfs:subClassOf ." in sparql + assert "?this rdfs:subClassOf .\n" in sparql assert "?thisshape sh:targetClass ?this .\n?thisshape sh:property [ sh:path \ ; sh:property [ sh:path \ ngsild:hasObject; sh:class ?f ] ] ." in sparql @@ -40,8 +40,6 @@ def query(self, sparql): ; ] ." in sparql assert "?v1shape sh:targetClass ?v1 .\n?v1shape sh:property [ sh:path \ ; ] ." in sparql - assert "?thisshape = ?v1shape" in sparql - assert "?fshape = ?v2shape" in sparql return ['row'] relationships = { "https://industry-fusion.com/types/v0.9/hasFilter": True @@ -103,8 +101,6 @@ def query(self, sparql): ; ] ." in sparql assert "?v1shape sh:targetClass ?v1 .\n?v1shape sh:property [ sh:path \ ; ] ." 
in sparql - assert "?thisshape = ?v1shape" in sparql - assert "?pcshape = ?v2shape" in sparql return ['row'] relationships = { @@ -192,7 +188,7 @@ def test_process_rdf_spo_predicate_is_rdftype_object_is_iri(mock_isentity, mock_ p = RDF['type'] o = term.URIRef('https://example.com/obj') lib.bgp_translation_utils.process_rdf_spo(ctx, local_ctx, s, p, o) - assert local_ctx['where'] == "'<' || FILTER.type || '>' = ''" + assert local_ctx['where'] == "'<'||FILTER.type||'>' = ''" assert local_ctx['bounds'] == {'this': 'THISTABLE.id', 'f': 'FILTER.id'} @@ -243,7 +239,7 @@ def create_varname(var): p = RDF['type'] o = term.Variable('x') lib.bgp_translation_utils.process_rdf_spo(ctx, local_ctx, s, p, o) - assert local_ctx['where'] == "'<' || FILTER.type || '>' = xtable.subject" + assert local_ctx['where'] == "'<'||FILTER.type||'>' = xtable.subject" assert local_ctx['bounds'] == {'this': 'THISTABLE.id', 'f': 'FILTER.id', 'x': 'xtable.subject'} ctx = { 'namespace_manager': None, @@ -397,11 +393,8 @@ def test_process_rdf_spo_subject_is_no_entity_and_predicate_is_type(mock_isentit o = term.URIRef('https://example.com/obj') lib.bgp_translation_utils.process_rdf_spo(ctx, local_ctx, s, p, o) assert local_ctx['bgp_sql_expression'] == [{'statement': 'camelcase_to_snake_case_view AS FTABLE', - 'join_condition': ''}, - {'statement': 'rdf as testtable', - 'join_condition': "testtable.subject = '<' || FTABLE.`type` || '>' and \ -testtable.predicate = '' and testtable.object = \ -''"}] + 'join_condition': + "'<'||FTABLE.`type`||'>' = ''"}] assert local_ctx['bounds'] == {'this': 'THISTABLE.id', 'f': 'FTABLE.`id`'} assert local_ctx['bgp_tables'] == {'FTABLE': []} @@ -449,7 +442,7 @@ def test_process_rdf_spo_subject_is_entity_and_predicate_is_type(mock_isentity, p = RDF['type'] o = term.URIRef('https://example.com/obj') lib.bgp_translation_utils.process_rdf_spo(ctx, local_ctx, s, p, o) - assert local_ctx['where'] == "'<' || FILTER.type || '>' = ''" + assert local_ctx['where'] == "'<'||FILTER.type||'>' = ''" @patch('lib.bgp_translation_utils.get_rdf_join_condition') @@ -919,7 +912,7 @@ def create_varname(var): entity_variables, time_variables, selectvars) - assert result == "'<' ||v1.id|| '>'" + assert result == "'<'||v1.id||'>'" def test_get_rdf_join_condition_rdf(monkeypatch): diff --git a/semantic-model/shacl2flink/tests/test_lib_sparql_to_sql.py b/semantic-model/shacl2flink/tests/test_lib_sparql_to_sql.py index 7a28bb9c..c4ebe12e 100644 --- a/semantic-model/shacl2flink/tests/test_lib_sparql_to_sql.py +++ b/semantic-model/shacl2flink/tests/test_lib_sparql_to_sql.py @@ -76,6 +76,16 @@ def create_varname(var): function.expr = [term.Variable('var')] result = lib.sparql_to_sql.translate_function(ctx, function) assert result == 'SQL_DIALECT_CAST(SQL_DIALECT_STRIP_LITERAL{vartest} as FLOAT)' + function = MagicMock() + function.iri = term.URIRef('https://industry-fusion.com/aggregators/v0.9/test') + function.expr = [term.Variable('var')] + result = lib.sparql_to_sql.translate_function(ctx, function) + assert result == 'test(vartest)' + function = MagicMock() + function.iri = term.URIRef('https://industry-fusion.com/functions/v0.9/test') + function.expr = [term.Variable('var')] + result = lib.sparql_to_sql.translate_function(ctx, function) + assert result == 'test(vartest)' @patch('lib.sparql_to_sql.translate') @@ -271,9 +281,11 @@ def test_translate_sparql(mock_graph, mock_translate_query, mock_parseQuery, moc row2 = Bunch() row1.property = term.URIRef('property') row1.relationship = term.URIRef('relationship') + 
row1.kind = term.URIRef('kind') row2.property = term.URIRef('property2') row2.relationship = term.URIRef('relationship2') - g.__iadd__.return_value.query = MagicMock(side_effect=[[row1], [row2]]) + row2.kind = term.URIRef('kind') + g.query = MagicMock(side_effect=[[row1], [row2]]) relationships = { "https://industry-fusion.com/types/v0.9/hasFilter": True } @@ -306,6 +318,22 @@ def test_translate_filter(mock_translate): assert filter['where'] == 'wherex and where' +@patch('lib.sparql_to_sql.translate') +@patch('lib.sparql_to_sql.bgp_translation_utils') +def test_translate_aggregate_join(mock_translation_utils, mock_translate): + ctx = MagicMock() + elem = Bunch() + p = { + 'target_sql': 'target_sql', + 'where': 'where' + } + elem.p = p + lib.sparql_to_sql.translate_aggregate_join(ctx, elem) + assert mock_translate.called + assert mock_translation_utils.replace_attributes_table_expression.called + assert elem['where'] == 'where' + + def test_get_attribute_column_value(monkeypatch): ctx = { 'bounds': {'var': 'TABLE.`id`'}, diff --git a/test/bats/test-operators/operators-are-up-reloader.bats b/test/bats/test-operators/operators-are-up-reloader.bats new file mode 100644 index 00000000..dd6bce05 --- /dev/null +++ b/test/bats/test-operators/operators-are-up-reloader.bats @@ -0,0 +1,16 @@ +#!/usr/bin/env bats + +load "../lib/utils" +load "../lib/detik" + +# shellcheck disable=SC2034 # needed by detik libraries +DETIK_CLIENT_NAME="kubectl" +# shellcheck disable=SC2034 +DETIK_CLIENT_NAMESPACE="default" + +@test "verify that reloader-operator is up and running" { + + run try "at most 10 times every 30s to get pod named 'reloader-reloader' and verify that 'status' is 'running'" + [ "$status" -eq 0 ] + +} \ No newline at end of file diff --git a/test/prepare-platform.sh b/test/prepare-platform.sh index 99384e28..eb8cc791 100644 --- a/test/prepare-platform.sh +++ b/test/prepare-platform.sh @@ -93,6 +93,13 @@ echo --------------- sudo apt update sudo apt install openjdk-17-jdk openjdk-17-jre + +echo Install rdflib +echo -------------- +sudo apt update +sudo apt install python3-rdflib + + echo Installing maven echo ---------------------- wget https://archive.apache.org/dist/maven/maven-3/3.8.7/binaries/apache-maven-3.8.7-bin.tar.gz