diff --git a/ontology/FakeNewsScore.kif b/ontology/FakeNewsScore.kif
new file mode 100644
index 0000000..3ee36e9
--- /dev/null
+++ b/ontology/FakeNewsScore.kif
@@ -0,0 +1,28 @@
+(instance fakeNewsScore SNetAIService)
+(documentation fakeNewsScore EnglishLanguage "Calls dependent services, calculates summary result from their outputs
+and calculates the overall probability that the provided content contains fake news")
+
+(hasDependency fakeNewsScore uclnlp)
+(hasInput fakeNewsScore uclnlpOutput)
+; there should be a way to infer this from the previous axiom
+
+(hasDependency fakeNewsScore fnsBinaryClassifier)
+(hasInput fakeNewsScore fnsBinaryClassifierOutput)
+; there should be a way to infer this from the previous axiom
+
+(hasOutput fakeNewsScore fakeNewsScoreOutput)
+(instance fakeNewsScoreOutputType DataType)
+
+(=>
+  (and
+    (subclass ?fakeNewsScoreOutput JSONDef)
+    (hasField ?fakeNewsScoreOutput url URL)
+    (...)
+; all fields of actual data structure as approximately indicated here https://gitlab.com/nunet/fake-news-detection/fake_news_score/-/issues/22
+; actual data structure is determined by the code
+    (...)
+  )
+  (instance ?fakeNewsScoreOutput fakeNewsScoreOutputType)
+)
+
+; ideally we should include here the definition of RealizedFunction, as described in the Idris part of the AI-DSL
diff --git a/ontology/NuNet.kif b/ontology/NuNet.kif
new file mode 100644
index 0000000..213f8b9
--- /dev/null
+++ b/ontology/NuNet.kif
@@ -0,0 +1,47 @@
+(subclass NuNetEnabledSNetAIService SNetAIService)
+(documentation NuNetEnabledSNetAIService EnglishLanguage "SNetAIService which can be deployed on NuNetEnabledComputers and orchestrated via NuNet platform")
+
+(=>
+  (and
+    (hasMetadata ?SNetAIService ?SNetAIServiceMetadata)
+    (hasField ?SNetAIServiceMetadata RequiredComputingResources)
+    ; includes many other prerequisites and special protobuf definitions
+  )
+  (instance ?SNetAIService NuNetEnabledSNetAIService)
+)
+
+(subclass NuNetEnabledComputer Computer)
+(documentation NuNetEnabledComputer EnglishLanguage "A Computer which was onboarded to NuNet platform and complies to its requirements.")
+
+(=>
+  (and
+    (hasRun ?NuNetEnabledComputer NuNetOnboardingScript)
+    (hasMetadata ?NuNetEnabledComputer ?ComputerMetadata)
+    (hasField ?ComputerMetadata AvailableComputingResources)
+    (or
+      (runsOS ?NuNetEnabledComputer Linux)
+      (runsOS ?NuNetEnabledComputer Raspbian)
+    )
+    (or
+      (hasHardware ?NuNetEnabledComputer PC)
+      (hasHardware ?NuNetEnabledComputer RaspberryPi)
+    )
+  )
+  (instance ?NuNetEnabledComputer NuNetEnabledComputer)
+)
+
+; here we check if a NuNetEnabledSNetAIService can be deployed on a NuNetEnabledComputer
+; at any particular instance in time
+; since AvailableComputingResources depend on state of the computer at that time
+; (therefore it can be calculated only at runtime)
+
+(=>
+  (and
+    (hasMetadata ?NuNetEnabledComputer ?ComputerMetadata)
+    (hasField ?ComputerMetadata ?AvailableComputingResources)
+    (hasMetadata ?NuNetEnabledSNetAIService ?SNetAIServiceMetadata)
+    (hasField ?SNetAIServiceMetadata ?RequiredComputingResources)
+    (lessThanOrEqualTo ?RequiredComputingResources ?AvailableComputingResources)
+  )
+  (canDeploy ?NuNetEnabledSNetAIService ?NuNetEnabledComputer)
+)
diff --git a/ontology/NuNetEnabledComputer.kif b/ontology/NuNetEnabledComputer.kif
new file mode 100644
index 0000000..f89461e
--- /dev/null
+++ b/ontology/NuNetEnabledComputer.kif
@@ -0,0 +1,9 @@
+(instance machineOne NuNetEnabledComputer)
+(documentation machineOne EnglishLanguage "Free text explanation of the computer and its capabilities (if the owner wishes to expose)")
+
+(hasMetadata machineOne ComputerMetadata)
+; there should be a way to actually check that the metadata file is contained on the machine (or it returns it if needed)
+(hasField ComputerMetadata AvailableComputingResources)
+; there should be a way to check actual resources and raise them here
+(runsOS machineOne Linux)
+(hasHardware machineOne PC)
diff --git a/ontology/SingularityNet.kif b/ontology/SingularityNet.kif
new file mode 100644
index 0000000..cb998eb
--- /dev/null
+++ b/ontology/SingularityNet.kif
@@ -0,0 +1,41 @@
+(subclass SNetAIService ComputerProgram)
+(subclass SNetAIService SoftwareContainer)
+(documentation SNetAIService EnglishLanguage
+"Software package exposed via SNetPlatform and conforming to the special packaging rules")
+
+(subclass SNetAIServiceIO Descriptor)
+
+(instance hasInput BinaryPredicate)
+(domain hasInput 1 SNetAIService)
+(domain hasInput 2 SNetAIServiceIO)
+
+(instance hasOutput BinaryPredicate)
+(domain hasOutput 1 SNetAIService)
+(domain hasOutput 2 SNetAIServiceIO)
+
+; we can also use TernaryPredicate here in order to make it more open for change
+; currently I am not sure if it will make things more or less complicated
+
+(=>
+  (or
+    (subclass ?SNetAIServiceInputIO JSONDef)
+    (subclass ?SNetAIServiceInputIO ProtoBufDef)
+    (subclass ?SNetAIServiceInputIO IdrisTypeDef)
+  )
+  (instance ?SNetAIServiceInputIO SNetAIServiceIO)
+)
+
+(instance hasMetadata BinaryPredicate)
+(domain hasMetadata 1 SNetAIService)
+(domain hasMetadata 2 SNetAIServiceMetadata)
+
+; We can then define axioms defining valid json and protobuf on SNet platform
+; Further, we want to define all primitive data types allowed by service ontologies
+; however it is not clear how to do it in kif or SUMO
+; so this is listed only as indication but is not correct
+; this is subject of the future work
+
+(subclass JSONDef DataType)
+(subclass URL DataType)
+(subclass Text DataType)
+(subclass Boolean DataType)
diff --git a/ontology/fnsBinaryClassifier.kif b/ontology/fnsBinaryClassifier.kif
new file mode 100644
index 0000000..2b9dfa4
--- /dev/null
+++ b/ontology/fnsBinaryClassifier.kif
@@ -0,0 +1,22 @@
+(instance fnsBinaryClassifier SNetAIService)
+(documentation fnsBinaryClassifier EnglishLanguage "A pre-trained binary classification model for fake news detection")
+
+(hasInput fnsBinaryClassifier fnsBinaryClassifierInput)
+(hasOutput fnsBinaryClassifier fnsBinaryClassifierOutput)
+
+(instance fnsBinaryClassifierInputType DataType)
+(instance fnsBinaryClassifierOutputType DataType)
+
+(=>
+  (and
+    (hasField ?fnsBinaryClassifierInput mainText Text)
+  )
+  (instance ?fnsBinaryClassifierInput fnsBinaryClassifierInputType)
+)
+
+(=>
+  (and
+    (hasField ?fnsBinaryClassifierOutput fakeOrNot Boolean)
+  )
+  (instance ?fnsBinaryClassifierOutput fnsBinaryClassifierOutputType)
+)
diff --git a/ontology/sumo_install.sh b/ontology/sumo_install.sh
deleted file mode 100755
index 24211b2..0000000
--- a/ontology/sumo_install.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/bin/bash
-
-# comment out the following lines to turn off line-by-line mode
-set -x
-trap read debug
-
-# install everything on sumo directory under home
-cd ~
-mkdir sumo
-cd sumo
-mkdir workspace
-mkdir Programs
-cd Programs
-wget 'https://archive.apache.org/dist/tomcat/tomcat-8/v8.5.23/bin/apache-tomcat-8.5.23.zip'
-wget 'http://wordnetcode.princeton.edu/3.0/WordNet-3.0.tar.gz'
-wget 'http://wwwlehre.dhbw-stuttgart.de/~sschulz/WORK/E_DOWNLOAD/V_2.0/E.tgz'
-tar -xvzf E.tgz
-unzip apache-tomcat-8.5.23.zip
-rm apache-tomcat-8.5.23.zip
-cd ~/sumo/Programs/apache-tomcat-8.5.23/bin
-chmod 777 *
-cd ../webapps
-chmod 777 *
-cd ~/sumo/workspace/
-sudo apt-get install git
-git clone https://github.com/ontologyportal/sigmakee
-git clone https://github.com/ontologyportal/sumo
-cd ~
-mkdir .sigmakee
-cd .sigmakee
-mkdir KBs
-cp -R ~/sumo/workspace/sumo/* KBs
-me="$(whoami)"
-cp ~/sumo/workspace/sigmakee/config.xml ~/.sigmakee/KBs
-sed -i "s/theuser/$me/g" KBs/config.xml
-cd ~/sumo/Programs
-gunzip WordNet-3.0.tar.gz
-tar -xvf WordNet-3.0.tar
-cp WordNet-3.0/dict/* ~/.sigmakee/KBs/WordNetMappings/
-cd ~/sumo/Programs/E
-sudo apt-get install make
-sudo apt-get install gcc
-./configure
-make
-make install
-cd ~
-sudo apt-get install graphviz
-echo "export SIGMA_HOME=~/.sigmakee" >> .bashrc
-echo "export SIGMA_SRC=~/sumo/workspace/sigmakee" >> .bashrc
-echo "export ONTOLOGYPORTAL_GIT=~/sumo/workspace" >> .bashrc
-echo "export CATALINA_OPTS=\"$CATALINA_OPTS -Xms500M -Xmx7g\"" >> .bashrc
-echo "export CATALINA_HOME=~/sumo/Programs/apache-tomcat-8.5.23" >> .bashrc
-source .bashrc
-cd ~/sumo/workspace/sigmakee
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get install ant
-ant
diff --git a/ontology/uclnlp.kif b/ontology/uclnlp.kif
new file mode 100644
index 0000000..4b39f86
--- /dev/null
+++ b/ontology/uclnlp.kif
@@ -0,0 +1,38 @@
+(instance uclnlp SNetAIService)
+(documentation uclnlp EnglishLanguage "Forked and adapted component of stance detection algorithm by UCL Machine Reading group.")
+
+(hasInput uclnlp uclnlpInput)
+(hasOutput uclnlp uclnlpOutput)
+
+(instance uclnlpInputType DataType)
+(instance uclnlpOutputType DataType)
+
+(=>
+  (and
+    (hasField ?uclnlpInput titleText AlphaNumericString)
+    (hasField ?uclnlpInput mainText AlphaNumericString)
+  )
+  (instance ?uclnlpInput uclnlpInputType)
+)
+
+(=>
+  (and
+    (hasField ?uclnlpOutput agree RealNumber)
+    (hasField ?uclnlpOutput disagree RealNumber)
+    (hasField ?uclnlpOutput discuss RealNumber)
+    (hasField ?uclnlpOutput unrelated RealNumber)
+  )
+  (instance ?uclnlpOutput uclnlpOutputType)
+)
+
+; THIS COMMENT APPLIES TO ALL SERVICE DEFINITIONS
+;
+; here we enter the territory of type definitions and type checking
+; which is the subject of formal type-checking part of AI-DSL
+; therefore it is left unfinished here
+; however, what we could do here is:
+; (1) define correct serviceInput and serviceOutput types (unique for each service)
+; (2) if possible, provide proof that if a service receives data of correct type on input, then it will output correctly typed data
+; (3) if that will not be possible (which is the default option when actual service AIs are not written in Idris):
+; (3.1) check if input data is of correct type at runtime and refuse to start service if it is not;
+; (3.2) check if output data is of correct type before sending it to the caller and raise error if it is not so;