Commit bce8227b authored by npedot

adds mixed ldb sample

parent a5414c1c
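# Mixed LDB sample: combines a numeric feature (eta, the fourth embedding
# component) with categorical predicates (Adult, Married, Worker) in a single
# knowledge base; Adult(c) is not axiomatised directly and is inferred after training.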
import logging; logging.basicConfig(level=logging.INFO)
import numpy as np
import tensorflow as tf
import code.logictensornetworks_wrapper as ltnw
import code.logictensornetworks as ltn
# Hyper-parameters
ltn.LAYERS = 10
ltnw.set_universal_aggreg('min')
EMBEDDING_SIZE = 6
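# Tuple constants: only the fourth embedding component (eta, the age value) is
# learnable, within [0,100]; all other components are fixed (min == max).
# 'ten' and 'twenty' below are fixed numeric targets for eta.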
ltnw.constant('a',min_value=[1.,2.,3.,0.,1.,2.],max_value=[1.,2.,3.,100.,1.,2.])
ltnw.constant('b',min_value=[4.,5.,6.,0.,1.,2.],max_value=[4.,5.,6.,100.,1.,2.])
ltnw.constant('c',min_value=[4.,5.,6.,0.,1.,2.],max_value=[4.,5.,6.,100.,1.,2.])
ltnw.constant('e',min_value=[4.,5.,6.,0.,1.,2.],max_value=[4.,5.,6.,100.,1.,2.])
ltnw.constant('f',min_value=[4.,5.,6.,0.,1.,2.],max_value=[4.,5.,6.,100.,1.,2.])
ltnw.constant('g',min_value=[4.,5.,6.,0.,1.,2.],max_value=[4.,5.,6.,100.,1.,2.])
ltnw.constant("ten",[10.])
ltnw.constant("twenty",[20.])
print(ltnw.CONSTANTS)
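# Numeric part: eta reads the fourth embedding component; close_eta is a fuzzy
# equality on the [0,100] scale (1 when equal, decreasing linearly with distance).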
def _close_eta(x,y):
    return 1-tf.abs(x-y)/100.
ltnw.function('eta',4,fun_definition=lambda x:x[:,3])
ltnw.predicate("close_eta",2,_close_eta)
ltnw.axiom("close_eta(eta(a),ten)")
ltnw.axiom("close_eta(eta(b),twenty)")
ltnw.axiom("close_eta(eta(b),eta(c))")
def simple_net():
    N = tf.Variable(tf.random_normal((EMBEDDING_SIZE, EMBEDDING_SIZE),
                                     stddev=0.1))
    def net(x):
        a = tf.sigmoid(tf.reduce_sum(
            tf.multiply(tf.matmul(x, N), x), axis=1))
        print("A SHAPE: {}".format(a.shape))
        return a
    return net
ltnw.predicate("Adult", EMBEDDING_SIZE, pred_definition=simple_net())
ltnw.predicate("Married", EMBEDDING_SIZE, pred_definition=simple_net())
ltnw.predicate("Worker", EMBEDDING_SIZE, pred_definition=simple_net())
constants = list('abcefg')
ltnw.variable('z', tf.concat(
    [ltnw.CONSTANTS[c] for c in constants], axis=0))
ltnw.axiom('forall z:(Worker(z) -> Adult(z))')
ltnw.axiom('forall z:(Married(z) -> Adult(z))')
#ltnw.axiom('forall z:(close_eta(z,twenty) -> Adult(z))')
ltnw.axiom('~Adult(a)')
ltnw.axiom('Adult(b)')
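# Learn a grounding that (approximately) satisfies all axioms, then query it.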
ltnw.initialize_knowledgebase(optimizer=tf.train.RMSPropOptimizer(learning_rate=.01),
                              initial_sat_level_threshold=.4)
ltnw.train(max_epochs=20000)
print("eta(a)",ltnw.ask('eta(a)'))
print("eta(b)",ltnw.ask('eta(b)'))
print("eta(c)",ltnw.ask('eta(c)'))
print("adult(a)",ltnw.ask('Adult(a)'))
print("adult(b)",ltnw.ask('Adult(b)'))
print("adult(c)",ltnw.ask('Adult(c)'))
@@ -153,7 +153,7 @@ We can use LTNW for the value prediction step to correct or calculate unknown values
To understand how the translation steps for a table are carried out in LTNW, we present a gradual series of practical examples, each accompanied by the differences it highlights.
### Minimal dependency example
Suppose a table with two tuples, identified as t1 and t2; each tuple has two properties, (A) Assumed and (B) Salary, linked by a functional dependency A => B.
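A minimal sketch of how this dependency could be written with the LTN wrapper, in the style of the sample above; the constants t1 and t2, the predicates Assumed and Salary, and the use of the wrapper's default predicate grounding are illustrative assumptions, not the exact code of the case study:

```python
import tensorflow as tf
import code.logictensornetworks_wrapper as ltnw

EMBEDDING_SIZE = 2

# Two tuples as learnable constants (illustrative names).
ltnw.constant('t1', min_value=[0.] * EMBEDDING_SIZE, max_value=[1.] * EMBEDDING_SIZE)
ltnw.constant('t2', min_value=[0.] * EMBEDDING_SIZE, max_value=[1.] * EMBEDDING_SIZE)

# The two properties as predicates over the tuple embedding (default grounding).
ltnw.predicate('Assumed', EMBEDDING_SIZE)
ltnw.predicate('Salary', EMBEDDING_SIZE)

# The functional dependency A => B, quantified over all tuples.
ltnw.variable('t', tf.concat([ltnw.CONSTANTS['t1'], ltnw.CONSTANTS['t2']], axis=0))
ltnw.axiom('forall t:(Assumed(t) -> Salary(t))')

# Facts about the individual tuples.
ltnw.axiom('Assumed(t1)')
ltnw.axiom('~Assumed(t2)')

ltnw.initialize_knowledgebase(optimizer=tf.train.RMSPropOptimizer(learning_rate=.01))
ltnw.train(max_epochs=1000)
print('Salary(t1)', ltnw.ask('Salary(t1)'))
```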
@@ -196,6 +196,14 @@ we can extend the constraints, for example, to a Euclidean space.
Case study: [test_ldb_numeric_ecludean](../docker/docker-ldb/ldb/sample/basic_numeric_euclidean.py)
### Minimal mixed example
We will mix numeric and categorical features. Given a numeric age feature and a categorical adult feature, we will try to compute the adult feature for constant 'c', given complete axiomatic definitions of constants 'a' and 'b'.
Case study: [test_ldb_mixed](../docker/docker-ldb/ldb/sample/basic_mixed.py)
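Condensed from the sample above: the numeric side ties eta(c) to eta(b), while the categorical Adult predicate is axiomatised only for 'a' and 'b'; after training, the value of Adult(c) is read from the learned grounding.

```python
# Numeric side: eta(a) close to 10, eta(b) close to 20, eta(c) tied to eta(b).
ltnw.axiom("close_eta(eta(a),ten)")
ltnw.axiom("close_eta(eta(b),twenty)")
ltnw.axiom("close_eta(eta(b),eta(c))")

# Categorical side: Adult is only fixed for a and b; c is left to inference.
ltnw.axiom('~Adult(a)')
ltnw.axiom('Adult(b)')

# After training, query the inferred truth value for c.
print("adult(c)", ltnw.ask('Adult(c)'))
```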
### Distribution example
To understand how the distribution of values is affected, let's start from a table in CSV format:
@@ -283,3 +291,5 @@ data cleaning proposal
* [Logic Tensor Networks: Deep Learning and Logical Reasoning from Data and Knowledge](https://www.researchgate.net/publication/303969790_Logic_Tensor_Networks_Deep_Learning_and_Logical_Reasoning_from_Data_and_Knowledge)