Skip to content
GitLab
Menu
Projects
Groups
Snippets
/
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
Pedot Nicola
ltn-db-clean
Commits
a89d95dc
Commit
a89d95dc
authored
Jul 17, 2019
by
npedot
Browse files
adds numeric samples
parent
b6c551fe
Changes
3
Hide whitespace changes
Inline
Side-by-side
docker/docker-ldb/ldb/code/numeric.py
View file @
a89d95dc
import
logging
;
logging
.
basicConfig
(
level
=
logging
.
INFO
)
import
sys
import
pandas
as
pd
import
tensorflow
as
tf
import
code.logictensornetworks_wrapper
as
ltnw
import
code.logictensornetworks_library
as
ltnl
import
numpy
as
np
import
code.logictensornetworks_wrapper
as
ltnw
# Deterministic sample of normalized ages in [0, 1] (float32 for the LTN backend).
# Removed a dead assignment: a random (6, 1) array was drawn and immediately
# overwritten by this hard-coded array; the random variant is kept for reference:
# ages = np.random.uniform(0, 1, (6, 1)).astype(np.float32)
ages = np.array([0.10, 0.20, 0.33, 0.11]).astype(np.float32)
print(ages)

# "minore" (Italian: minor) — select the ages below the 0.18 threshold.
# Boolean-mask indexing is equivalent to the original ages[np.where(...)]
# on this 1-D array and avoids the intermediate index tuple.
minore = ages[ages < 0.18]
print(minore)
...
...
docker/docker-ldb/ldb/code/numeric_class.py
0 → 100644
View file @
a89d95dc
# -*- coding: utf-8 -*-
import
logging
;
logging
.
basicConfig
(
level
=
logging
.
INFO
)
import
numpy
as
np
import
matplotlib.pyplot
as
plt
import
code.logictensornetworks_wrapper
as
ltnw
nr_samples = 500

# Uniform random 2-D points in the unit square, float32 for the LTN backend.
data = np.random.uniform([0, 0], [1., 1.], (nr_samples, 2)).astype(np.float32)

# Squared Euclidean distance of every point from the centre (.5, .5).
# Hoisted: the original evaluated this identical expression twice, once per mask.
sq_dist = np.sum(np.square(data - [.5, .5]), axis=1)

# Points strictly inside the circle of squared radius .09 (radius 0.3)
# belong to class A; everything else is the negative class.
data_A = data[sq_dist < .09]
data_not_A = data[sq_dist >= .09]
# Bind the sampled arrays to LTN variables; the "?" names must match the
# axioms declared below.
for ltn_name, samples in (("?data_A", data_A),
                          ("?data_not_A", data_not_A),
                          ("?data", data)):
    ltnw.variable(ltn_name, samples)

# A is a learnable predicate over 2-D points.
ltnw.predicate("A", 2)

# Supervision: every centre-circle point satisfies A, every other point does not.
for formula in ("forall ?data_A: A(?data_A)",
                "forall ?data_not_A: ~A(?data_not_A)"):
    ltnw.axiom(formula)

# Build the knowledge base and train until it is (almost) satisfied.
ltnw.initialize_knowledgebase(initial_sat_level_threshold=.1)
sat_level = ltnw.train(track_sat_levels=1000, sat_level_epsilon=.99)

# Query two fresh points: "a" lies inside the circle, "b" is a far corner.
ltnw.constant("a", [0.25, .5])
ltnw.constant("b", [1., 1.])
print("a is in A: %s" % ltnw.ask("A(a)"))
print("b is in A: %s" % ltnw.ask("A(b)"))
docker/docker-ldb/ldb/code/numeric_features.py
0 → 100644
View file @
a89d95dc
import
logging
;
logging
.
basicConfig
(
level
=
logging
.
INFO
)
import
numpy
as
np
import
tensorflow
as
tf
import
code.logictensornetworks_wrapper
as
ltnw
#ltnw.constant('a',[1.,2.,3.,16])
#ltnw.constant('b',[1.,2.,3.,16])
# Individuals encoded as 2-feature vectors; the second feature is the age,
# projected out by the "eta" function below. "maggiorenne" (Italian: of legal
# age) is the reference individual carrying the 18-year threshold.
# Cities are (longitude, latitude)-style pairs for the "close" predicate.
# Bug fix: "parigi" was [48., 2.] — (lat, lon) — while roma/frascati use
# (lon, lat); a Euclidean-distance predicate needs one consistent order,
# so Paris is now [2., 48.] to match the others.
_individuals = [
    ('a', [1, 15]),
    ('b', [1, 17]),
    ('c', [1, 18]),
    ('d', [1, 22]),
    ('e', [1, 99]),
    ('maggiorenne', [1, 18]),
    ('roma', [12., 41.]),
    ('frascati', [12.5, 42.]),
    ('parigi', [2., 48.]),
]
for _name, _features in _individuals:
    ltnw.constant(_name, _features)

# "eta" (age): projects the second feature out of a batch of individuals.
ltnw.function("eta", 1, fun_definition=lambda batch: batch[:, 1])

ltnw.initialize_knowledgebase()
def _maggiorenne(x):
    """Crisp "is of legal age" indicator: 1.0 where x >= 18, else 0.0.

    Bug fix: the previous implementation used tf.less_equal(x, 18), which
    returned 1.0 for *minors* — the opposite of "maggiorenne" (Italian:
    of legal age). No caller in this file uses it yet, so the fix is safe.
    """
    return tf.cast(tf.greater_equal(x, 18), dtype=tf.float32)
def _crisp_leq(x, y):
    """Crisp less-or-equal predicate: elementwise 1.0 where x <= y, else 0.0."""
    truth = tf.less_equal(x, y)
    return tf.cast(truth, dtype=tf.float32)
def _dist(x, y):
    """Row-wise closeness in (0, 1]: exp(-||x - y||).

    Returns a column vector (keepdims=True): 1.0 when the points coincide,
    decaying towards 0 as they move apart.
    """
    diff = tf.subtract(x, y)
    sq_norm = tf.reduce_sum(tf.square(diff), axis=1, keepdims=True)
    return tf.exp(-tf.sqrt(sq_norm))
# Register the crisp comparison predicates with the knowledge base.
ltnw.predicate("cleq", 2, _crisp_leq)
ltnw.predicate("close", 2, _dist)

# Degree to which each individual's age reaches the 18-year threshold.
for age_query in ("cleq(eta(maggiorenne),eta(a))",
                  "cleq(eta(maggiorenne),eta(b))",
                  "cleq(eta(maggiorenne),eta(c))",
                  "cleq(eta(maggiorenne),eta(d))",
                  "cleq(eta(maggiorenne),eta(e))"):
    print(ltnw.ask(age_query))

# Constants may also be added after the knowledge base is initialized.
ltnw.constant('z', [1, 12])
print("z {}".format(ltnw.ask("cleq(eta(maggiorenne),eta(z))")))

# Geographic closeness queries between the city constants.
for city_query in ('close(roma,parigi)',
                   'close(roma,frascati)',
                   'close(roma,roma)'):
    print(ltnw.ask(city_query))
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment