ts_distance.py
import numpy as np
from scipy.special import rel_entr
from sklearn.metrics import mutual_info_score

# Dimensions of the label stack: 72 time steps over a 480 x 480 raster.
num_of_ts = 72
x_size = 480
y_size = 480

# Per-pixel GMM label time series and the change labels that assign
# each pixel to a group.
data = np.load("C:\\Users\\karm_ch\\Documents\\LDA_uncertainty_paper\\time_series_numpy\\labeler_2_ac_2018_03_14.238490133948005_53.76003136277209_0_gmm_labels2.npy")
data_temp1 = np.reshape(data, (num_of_ts, x_size * y_size))
dataset = data_temp1.T  # one row per pixel, one column per time step
labels = np.load("C:\\Users\\karm_ch\\Documents\\LDA_uncertainty_paper\\time_series_numpy\\labeler_2_ac_2018_03_14.238490133948005_53.76003136277209_0_change_gmlabels04.npy")

# Map each change label to one representative time series; pixels seen later
# with the same label overwrite earlier ones, so each label keeps its last
# series. (Renamed from `dict` to avoid shadowing the builtin.)
series_by_label = {}
for label, series in zip(labels, dataset):
    series_by_label[label] = series
string_1 = series_by_label.get(0)
string_2 = series_by_label.get(1)

def kl_divergence(a, b):
    """KL divergence D(a || b); assumes a and b are probability vectors."""
    return sum(a[i] * np.log(a[i] / b[i]) for i in range(len(a)))

'''
print('KL-divergence(string_1 || string_2): %.3f' % kl_divergence(string_1, string_2))
print('KL-divergence(string_2 || string_1): %.3f' % kl_divergence(string_2, string_1))
# D(p || p) = 0
print('KL-divergence(string_1 || string_1): %.3f' % kl_divergence(string_1, string_1))

# Using the SciPy rel_entr function (elementwise relative entropy):
string_1 = np.array(string_1)
string_2 = np.array(string_2)
print('KL-divergence(string_1 || string_2): %.3f' % sum(rel_entr(string_1, string_2)))
print('KL-divergence(string_2 || string_1): %.3f' % sum(rel_entr(string_2, string_1)))
print('KL-divergence(string_1 || string_1): %.3f' % sum(rel_entr(string_1, string_1)))
'''

# Mutual information between the two label time series.
print(mutual_info_score(list(string_1), list(string_2)))
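
# --- Minimal sketch, not part of the original script ---
# kl_divergence and rel_entr expect probability vectors, while string_1 and
# string_2 hold raw GMM class labels. One way to make the KL comparison
# well-defined is to convert each label series into an empirical label
# distribution first. `n_classes` below is an assumed parameter (the number
# of GMM classes), not something read from the data files above.
def to_distribution(series, n_classes, eps=1e-12):
    # Count how often each class label occurs, normalize to a probability
    # vector, and add a small epsilon so log(0) / division by zero cannot
    # occur inside the KL terms.
    counts = np.bincount(np.asarray(series, dtype=int), minlength=n_classes)
    p = counts / counts.sum()
    return p + eps

# Example usage (assuming 4 GMM classes):
# p = to_distribution(string_1, 4)
# q = to_distribution(string_2, 4)
# print('KL(p || q): %.3f' % sum(rel_entr(p, q)))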