Skip to content

Commit 1ddcca0

Browse files
committed
Add statistical distances computation
Signed-off-by: arthurPignet <arthur.pignet@mines-paristech.fr>
1 parent b042b87 commit 1ddcca0

File tree

1 file changed

+29
-14
lines changed

1 file changed

+29
-14
lines changed

mplc/contributivity.py

Lines changed: 29 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,13 @@
1212
from timeit import default_timer as timer
1313

1414
import numpy as np
15+
import tensorflow as tf
1516
from loguru import logger
1617
from scipy.stats import norm
1718
from sklearn.linear_model import LinearRegression
1819

1920
from . import constants
20-
from .multi_partner_learning import basic_mpl
21+
from .multi_partner_learning import basic_mpl, fast_mpl
2122

2223

2324
class KrigingModel:
@@ -1113,23 +1114,37 @@ def compute_relative_perf_matrix(self):
11131114

11141115
return relative_perf_matrix
11151116

1116-
def statistcal_distances_via_smodel(self):
    """Estimate per-partner statistical distances via the S-model.

    Trains a FastFedAvgSmodel on ``self.scenario``, then for each partner
    compares the global model's predictions on that partner's training data
    with the partner-specific S-model correction of those predictions,
    using three distances:

    - 'Kullback Leiber divergence': H(y_global, y_local) - H(y_global)
    - 'Bhattacharyya distance':     -log(BC)
    - 'Hellinger metric':           sqrt(1 - BC)

    Side effects: sets ``self.contributivity_scores`` (dict metric -> list,
    one entry per partner), ``self.normalized_scores``, ``self.name``,
    ``self.scores_std`` and ``self.computation_time_sec``.

    NOTE(review): assumes ``mpl.model.predict`` rows are probability
    distributions (softmax outputs), as required by the Bhattacharyya
    coefficient -- TODO confirm against the model definition.
    """
    start = timer()
    mpl = fast_mpl.FastFedAvgSmodel(self.scenario, self.scenario.mpl.pretrain_epochs)
    mpl.fit()
    cross_entropy = tf.keras.metrics.CategoricalCrossentropy()
    # Bug fix: the dict must be created with the same keys that the loop
    # below assigns to. It was previously initialized with
    # {'Kullbakc divergence', 'ma', 'Hennigen'}, so the assignments to
    # 'Bhattacharyya distance' and 'Hellinger metric' raised KeyError.
    metric_names = ('Kullback Leiber divergence',
                    'Bhattacharyya distance',
                    'Hellinger metric')
    self.contributivity_scores = {name: [0.0 for _ in mpl.partners_list]
                                  for name in metric_names}
    for i, partner_mpl in enumerate(mpl.partners_list):
        # Global-model predictions on this partner's data, and the
        # partner-specific S-model correction of those predictions.
        y_global = mpl.model.predict(partner_mpl.x_train)
        y_local = mpl.smodel_list[i].predict(y_global)
        # Cross-entropy H(y_global, y_local).
        cross_entropy.update_state(y_global, y_local)
        ce_local = cross_entropy.result().numpy()
        cross_entropy.reset_state()
        # Entropy of the global predictions: H(y_global) = H(y_global, y_global).
        cross_entropy.update_state(y_global, y_global)
        entropy_global = cross_entropy.result().numpy()
        cross_entropy.reset_state()
        # KL(y_global || y_local) = H(y_global, y_local) - H(y_global).
        self.contributivity_scores['Kullback Leiber divergence'][i] = ce_local - entropy_global
        # Bhattacharyya coefficient, averaged over the samples.
        bhattacharyya_coef = 0.0
        for y_g, y_l in zip(y_global, y_local):
            bhattacharyya_coef += np.sum(np.sqrt(y_g * y_l))
        bhattacharyya_coef /= len(y_global)
        self.contributivity_scores['Bhattacharyya distance'][i] = -np.log(bhattacharyya_coef)
        self.contributivity_scores['Hellinger metric'][i] = np.sqrt(1 - bhattacharyya_coef)

    self.name = "Statistic metric via S-model"
    self.scores_std = np.zeros(mpl.partners_count)
    # Normalize each metric's per-partner scores to sum to 1
    # (np scalar / list broadcasting yields an ndarray per metric).
    self.normalized_scores = {}
    for key, value in self.contributivity_scores.items():
        self.normalized_scores[key] = value / np.sum(value)
    end = timer()
    self.computation_time_sec = end - start
11351150

0 commit comments

Comments
 (0)