Skip to content

Commit 7dab8a6

Browse files
author
Francisco Santos
committed
pategan base
1 parent ae64c02 commit 7dab8a6

2 files changed

Lines changed: 86 additions & 0 deletions

File tree

src/ydata_synthetic/synthesizers/regular/pategan/__init__.py

Whitespace-only changes.
Lines changed: 86 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,86 @@
1+
"PATEGAN implementation supporting Differential Privacy budget specification."
2+
from typing import List
3+
4+
# pylint: disable = W0622, E0401
5+
import tqdm
6+
from tensorflow import clip_by_value
7+
from tensorflow.dtypes import cast, float64
8+
from tensorflow.keras import Model
9+
from tensorflow.keras.layers import Dense, Input, ReLU
10+
from tensorflow.keras.optimizers import Adam
11+
from tensorflow.math import abs, exp, pow, reduce_sum, square
12+
from tensorflow.keras.losses import BinaryCrossentropy
13+
14+
from ydata_synthetic.synthesizers.gan import BaseModel
15+
16+
17+
class PATEGAN(BaseModel):
    "A basic PATEGAN synthesizer implementation with configurable differential privacy budget."

    __MODEL__='PATEGAN'

    def __init__(self, model_parameters, n_teachers: int, delta: float, epsilon: float):
        """Store the PATE-specific privacy parameters on top of the base GAN setup.

        Args:
            model_parameters: shared GAN hyper-parameters consumed by BaseModel
                (batch_size, lr, noise_dim, layers_dim, ...).
            n_teachers: number of teacher discriminators in the PATE ensemble.
            delta: differential privacy delta parameter.
            epsilon: differential privacy budget (epsilon).
        """
        super().__init__(model_parameters)
        self.n_teachers = n_teachers
        self.delta = delta
        self.epsilon = epsilon

    def define_gan(self):
        """Build and compile the generator, the student discriminator and the
        ensemble of n_teachers teacher discriminators."""
        def discriminator():
            # A fresh network per call so teachers do not share weights.
            return Discriminator(self.batch_size).build_model((self.data_dim,), self.layers_dim)

        self.generator = Generator(self.batch_size). \
            build_model(input_shape=(self.noise_dim,), dim=self.layers_dim, data_dim=self.data_dim)
        self.s_discriminator = discriminator()
        # Loop variable is unused — use `_` instead of `i`.
        self.t_discriminators = [discriminator() for _ in range(self.n_teachers)]

        generator_optimizer = Adam(learning_rate=self.g_lr)
        discriminator_optimizer = Adam(learning_rate=self.d_lr)

        # from_logits=True because none of the networks end in a sigmoid.
        loss_fn = BinaryCrossentropy(from_logits=True)
        self.generator.compile(loss=loss_fn, optimizer=generator_optimizer)
        self.s_discriminator.compile(loss=loss_fn, optimizer=discriminator_optimizer)
        for teacher in self.t_discriminators:
            teacher.compile(loss=loss_fn, optimizer=discriminator_optimizer)

    # pylint: disable = C0103
    def _moments_acc(self, votes, lap_scale, l_list):
        """Moments accountant update for the PATE privacy analysis.

        Args:
            votes: teacher vote counts (tensor); compared against n_teachers.
            lap_scale: inverse scale of the Laplace noise added to the votes.
            l_list: iterable of moment orders ``l`` to accumulate.

        Returns:
            float64 tensor with one clipped, summed moment value per ``l``.
        """
        q = (2 + lap_scale * abs(2 * votes - self.n_teachers))/(4 * exp(lap_scale * abs(2 * votes - self.n_teachers)))

        update = []
        for l in l_list:
            # Clip bound grows with the noise scale and the moment order.
            clip = 2 * square(lap_scale) * l * (l + 1)
            t = (1 - q) * pow((1 - q) / (1 - exp(2*lap_scale) * q), l) + q * exp(2 * lap_scale * l)
            update.append(reduce_sum(clip_by_value(t, clip_value_min=-clip, clip_value_max=clip)))
        return cast(update, dtype=float64)

    def train(self, data, train_arguments, num_cols: List[str], cat_cols: List[str],
              preprocess: bool = True):
        # NOTE(review): training loop not implemented yet — deliberate placeholder.
        return None
61+
62+
63+
class Discriminator(Model):
    "Fully-connected discriminator used for both the student and the teachers."

    def __init__(self, batch_size):
        # BUG FIX: a tf.keras.Model subclass must call super().__init__() before
        # any attribute assignment, otherwise Keras raises a RuntimeError.
        super().__init__()
        self.batch_size = batch_size

    def build_model(self, input_shape, dim):
        """Return a functional Keras model mapping a record to a single logit.

        Args:
            input_shape: shape tuple of one input record, e.g. ``(data_dim,)``.
            dim: base width; layers are sized ``dim * 4`` and ``dim * 2``.
        """
        # Renamed from `input` to avoid shadowing the builtin.
        model_input = Input(shape=input_shape, batch_size=self.batch_size)
        x = Dense(dim * 4)(model_input)
        x = ReLU()(x)
        # No activation on the hidden/output Dense layers: losses use from_logits=True.
        x = Dense(dim * 2)(x)
        x = Dense(1)(x)
        return Model(inputs=model_input, outputs=x)
74+
75+
76+
class Generator(Model):
    "Fully-connected generator mapping noise vectors to synthetic records."

    def __init__(self, batch_size):
        # BUG FIX: a tf.keras.Model subclass must call super().__init__() before
        # any attribute assignment, otherwise Keras raises a RuntimeError.
        super().__init__()
        self.batch_size = batch_size

    def build_model(self, input_shape, dim, data_dim):
        """Return a functional Keras model mapping noise to a synthetic record.

        Args:
            input_shape: shape tuple of the noise input, e.g. ``(noise_dim,)``.
            dim: base width; hidden layers are sized ``dim`` and ``dim * 2``.
            data_dim: width of the generated record (output layer size).
        """
        # Renamed from `input` to avoid shadowing the builtin.
        model_input = Input(shape=input_shape, batch_size=self.batch_size)
        x = Dense(dim)(model_input)
        x = ReLU()(x)
        # Unbounded linear output; downstream losses use from_logits=True.
        x = Dense(dim * 2)(x)
        x = Dense(data_dim)(x)
        return Model(inputs=model_input, outputs=x)

0 commit comments

Comments
 (0)