Skip to content

Commit ba2e34c

Browse files
Fix partial ModelParams documentation
1 parent 13db6f0 commit ba2e34c

File tree

1 file changed

+11
-6
lines changed

1 file changed

+11
-6
lines changed

precise/model.py

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,17 +29,20 @@
2929
class ModelParams:
3030
"""
3131
Attributes:
32-
recurrent_units:
33-
dropout:
34-
extra_metrics: Whether to include false positive and false negative metrics
32+
recurrent_units: Number of GRU units. Higher values increase computation
33+
but allow more complex learning. Too high a value causes overfitting
34+
dropout: Reduces overfitting but can potentially decrease accuracy if too high
35+
extra_metrics: Whether to include false positive and false negative metrics while training
3536
skip_acc: Whether to skip accuracy calculation while training
37+
loss_bias: Near 1.0 reduces false positives. See <set_loss_bias>
38+
freeze_till: Layer number from start to freeze after loading (allows for partial training)
3639
"""
3740
recurrent_units = attr.ib(20) # type: int
3841
dropout = attr.ib(0.2) # type: float
3942
extra_metrics = attr.ib(False) # type: bool
4043
skip_acc = attr.ib(False) # type: bool
4144
loss_bias = attr.ib(0.7) # type: float
42-
freeze_till = attr.ib(0) # type: bool
45+
freeze_till = attr.ib(0) # type: int
4346

4447

4548
def load_precise_model(model_name: str) -> Any:
@@ -73,7 +76,8 @@ def create_model(model_name: Optional[str], params: ModelParams) -> 'Sequential'
7376
model = Sequential()
7477
model.add(GRU(
7578
params.recurrent_units, activation='linear',
76-
input_shape=(pr.n_features, pr.feature_size), dropout=params.dropout, name='net'
79+
input_shape=(
80+
pr.n_features, pr.feature_size), dropout=params.dropout, name='net'
7781
))
7882
model.add(Dense(1, activation='sigmoid'))
7983

@@ -82,5 +86,6 @@ def create_model(model_name: Optional[str], params: ModelParams) -> 'Sequential'
8286
set_loss_bias(params.loss_bias)
8387
for i in model.layers[:params.freeze_till]:
8488
i.trainable = False
85-
model.compile('rmsprop', weighted_log_loss, metrics=(not params.skip_acc) * metrics)
89+
model.compile('rmsprop', weighted_log_loss,
90+
metrics=(not params.skip_acc) * metrics)
8691
return model

0 commit comments

Comments
 (0)