-
Notifications
You must be signed in to change notification settings - Fork 1
/
configuration.py
30 lines (26 loc) · 1.01 KB
/
configuration.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File : configuration.py
# Author : Yan <yanwong@126.com>
# Date : 08.04.2020
# Last Modified Date: 14.04.2020
# Last Modified By : Yan <yanwong@126.com>
"""Default configuration for model architecture and training."""
class ModelConfig:
    """Hyperparameters describing the model architecture."""

    def __init__(self):
        # Embedding layer.
        self.d_word = 100            # dimensionality of the word embeddings
        self.d_word_lstm = 100       # hidden size of the word-level LSTM
        self.non_static_emb = True   # if True, input embeddings are fine-tuned

        # Regularization.
        self.emb_dropout = 0.0       # dropout on the input embeddings (0 = none)
        self.lstm_dropout = 0.0      # dropout applied around the LSTM (0 = none)
        self.l2_lambda = 0.0001      # L2 regularization coefficient

        # Output layer.
        self.n_tags = 4              # number of output tag classes
class TrainingConfig(object):
    """Wrapper class for training hyperparameters."""
    # NOTE(review): the original docstring said "model hyperparameters" — a
    # copy-paste from ModelConfig; these are training-time settings.

    def __init__(self):
        self.learning_rate = 0.01  # optimizer learning rate
        self.clip_gradients = 5.0  # gradient clipping threshold — presumably a
                                   # max norm; confirm against the optimizer code
        self.n_epochs = 100        # number of epochs over the training set
        self.freq_eval = 100       # evaluate on dev every freq_eval steps
        self.batch_size = 64       # number of examples per training batch