handler.py
from main import Main

if __name__ == '__main__':
    # Entry point: configure and run the full language-model pipeline.
    language_model = Main(
        # Raw and cleaned data locations
        data_dir='D:/Transformers Implementation/Language Model/Data/enwiki20201020',
        cleaned_data_dir='D:/Transformers Implementation/Language Model/Clean Project/cleaned_data/',
        processing_batch_size=5,
        preprocessed_save_dir='D:/Transformers Implementation/Language Model/Clean Project/sequences/',
        vocab_path='D:/Transformers Implementation/Language Model/bert_vocab_uncased.txt',
        custom_preprocessing_save_dir='D:/Transformers Implementation/Language Model/Clean Project/mlm/',
        # Masked-language-modelling settings
        mask_rate=0.25,
        seq_len=20,
        max_mask_per_seq=3,
        smallest_len_seq=5,
        # Model architecture
        embedding_dim=256,
        num_layers=1,
        intermediate_dim=512,
        num_heads=4,
        dropout=0.1,
        norm_epsilon=1e-5,
        # Training settings
        learning_rate=5e-4,
        log_dir='D:/Transformers Implementation/Language Model/Clean Project/logs/',
        transfer_learning_batch=1,
        models_save_path='D:/Transformers Implementation/Language Model/Clean Project/models/',
        model_checkpoint_path='D:/Transformers Implementation/Language Model/Clean Project/model_checkpoints/',
        epochs=5,
        batch_size=128,
        projector_dir='D:/Transformers Implementation/Language Model/Clean Project/projector/',
    )
    language_model.main()