2023-05-02 10:15:32,701 - wn_one_to_x - [INFO] - {'dataset': 'WN18RR', 'name': 'wn_one_to_x', 'gpu': '0', 'train_strategy': 'one_to_x', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.1, 'drop': 0.2, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
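
The line above serializes the full run configuration as a Python dict literal, so the exact hyperparameters can be recovered from the log for a rerun. A minimal sketch of doing that with the standard library only; parse_config and the truncated LOG_LINE below are illustrative names, not part of the training codebase:

import ast
from argparse import Namespace

def parse_config(log_line: str) -> Namespace:
    """Rebuild the run arguments from the {...} dict literal the logger printed."""
    start, end = log_line.index('{'), log_line.rindex('}') + 1
    params = ast.literal_eval(log_line[start:end])  # safe eval of the dict literal
    return Namespace(**params)

LOG_LINE = "2023-05-02 10:15:32,701 - wn_one_to_x - [INFO] - {'dataset': 'WN18RR', 'lr': 0.0001, 'batch_size': 128}"
args = parse_config(LOG_LINE)
print(args.dataset, args.lr, args.batch_size)  # -> WN18RR 0.0001 128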
2023-05-02 10:15:41,321 - wn_one_to_x - [INFO] - [E:0| 0]: Train Loss:0.69448, Val MRR:0.0, wn_one_to_x
2023-05-02 10:17:10,860 - wn_one_to_x - [INFO] - [E:0| 100]: Train Loss:0.41174, Val MRR:0.0, wn_one_to_x
2023-05-02 10:18:40,119 - wn_one_to_x - [INFO] - [E:0| 200]: Train Loss:0.28248, Val MRR:0.0, wn_one_to_x
2023-05-02 10:20:08,980 - wn_one_to_x - [INFO] - [E:0| 300]: Train Loss:0.21438, Val MRR:0.0, wn_one_to_x
2023-05-02 10:21:37,749 - wn_one_to_x - [INFO] - [E:0| 400]: Train Loss:0.17293, Val MRR:0.0, wn_one_to_x
2023-05-02 10:23:07,320 - wn_one_to_x - [INFO] - [E:0| 500]: Train Loss:0.14516, Val MRR:0.0, wn_one_to_x
2023-05-02 10:24:36,368 - wn_one_to_x - [INFO] - [E:0| 600]: Train Loss:0.12528, Val MRR:0.0, wn_one_to_x
2023-05-02 10:26:05,109 - wn_one_to_x - [INFO] - [E:0| 700]: Train Loss:0.11035, Val MRR:0.0, wn_one_to_x
2023-05-02 10:27:33,013 - wn_one_to_x - [INFO] - [E:0| 800]: Train Loss:0.098731, Val MRR:0.0, wn_one_to_x
2023-05-02 10:29:01,435 - wn_one_to_x - [INFO] - [E:0| 900]: Train Loss:0.089433, Val MRR:0.0, wn_one_to_x
2023-05-02 10:30:30,247 - wn_one_to_x - [INFO] - [E:0| 1000]: Train Loss:0.081826, Val MRR:0.0, wn_one_to_x
2023-05-02 10:31:58,904 - wn_one_to_x - [INFO] - [E:0| 1100]: Train Loss:0.075488, Val MRR:0.0, wn_one_to_x
2023-05-02 10:33:27,526 - wn_one_to_x - [INFO] - [E:0| 1200]: Train Loss:0.070127, Val MRR:0.0, wn_one_to_x
2023-05-02 10:34:56,572 - wn_one_to_x - [INFO] - [E:0| 1300]: Train Loss:0.065534, Val MRR:0.0, wn_one_to_x
2023-05-02 10:35:46,074 - wn_one_to_x - [INFO] - [Epoch:0]: Training Loss:0.06324
2023-05-02 10:35:46,324 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 10:36:01,777 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 10:36:17,750 - wn_one_to_x - [INFO] - [Evaluating Epoch 0 valid]:
MRR: Tail : 0.01458, Head : 0.00235, Avg : 0.00846
2023-05-02 10:36:21,147 - wn_one_to_x - [INFO] - [Epoch 0]: Training Loss: 0.063238, Valid MRR: 0.00846,
2023-05-02 10:36:22,057 - wn_one_to_x - [INFO] - [E:1| 0]: Train Loss:0.0097341, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:37:50,978 - wn_one_to_x - [INFO] - [E:1| 100]: Train Loss:0.0095012, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:39:20,790 - wn_one_to_x - [INFO] - [E:1| 200]: Train Loss:0.009297, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:40:49,662 - wn_one_to_x - [INFO] - [E:1| 300]: Train Loss:0.0091201, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:42:18,510 - wn_one_to_x - [INFO] - [E:1| 400]: Train Loss:0.0089663, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:43:46,445 - wn_one_to_x - [INFO] - [E:1| 500]: Train Loss:0.008831, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:45:14,616 - wn_one_to_x - [INFO] - [E:1| 600]: Train Loss:0.0087075, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:46:43,052 - wn_one_to_x - [INFO] - [E:1| 700]: Train Loss:0.0085991, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:48:11,672 - wn_one_to_x - [INFO] - [E:1| 800]: Train Loss:0.008502, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:49:39,414 - wn_one_to_x - [INFO] - [E:1| 900]: Train Loss:0.0084137, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:51:06,908 - wn_one_to_x - [INFO] - [E:1| 1000]: Train Loss:0.0083339, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:52:34,661 - wn_one_to_x - [INFO] - [E:1| 1100]: Train Loss:0.0082626, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:54:02,670 - wn_one_to_x - [INFO] - [E:1| 1200]: Train Loss:0.0081982, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:55:30,572 - wn_one_to_x - [INFO] - [E:1| 1300]: Train Loss:0.0081385, Val MRR:0.00846, wn_one_to_x
2023-05-02 10:56:19,802 - wn_one_to_x - [INFO] - [Epoch:1]: Training Loss:0.008108
2023-05-02 10:56:20,058 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 10:56:26,167 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 10:56:31,573 - wn_one_to_x - [INFO] - [Evaluating Epoch 1 valid]:
MRR: Tail : 0.01373, Head : 0.00363, Avg : 0.00868
2023-05-02 10:56:36,790 - wn_one_to_x - [INFO] - [Epoch 1]: Training Loss: 0.008108, Valid MRR: 0.00868,
2023-05-02 10:56:37,697 - wn_one_to_x - [INFO] - [E:2| 0]: Train Loss:0.0072846, Val MRR:0.00868, wn_one_to_x
2023-05-02 10:58:06,099 - wn_one_to_x - [INFO] - [E:2| 100]: Train Loss:0.007303, Val MRR:0.00868, wn_one_to_x
2023-05-02 10:59:34,105 - wn_one_to_x - [INFO] - [E:2| 200]: Train Loss:0.0072921, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:01:01,944 - wn_one_to_x - [INFO] - [E:2| 300]: Train Loss:0.0072783, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:02:30,577 - wn_one_to_x - [INFO] - [E:2| 400]: Train Loss:0.0072669, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:03:59,242 - wn_one_to_x - [INFO] - [E:2| 500]: Train Loss:0.0072528, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:05:27,104 - wn_one_to_x - [INFO] - [E:2| 600]: Train Loss:0.0072431, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:06:56,306 - wn_one_to_x - [INFO] - [E:2| 700]: Train Loss:0.0072331, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:08:24,610 - wn_one_to_x - [INFO] - [E:2| 800]: Train Loss:0.0072254, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:09:54,021 - wn_one_to_x - [INFO] - [E:2| 900]: Train Loss:0.0072173, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:11:22,937 - wn_one_to_x - [INFO] - [E:2| 1000]: Train Loss:0.0072107, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:12:51,817 - wn_one_to_x - [INFO] - [E:2| 1100]: Train Loss:0.0072062, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:14:21,360 - wn_one_to_x - [INFO] - [E:2| 1200]: Train Loss:0.0072001, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:15:49,877 - wn_one_to_x - [INFO] - [E:2| 1300]: Train Loss:0.0071951, Val MRR:0.00868, wn_one_to_x
2023-05-02 11:16:39,432 - wn_one_to_x - [INFO] - [Epoch:2]: Training Loss:0.007192
2023-05-02 11:16:39,684 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 11:16:49,210 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 11:17:03,374 - wn_one_to_x - [INFO] - [Evaluating Epoch 2 valid]:
MRR: Tail : 0.02517, Head : 0.00721, Avg : 0.01619
2023-05-02 11:17:07,609 - wn_one_to_x - [INFO] - [Epoch 2]: Training Loss: 0.0071921, Valid MRR: 0.01619,
2023-05-02 11:17:08,700 - wn_one_to_x - [INFO] - [E:3| 0]: Train Loss:0.0070748, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:18:38,030 - wn_one_to_x - [INFO] - [E:3| 100]: Train Loss:0.0070283, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:20:08,468 - wn_one_to_x - [INFO] - [E:3| 200]: Train Loss:0.0070341, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:21:38,695 - wn_one_to_x - [INFO] - [E:3| 300]: Train Loss:0.0070372, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:23:07,129 - wn_one_to_x - [INFO] - [E:3| 400]: Train Loss:0.0070357, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:24:35,726 - wn_one_to_x - [INFO] - [E:3| 500]: Train Loss:0.0070365, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:26:04,256 - wn_one_to_x - [INFO] - [E:3| 600]: Train Loss:0.0070398, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:27:34,755 - wn_one_to_x - [INFO] - [E:3| 700]: Train Loss:0.0070413, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:29:03,809 - wn_one_to_x - [INFO] - [E:3| 800]: Train Loss:0.0070433, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:30:33,390 - wn_one_to_x - [INFO] - [E:3| 900]: Train Loss:0.0070442, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:32:01,992 - wn_one_to_x - [INFO] - [E:3| 1000]: Train Loss:0.0070434, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:33:30,582 - wn_one_to_x - [INFO] - [E:3| 1100]: Train Loss:0.0070424, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:34:58,993 - wn_one_to_x - [INFO] - [E:3| 1200]: Train Loss:0.0070412, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:36:27,518 - wn_one_to_x - [INFO] - [E:3| 1300]: Train Loss:0.0070399, Val MRR:0.01619, wn_one_to_x
2023-05-02 11:37:17,463 - wn_one_to_x - [INFO] - [Epoch:3]: Training Loss:0.007039
2023-05-02 11:37:17,709 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 11:37:27,583 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 11:37:42,235 - wn_one_to_x - [INFO] - [Evaluating Epoch 3 valid]:
MRR: Tail : 0.02903, Head : 0.01315, Avg : 0.02109
2023-05-02 11:37:47,429 - wn_one_to_x - [INFO] - [Epoch 3]: Training Loss: 0.0070393, Valid MRR: 0.02109,
2023-05-02 11:37:48,356 - wn_one_to_x - [INFO] - [E:4| 0]: Train Loss:0.0067462, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:39:17,669 - wn_one_to_x - [INFO] - [E:4| 100]: Train Loss:0.0069002, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:40:45,924 - wn_one_to_x - [INFO] - [E:4| 200]: Train Loss:0.0069138, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:42:14,220 - wn_one_to_x - [INFO] - [E:4| 300]: Train Loss:0.0069154, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:43:42,439 - wn_one_to_x - [INFO] - [E:4| 400]: Train Loss:0.0069138, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:45:09,984 - wn_one_to_x - [INFO] - [E:4| 500]: Train Loss:0.0069197, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:46:37,911 - wn_one_to_x - [INFO] - [E:4| 600]: Train Loss:0.0069209, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:48:06,247 - wn_one_to_x - [INFO] - [E:4| 700]: Train Loss:0.0069196, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:49:35,587 - wn_one_to_x - [INFO] - [E:4| 800]: Train Loss:0.0069198, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:51:04,819 - wn_one_to_x - [INFO] - [E:4| 900]: Train Loss:0.0069186, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:52:34,443 - wn_one_to_x - [INFO] - [E:4| 1000]: Train Loss:0.0069169, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:54:03,116 - wn_one_to_x - [INFO] - [E:4| 1100]: Train Loss:0.006916, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:55:31,651 - wn_one_to_x - [INFO] - [E:4| 1200]: Train Loss:0.0069159, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:56:59,314 - wn_one_to_x - [INFO] - [E:4| 1300]: Train Loss:0.0069163, Val MRR:0.02109, wn_one_to_x
2023-05-02 11:57:48,735 - wn_one_to_x - [INFO] - [Epoch:4]: Training Loss:0.006915
2023-05-02 11:57:48,977 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 11:57:56,511 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 11:58:02,799 - wn_one_to_x - [INFO] - [Evaluating Epoch 4 valid]:
MRR: Tail : 0.02973, Head : 0.0147, Avg : 0.02222
2023-05-02 11:58:07,592 - wn_one_to_x - [INFO] - [Epoch 4]: Training Loss: 0.0069152, Valid MRR: 0.02222,
2023-05-02 11:58:08,478 - wn_one_to_x - [INFO] - [E:5| 0]: Train Loss:0.0070126, Val MRR:0.02222, wn_one_to_x
2023-05-02 11:59:37,952 - wn_one_to_x - [INFO] - [E:5| 100]: Train Loss:0.0067809, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:01:08,392 - wn_one_to_x - [INFO] - [E:5| 200]: Train Loss:0.0067805, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:02:37,558 - wn_one_to_x - [INFO] - [E:5| 300]: Train Loss:0.0067846, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:04:06,464 - wn_one_to_x - [INFO] - [E:5| 400]: Train Loss:0.0067838, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:05:34,830 - wn_one_to_x - [INFO] - [E:5| 500]: Train Loss:0.0067831, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:07:02,900 - wn_one_to_x - [INFO] - [E:5| 600]: Train Loss:0.0067865, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:08:32,154 - wn_one_to_x - [INFO] - [E:5| 700]: Train Loss:0.0067873, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:10:02,042 - wn_one_to_x - [INFO] - [E:5| 800]: Train Loss:0.0067845, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:11:30,532 - wn_one_to_x - [INFO] - [E:5| 900]: Train Loss:0.0067837, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:12:59,621 - wn_one_to_x - [INFO] - [E:5| 1000]: Train Loss:0.0067839, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:14:27,840 - wn_one_to_x - [INFO] - [E:5| 1100]: Train Loss:0.0067821, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:15:56,553 - wn_one_to_x - [INFO] - [E:5| 1200]: Train Loss:0.0067847, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:17:25,287 - wn_one_to_x - [INFO] - [E:5| 1300]: Train Loss:0.0067843, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:18:14,606 - wn_one_to_x - [INFO] - [Epoch:5]: Training Loss:0.006782
2023-05-02 12:18:14,859 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 12:18:21,869 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 12:18:27,246 - wn_one_to_x - [INFO] - [Evaluating Epoch 5 valid]:
MRR: Tail : 0.0295, Head : 0.01488, Avg : 0.02219
2023-05-02 12:18:27,246 - wn_one_to_x - [INFO] - [Epoch 5]: Training Loss: 0.006782, Valid MRR: 0.02222,
2023-05-02 12:18:28,122 - wn_one_to_x - [INFO] - [E:6| 0]: Train Loss:0.0067161, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:19:56,306 - wn_one_to_x - [INFO] - [E:6| 100]: Train Loss:0.006625, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:21:25,226 - wn_one_to_x - [INFO] - [E:6| 200]: Train Loss:0.0066253, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:22:53,147 - wn_one_to_x - [INFO] - [E:6| 300]: Train Loss:0.0066272, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:24:21,709 - wn_one_to_x - [INFO] - [E:6| 400]: Train Loss:0.006623, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:25:49,873 - wn_one_to_x - [INFO] - [E:6| 500]: Train Loss:0.0066233, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:27:18,083 - wn_one_to_x - [INFO] - [E:6| 600]: Train Loss:0.006625, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:28:47,355 - wn_one_to_x - [INFO] - [E:6| 700]: Train Loss:0.006626, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:30:17,444 - wn_one_to_x - [INFO] - [E:6| 800]: Train Loss:0.0066255, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:31:46,666 - wn_one_to_x - [INFO] - [E:6| 900]: Train Loss:0.0066245, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:33:17,149 - wn_one_to_x - [INFO] - [E:6| 1000]: Train Loss:0.0066229, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:34:47,184 - wn_one_to_x - [INFO] - [E:6| 1100]: Train Loss:0.0066232, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:36:17,687 - wn_one_to_x - [INFO] - [E:6| 1200]: Train Loss:0.0066219, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:37:48,082 - wn_one_to_x - [INFO] - [E:6| 1300]: Train Loss:0.0066193, Val MRR:0.02222, wn_one_to_x
2023-05-02 12:38:37,631 - wn_one_to_x - [INFO] - [Epoch:6]: Training Loss:0.006618
2023-05-02 12:38:39,579 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 12:38:48,918 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 12:38:55,711 - wn_one_to_x - [INFO] - [Evaluating Epoch 6 valid]:
MRR: Tail : 0.0317, Head : 0.01519, Avg : 0.02344
2023-05-02 12:39:00,726 - wn_one_to_x - [INFO] - [Epoch 6]: Training Loss: 0.0066182, Valid MRR: 0.02344,
2023-05-02 12:39:01,619 - wn_one_to_x - [INFO] - [E:7| 0]: Train Loss:0.0063931, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:40:30,576 - wn_one_to_x - [INFO] - [E:7| 100]: Train Loss:0.0064241, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:41:59,019 - wn_one_to_x - [INFO] - [E:7| 200]: Train Loss:0.0064308, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:43:27,502 - wn_one_to_x - [INFO] - [E:7| 300]: Train Loss:0.006437, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:44:56,504 - wn_one_to_x - [INFO] - [E:7| 400]: Train Loss:0.0064442, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:46:24,698 - wn_one_to_x - [INFO] - [E:7| 500]: Train Loss:0.0064441, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:47:53,222 - wn_one_to_x - [INFO] - [E:7| 600]: Train Loss:0.0064424, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:49:21,438 - wn_one_to_x - [INFO] - [E:7| 700]: Train Loss:0.0064394, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:50:49,946 - wn_one_to_x - [INFO] - [E:7| 800]: Train Loss:0.0064389, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:52:18,497 - wn_one_to_x - [INFO] - [E:7| 900]: Train Loss:0.0064371, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:53:47,124 - wn_one_to_x - [INFO] - [E:7| 1000]: Train Loss:0.006434, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:55:15,606 - wn_one_to_x - [INFO] - [E:7| 1100]: Train Loss:0.0064313, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:56:44,561 - wn_one_to_x - [INFO] - [E:7| 1200]: Train Loss:0.0064283, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:58:12,501 - wn_one_to_x - [INFO] - [E:7| 1300]: Train Loss:0.0064268, Val MRR:0.02344, wn_one_to_x
2023-05-02 12:59:01,434 - wn_one_to_x - [INFO] - [Epoch:7]: Training Loss:0.006426
2023-05-02 12:59:01,677 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 12:59:11,917 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 12:59:18,986 - wn_one_to_x - [INFO] - [Evaluating Epoch 7 valid]:
MRR: Tail : 0.03893, Head : 0.01616, Avg : 0.02754
2023-05-02 12:59:27,412 - wn_one_to_x - [INFO] - [Epoch 7]: Training Loss: 0.0064261, Valid MRR: 0.02754,
2023-05-02 12:59:28,415 - wn_one_to_x - [INFO] - [E:8| 0]: Train Loss:0.0061185, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:00:56,730 - wn_one_to_x - [INFO] - [E:8| 100]: Train Loss:0.0062411, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:02:26,190 - wn_one_to_x - [INFO] - [E:8| 200]: Train Loss:0.0062455, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:03:54,846 - wn_one_to_x - [INFO] - [E:8| 300]: Train Loss:0.0062436, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:05:23,439 - wn_one_to_x - [INFO] - [E:8| 400]: Train Loss:0.006238, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:06:52,227 - wn_one_to_x - [INFO] - [E:8| 500]: Train Loss:0.0062401, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:08:21,368 - wn_one_to_x - [INFO] - [E:8| 600]: Train Loss:0.0062408, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:09:53,862 - wn_one_to_x - [INFO] - [E:8| 700]: Train Loss:0.0062406, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:11:29,534 - wn_one_to_x - [INFO] - [E:8| 800]: Train Loss:0.0062386, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:13:04,184 - wn_one_to_x - [INFO] - [E:8| 900]: Train Loss:0.0062383, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:14:32,454 - wn_one_to_x - [INFO] - [E:8| 1000]: Train Loss:0.006236, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:16:00,589 - wn_one_to_x - [INFO] - [E:8| 1100]: Train Loss:0.0062345, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:17:29,024 - wn_one_to_x - [INFO] - [E:8| 1200]: Train Loss:0.0062336, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:18:57,436 - wn_one_to_x - [INFO] - [E:8| 1300]: Train Loss:0.006232, Val MRR:0.02754, wn_one_to_x
2023-05-02 13:19:46,870 - wn_one_to_x - [INFO] - [Epoch:8]: Training Loss:0.006229
2023-05-02 13:19:47,188 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 13:19:53,813 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 13:20:00,929 - wn_one_to_x - [INFO] - [Evaluating Epoch 8 valid]:
MRR: Tail : 0.04081, Head : 0.01681, Avg : 0.02881
2023-05-02 13:20:06,343 - wn_one_to_x - [INFO] - [Epoch 8]: Training Loss: 0.006229, Valid MRR: 0.02881,
2023-05-02 13:20:07,239 - wn_one_to_x - [INFO] - [E:9| 0]: Train Loss:0.0059077, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:21:35,198 - wn_one_to_x - [INFO] - [E:9| 100]: Train Loss:0.0060203, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:23:03,065 - wn_one_to_x - [INFO] - [E:9| 200]: Train Loss:0.0060153, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:24:30,523 - wn_one_to_x - [INFO] - [E:9| 300]: Train Loss:0.0060208, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:25:58,976 - wn_one_to_x - [INFO] - [E:9| 400]: Train Loss:0.0060255, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:27:27,667 - wn_one_to_x - [INFO] - [E:9| 500]: Train Loss:0.0060329, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:28:56,986 - wn_one_to_x - [INFO] - [E:9| 600]: Train Loss:0.0060345, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:30:28,093 - wn_one_to_x - [INFO] - [E:9| 700]: Train Loss:0.0060354, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:31:57,714 - wn_one_to_x - [INFO] - [E:9| 800]: Train Loss:0.0060338, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:33:26,315 - wn_one_to_x - [INFO] - [E:9| 900]: Train Loss:0.0060349, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:34:55,844 - wn_one_to_x - [INFO] - [E:9| 1000]: Train Loss:0.0060325, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:36:25,026 - wn_one_to_x - [INFO] - [E:9| 1100]: Train Loss:0.006033, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:37:53,519 - wn_one_to_x - [INFO] - [E:9| 1200]: Train Loss:0.0060312, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:39:22,783 - wn_one_to_x - [INFO] - [E:9| 1300]: Train Loss:0.0060278, Val MRR:0.02881, wn_one_to_x
2023-05-02 13:40:12,734 - wn_one_to_x - [INFO] - [Epoch:9]: Training Loss:0.006025
2023-05-02 13:40:12,967 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 13:40:18,693 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 13:40:25,150 - wn_one_to_x - [INFO] - [Evaluating Epoch 9 valid]:
MRR: Tail : 0.04329, Head : 0.01806, Avg : 0.03068
MR: Tail : 6066.7, Head : 1.1573e+04, Avg : 8820.0
Hit-1: Tail : 0.02175, Head : 0.01088, Avg : 0.01632
Hit-3: Tail : 0.04581, Head : 0.01747, Avg : 0.03164
Hit-10: Tail : 0.08899, Head : 0.02966, Avg : 0.05933
2023-05-02 13:40:29,721 - wn_one_to_x - [INFO] - [Epoch 9]: Training Loss: 0.0060247, Valid MRR: 0.03068,
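
The MRR/MR/Hit-k block above (printed in full every tenth epoch) reports standard link-prediction statistics over the rank of the gold entity, and each Avg column is the plain mean of the Tail and Head directions, e.g. (0.04329 + 0.01806) / 2 = 0.030675, logged as 0.03068. A minimal sketch of the arithmetic, assuming plain lists of 1-based ranks; illustrative code, not the project's evaluation routine:

def rank_metrics(ranks, ks=(1, 3, 10)):
    """MRR, MR and Hits@k from 1-based ranks of the gold entity."""
    n = len(ranks)
    out = {"MRR": sum(1.0 / r for r in ranks) / n,
           "MR": sum(ranks) / n}
    for k in ks:
        out[f"Hit-{k}"] = sum(r <= k for r in ranks) / n
    return out

tail = rank_metrics([1, 4, 120])    # hypothetical tail-prediction ranks
head = rank_metrics([2, 9, 3500])   # hypothetical head-prediction ranks
avg = {m: (tail[m] + head[m]) / 2 for m in tail}   # the Avg column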
2023-05-02 13:40:30,596 - wn_one_to_x - [INFO] - [E:10| 0]: Train Loss:0.0058569, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:41:58,651 - wn_one_to_x - [INFO] - [E:10| 100]: Train Loss:0.0058264, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:43:25,531 - wn_one_to_x - [INFO] - [E:10| 200]: Train Loss:0.0058329, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:44:52,653 - wn_one_to_x - [INFO] - [E:10| 300]: Train Loss:0.0058255, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:46:19,422 - wn_one_to_x - [INFO] - [E:10| 400]: Train Loss:0.0058263, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:47:46,719 - wn_one_to_x - [INFO] - [E:10| 500]: Train Loss:0.0058272, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:49:13,637 - wn_one_to_x - [INFO] - [E:10| 600]: Train Loss:0.0058279, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:50:40,526 - wn_one_to_x - [INFO] - [E:10| 700]: Train Loss:0.0058273, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:52:07,570 - wn_one_to_x - [INFO] - [E:10| 800]: Train Loss:0.0058262, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:53:34,468 - wn_one_to_x - [INFO] - [E:10| 900]: Train Loss:0.0058237, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:55:01,448 - wn_one_to_x - [INFO] - [E:10| 1000]: Train Loss:0.0058219, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:56:28,961 - wn_one_to_x - [INFO] - [E:10| 1100]: Train Loss:0.0058192, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:57:56,335 - wn_one_to_x - [INFO] - [E:10| 1200]: Train Loss:0.0058196, Val MRR:0.03068, wn_one_to_x
2023-05-02 13:59:23,371 - wn_one_to_x - [INFO] - [E:10| 1300]: Train Loss:0.005815, Val MRR:0.03068, wn_one_to_x
2023-05-02 14:00:11,850 - wn_one_to_x - [INFO] - [Epoch:10]: Training Loss:0.005814
2023-05-02 14:00:12,093 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 14:00:20,395 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 14:00:26,140 - wn_one_to_x - [INFO] - [Evaluating Epoch 10 valid]:
MRR: Tail : 0.04782, Head : 0.01999, Avg : 0.0339
2023-05-02 14:00:28,215 - wn_one_to_x - [INFO] - [Epoch 10]: Training Loss: 0.0058141, Valid MRR: 0.0339,
2023-05-02 14:00:29,103 - wn_one_to_x - [INFO] - [E:11| 0]: Train Loss:0.0055901, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:01:55,902 - wn_one_to_x - [INFO] - [E:11| 100]: Train Loss:0.0056114, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:03:23,327 - wn_one_to_x - [INFO] - [E:11| 200]: Train Loss:0.0056157, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:04:52,235 - wn_one_to_x - [INFO] - [E:11| 300]: Train Loss:0.0056203, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:06:21,557 - wn_one_to_x - [INFO] - [E:11| 400]: Train Loss:0.0056167, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:07:51,631 - wn_one_to_x - [INFO] - [E:11| 500]: Train Loss:0.0056142, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:09:18,724 - wn_one_to_x - [INFO] - [E:11| 600]: Train Loss:0.0056158, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:10:46,121 - wn_one_to_x - [INFO] - [E:11| 700]: Train Loss:0.0056127, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:12:15,879 - wn_one_to_x - [INFO] - [E:11| 800]: Train Loss:0.0056113, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:13:46,291 - wn_one_to_x - [INFO] - [E:11| 900]: Train Loss:0.0056097, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:15:15,343 - wn_one_to_x - [INFO] - [E:11| 1000]: Train Loss:0.0056083, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:16:44,113 - wn_one_to_x - [INFO] - [E:11| 1100]: Train Loss:0.0056064, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:18:12,050 - wn_one_to_x - [INFO] - [E:11| 1200]: Train Loss:0.0056065, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:19:39,913 - wn_one_to_x - [INFO] - [E:11| 1300]: Train Loss:0.0056066, Val MRR:0.0339, wn_one_to_x
2023-05-02 14:20:29,136 - wn_one_to_x - [INFO] - [Epoch:11]: Training Loss:0.005605
2023-05-02 14:20:29,382 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 14:20:36,340 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 14:20:42,705 - wn_one_to_x - [INFO] - [Evaluating Epoch 11 valid]:
MRR: Tail : 0.05166, Head : 0.0219, Avg : 0.03678
2023-05-02 14:20:44,920 - wn_one_to_x - [INFO] - [Epoch 11]: Training Loss: 0.0056047, Valid MRR: 0.03678,
2023-05-02 14:20:45,809 - wn_one_to_x - [INFO] - [E:12| 0]: Train Loss:0.0052809, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:22:14,069 - wn_one_to_x - [INFO] - [E:12| 100]: Train Loss:0.0053839, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:23:41,191 - wn_one_to_x - [INFO] - [E:12| 200]: Train Loss:0.0053952, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:25:10,492 - wn_one_to_x - [INFO] - [E:12| 300]: Train Loss:0.0054088, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:26:38,032 - wn_one_to_x - [INFO] - [E:12| 400]: Train Loss:0.0054106, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:28:05,265 - wn_one_to_x - [INFO] - [E:12| 500]: Train Loss:0.0054118, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:29:32,988 - wn_one_to_x - [INFO] - [E:12| 600]: Train Loss:0.0054084, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:31:00,279 - wn_one_to_x - [INFO] - [E:12| 700]: Train Loss:0.0054081, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:32:29,176 - wn_one_to_x - [INFO] - [E:12| 800]: Train Loss:0.005408, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:33:57,533 - wn_one_to_x - [INFO] - [E:12| 900]: Train Loss:0.0054073, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:35:24,876 - wn_one_to_x - [INFO] - [E:12| 1000]: Train Loss:0.0054061, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:36:52,174 - wn_one_to_x - [INFO] - [E:12| 1100]: Train Loss:0.0054022, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:38:19,625 - wn_one_to_x - [INFO] - [E:12| 1200]: Train Loss:0.0054018, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:39:47,021 - wn_one_to_x - [INFO] - [E:12| 1300]: Train Loss:0.005401, Val MRR:0.03678, wn_one_to_x
2023-05-02 14:40:36,097 - wn_one_to_x - [INFO] - [Epoch:12]: Training Loss:0.0054
2023-05-02 14:40:36,345 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 14:40:45,119 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 14:40:50,726 - wn_one_to_x - [INFO] - [Evaluating Epoch 12 valid]:
MRR: Tail : 0.05466, Head : 0.02439, Avg : 0.03952
2023-05-02 14:40:52,524 - wn_one_to_x - [INFO] - [Epoch 12]: Training Loss: 0.0054004, Valid MRR: 0.03952,
2023-05-02 14:40:53,422 - wn_one_to_x - [INFO] - [E:13| 0]: Train Loss:0.0051907, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:42:21,265 - wn_one_to_x - [INFO] - [E:13| 100]: Train Loss:0.0052194, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:43:48,847 - wn_one_to_x - [INFO] - [E:13| 200]: Train Loss:0.005216, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:45:16,437 - wn_one_to_x - [INFO] - [E:13| 300]: Train Loss:0.0052179, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:46:43,795 - wn_one_to_x - [INFO] - [E:13| 400]: Train Loss:0.0052174, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:48:11,394 - wn_one_to_x - [INFO] - [E:13| 500]: Train Loss:0.0052142, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:49:39,340 - wn_one_to_x - [INFO] - [E:13| 600]: Train Loss:0.0052167, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:51:07,277 - wn_one_to_x - [INFO] - [E:13| 700]: Train Loss:0.0052156, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:52:35,932 - wn_one_to_x - [INFO] - [E:13| 800]: Train Loss:0.0052136, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:54:04,566 - wn_one_to_x - [INFO] - [E:13| 900]: Train Loss:0.0052107, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:55:32,780 - wn_one_to_x - [INFO] - [E:13| 1000]: Train Loss:0.0052086, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:56:59,820 - wn_one_to_x - [INFO] - [E:13| 1100]: Train Loss:0.0052085, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:58:27,979 - wn_one_to_x - [INFO] - [E:13| 1200]: Train Loss:0.005207, Val MRR:0.03952, wn_one_to_x
2023-05-02 14:59:56,260 - wn_one_to_x - [INFO] - [E:13| 1300]: Train Loss:0.0052048, Val MRR:0.03952, wn_one_to_x
2023-05-02 15:00:45,612 - wn_one_to_x - [INFO] - [Epoch:13]: Training Loss:0.005203
2023-05-02 15:00:45,857 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 15:00:51,404 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 15:00:56,783 - wn_one_to_x - [INFO] - [Evaluating Epoch 13 valid]:
MRR: Tail : 0.05636, Head : 0.03029, Avg : 0.04333
2023-05-02 15:00:58,721 - wn_one_to_x - [INFO] - [Epoch 13]: Training Loss: 0.0052028, Valid MRR: 0.04333,
2023-05-02 15:00:59,603 - wn_one_to_x - [INFO] - [E:14| 0]: Train Loss:0.0051203, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:02:28,061 - wn_one_to_x - [INFO] - [E:14| 100]: Train Loss:0.0050206, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:03:55,994 - wn_one_to_x - [INFO] - [E:14| 200]: Train Loss:0.005029, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:05:23,326 - wn_one_to_x - [INFO] - [E:14| 300]: Train Loss:0.0050187, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:06:51,289 - wn_one_to_x - [INFO] - [E:14| 400]: Train Loss:0.0050171, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:08:19,332 - wn_one_to_x - [INFO] - [E:14| 500]: Train Loss:0.0050172, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:09:46,846 - wn_one_to_x - [INFO] - [E:14| 600]: Train Loss:0.0050185, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:11:14,752 - wn_one_to_x - [INFO] - [E:14| 700]: Train Loss:0.0050176, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:12:42,490 - wn_one_to_x - [INFO] - [E:14| 800]: Train Loss:0.0050173, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:14:10,508 - wn_one_to_x - [INFO] - [E:14| 900]: Train Loss:0.005015, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:15:38,950 - wn_one_to_x - [INFO] - [E:14| 1000]: Train Loss:0.0050142, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:17:06,267 - wn_one_to_x - [INFO] - [E:14| 1100]: Train Loss:0.0050119, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:18:34,165 - wn_one_to_x - [INFO] - [E:14| 1200]: Train Loss:0.0050112, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:20:02,650 - wn_one_to_x - [INFO] - [E:14| 1300]: Train Loss:0.0050091, Val MRR:0.04333, wn_one_to_x
2023-05-02 15:20:52,049 - wn_one_to_x - [INFO] - [Epoch:14]: Training Loss:0.005007
2023-05-02 15:20:52,309 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 15:20:58,024 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 15:21:03,438 - wn_one_to_x - [INFO] - [Evaluating Epoch 14 valid]:
MRR: Tail : 0.06217, Head : 0.03465, Avg : 0.04841
2023-05-02 15:21:08,784 - wn_one_to_x - [INFO] - [Epoch 14]: Training Loss: 0.0050075, Valid MRR: 0.04841,
2023-05-02 15:21:09,806 - wn_one_to_x - [INFO] - [E:15| 0]: Train Loss:0.0047982, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:22:39,361 - wn_one_to_x - [INFO] - [E:15| 100]: Train Loss:0.0048317, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:24:07,702 - wn_one_to_x - [INFO] - [E:15| 200]: Train Loss:0.0048287, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:25:38,332 - wn_one_to_x - [INFO] - [E:15| 300]: Train Loss:0.0048315, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:27:07,738 - wn_one_to_x - [INFO] - [E:15| 400]: Train Loss:0.0048329, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:28:37,715 - wn_one_to_x - [INFO] - [E:15| 500]: Train Loss:0.0048299, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:30:07,138 - wn_one_to_x - [INFO] - [E:15| 600]: Train Loss:0.0048328, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:31:36,614 - wn_one_to_x - [INFO] - [E:15| 700]: Train Loss:0.0048322, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:33:06,328 - wn_one_to_x - [INFO] - [E:15| 800]: Train Loss:0.004832, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:34:35,749 - wn_one_to_x - [INFO] - [E:15| 900]: Train Loss:0.0048281, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:36:06,648 - wn_one_to_x - [INFO] - [E:15| 1000]: Train Loss:0.0048251, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:37:38,357 - wn_one_to_x - [INFO] - [E:15| 1100]: Train Loss:0.0048244, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:39:07,816 - wn_one_to_x - [INFO] - [E:15| 1200]: Train Loss:0.0048232, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:40:38,293 - wn_one_to_x - [INFO] - [E:15| 1300]: Train Loss:0.0048218, Val MRR:0.04841, wn_one_to_x
2023-05-02 15:41:27,822 - wn_one_to_x - [INFO] - [Epoch:15]: Training Loss:0.004822
2023-05-02 15:41:28,076 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 15:41:33,731 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 15:41:40,076 - wn_one_to_x - [INFO] - [Evaluating Epoch 15 valid]:
MRR: Tail : 0.06929, Head : 0.04228, Avg : 0.05578
2023-05-02 15:41:41,747 - wn_one_to_x - [INFO] - [Epoch 15]: Training Loss: 0.0048218, Valid MRR: 0.05578,
2023-05-02 15:41:42,666 - wn_one_to_x - [INFO] - [E:16| 0]: Train Loss:0.0045796, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:43:13,209 - wn_one_to_x - [INFO] - [E:16| 100]: Train Loss:0.0046437, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:44:41,886 - wn_one_to_x - [INFO] - [E:16| 200]: Train Loss:0.0046443, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:46:10,671 - wn_one_to_x - [INFO] - [E:16| 300]: Train Loss:0.0046484, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:47:40,598 - wn_one_to_x - [INFO] - [E:16| 400]: Train Loss:0.0046546, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:49:10,256 - wn_one_to_x - [INFO] - [E:16| 500]: Train Loss:0.0046522, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:50:39,024 - wn_one_to_x - [INFO] - [E:16| 600]: Train Loss:0.0046543, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:52:07,166 - wn_one_to_x - [INFO] - [E:16| 700]: Train Loss:0.0046525, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:53:38,064 - wn_one_to_x - [INFO] - [E:16| 800]: Train Loss:0.0046495, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:55:07,501 - wn_one_to_x - [INFO] - [E:16| 900]: Train Loss:0.004649, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:56:36,217 - wn_one_to_x - [INFO] - [E:16| 1000]: Train Loss:0.0046458, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:58:05,584 - wn_one_to_x - [INFO] - [E:16| 1100]: Train Loss:0.0046441, Val MRR:0.05578, wn_one_to_x
2023-05-02 15:59:34,895 - wn_one_to_x - [INFO] - [E:16| 1200]: Train Loss:0.004642, Val MRR:0.05578, wn_one_to_x
2023-05-02 16:01:04,918 - wn_one_to_x - [INFO] - [E:16| 1300]: Train Loss:0.004641, Val MRR:0.05578, wn_one_to_x
2023-05-02 16:01:54,329 - wn_one_to_x - [INFO] - [Epoch:16]: Training Loss:0.004638
2023-05-02 16:01:54,575 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 16:02:00,389 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 16:02:05,856 - wn_one_to_x - [INFO] - [Evaluating Epoch 16 valid]:
MRR: Tail : 0.07911, Head : 0.04738, Avg : 0.06324
2023-05-02 16:02:08,198 - wn_one_to_x - [INFO] - [Epoch 16]: Training Loss: 0.0046384, Valid MRR: 0.06324,
2023-05-02 16:02:09,139 - wn_one_to_x - [INFO] - [E:17| 0]: Train Loss:0.0043086, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:03:38,205 - wn_one_to_x - [INFO] - [E:17| 100]: Train Loss:0.0044494, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:05:08,474 - wn_one_to_x - [INFO] - [E:17| 200]: Train Loss:0.00446, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:06:37,908 - wn_one_to_x - [INFO] - [E:17| 300]: Train Loss:0.0044622, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:08:06,207 - wn_one_to_x - [INFO] - [E:17| 400]: Train Loss:0.0044665, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:09:34,644 - wn_one_to_x - [INFO] - [E:17| 500]: Train Loss:0.0044652, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:11:04,032 - wn_one_to_x - [INFO] - [E:17| 600]: Train Loss:0.0044673, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:12:32,852 - wn_one_to_x - [INFO] - [E:17| 700]: Train Loss:0.0044673, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:14:03,411 - wn_one_to_x - [INFO] - [E:17| 800]: Train Loss:0.0044682, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:15:32,107 - wn_one_to_x - [INFO] - [E:17| 900]: Train Loss:0.0044674, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:17:01,259 - wn_one_to_x - [INFO] - [E:17| 1000]: Train Loss:0.0044679, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:18:29,132 - wn_one_to_x - [INFO] - [E:17| 1100]: Train Loss:0.0044678, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:19:57,037 - wn_one_to_x - [INFO] - [E:17| 1200]: Train Loss:0.0044652, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:21:24,216 - wn_one_to_x - [INFO] - [E:17| 1300]: Train Loss:0.0044642, Val MRR:0.06324, wn_one_to_x
2023-05-02 16:22:13,221 - wn_one_to_x - [INFO] - [Epoch:17]: Training Loss:0.004464
2023-05-02 16:22:13,529 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 16:22:19,044 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 16:22:25,404 - wn_one_to_x - [INFO] - [Evaluating Epoch 17 valid]:
MRR: Tail : 0.08427, Head : 0.05584, Avg : 0.07006
2023-05-02 16:22:28,384 - wn_one_to_x - [INFO] - [Epoch 17]: Training Loss: 0.0044636, Valid MRR: 0.07006,
2023-05-02 16:22:29,262 - wn_one_to_x - [INFO] - [E:18| 0]: Train Loss:0.0041649, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:23:56,832 - wn_one_to_x - [INFO] - [E:18| 100]: Train Loss:0.0042845, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:25:24,563 - wn_one_to_x - [INFO] - [E:18| 200]: Train Loss:0.0042907, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:26:52,660 - wn_one_to_x - [INFO] - [E:18| 300]: Train Loss:0.0042819, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:28:20,246 - wn_one_to_x - [INFO] - [E:18| 400]: Train Loss:0.0042859, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:29:47,992 - wn_one_to_x - [INFO] - [E:18| 500]: Train Loss:0.0042874, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:31:16,889 - wn_one_to_x - [INFO] - [E:18| 600]: Train Loss:0.0042842, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:32:44,892 - wn_one_to_x - [INFO] - [E:18| 700]: Train Loss:0.004288, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:34:13,967 - wn_one_to_x - [INFO] - [E:18| 800]: Train Loss:0.0042854, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:35:41,220 - wn_one_to_x - [INFO] - [E:18| 900]: Train Loss:0.0042856, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:37:09,458 - wn_one_to_x - [INFO] - [E:18| 1000]: Train Loss:0.004286, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:38:37,273 - wn_one_to_x - [INFO] - [E:18| 1100]: Train Loss:0.0042879, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:40:04,986 - wn_one_to_x - [INFO] - [E:18| 1200]: Train Loss:0.0042896, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:41:32,403 - wn_one_to_x - [INFO] - [E:18| 1300]: Train Loss:0.0042906, Val MRR:0.07006, wn_one_to_x
2023-05-02 16:42:21,311 - wn_one_to_x - [INFO] - [Epoch:18]: Training Loss:0.00429
2023-05-02 16:42:22,061 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 16:42:27,620 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 16:42:32,950 - wn_one_to_x - [INFO] - [Evaluating Epoch 18 valid]:
MRR: Tail : 0.09172, Head : 0.06806, Avg : 0.07989
2023-05-02 16:42:34,747 - wn_one_to_x - [INFO] - [Epoch 18]: Training Loss: 0.0042902, Valid MRR: 0.07989,
2023-05-02 16:42:35,689 - wn_one_to_x - [INFO] - [E:19| 0]: Train Loss:0.0039897, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:44:03,160 - wn_one_to_x - [INFO] - [E:19| 100]: Train Loss:0.0041381, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:45:31,174 - wn_one_to_x - [INFO] - [E:19| 200]: Train Loss:0.0041291, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:46:58,962 - wn_one_to_x - [INFO] - [E:19| 300]: Train Loss:0.0041228, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:48:26,252 - wn_one_to_x - [INFO] - [E:19| 400]: Train Loss:0.0041205, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:49:53,936 - wn_one_to_x - [INFO] - [E:19| 500]: Train Loss:0.0041254, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:51:21,546 - wn_one_to_x - [INFO] - [E:19| 600]: Train Loss:0.0041253, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:52:48,792 - wn_one_to_x - [INFO] - [E:19| 700]: Train Loss:0.0041252, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:54:17,011 - wn_one_to_x - [INFO] - [E:19| 800]: Train Loss:0.0041283, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:55:44,161 - wn_one_to_x - [INFO] - [E:19| 900]: Train Loss:0.0041259, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:57:11,733 - wn_one_to_x - [INFO] - [E:19| 1000]: Train Loss:0.0041236, Val MRR:0.07989, wn_one_to_x
2023-05-02 16:58:40,545 - wn_one_to_x - [INFO] - [E:19| 1100]: Train Loss:0.0041247, Val MRR:0.07989, wn_one_to_x
2023-05-02 17:00:08,019 - wn_one_to_x - [INFO] - [E:19| 1200]: Train Loss:0.004124, Val MRR:0.07989, wn_one_to_x
2023-05-02 17:01:35,885 - wn_one_to_x - [INFO] - [E:19| 1300]: Train Loss:0.0041234, Val MRR:0.07989, wn_one_to_x
2023-05-02 17:02:25,086 - wn_one_to_x - [INFO] - [Epoch:19]: Training Loss:0.004124
2023-05-02 17:02:25,343 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 17:02:31,834 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 17:02:37,093 - wn_one_to_x - [INFO] - [Evaluating Epoch 19 valid]:
MRR: Tail : 0.09744, Head : 0.07655, Avg : 0.087
MR: Tail : 3317.9, Head : 6699.5, Avg : 5008.7
Hit-1: Tail : 0.05669, Head : 0.04614, Avg : 0.05142
Hit-3: Tail : 0.10646, Head : 0.08174, Avg : 0.0941
Hit-10: Tail : 0.17666, Head : 0.13349, Avg : 0.15508
2023-05-02 17:02:41,480 - wn_one_to_x - [INFO] - [Epoch 19]: Training Loss: 0.0041239, Valid MRR: 0.087,
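
The alternating [Valid, Tail_Batch ...] and [Valid, Head_Batch ...] lines mark the two ranking directions: for each validation triple (h, r, t) the model scores (h, r, ?) against every entity and then (?, r, t), and the gold entity's filtered rank feeds the Tail and Head rows above. A minimal illustration of filtered ranking over a plain score list; names and numbers here are hypothetical, not the project's evaluation code:

def filtered_rank(scores, gold, known_true):
    """1-based rank of the gold entity, with other known-true entities masked out."""
    g = scores[gold]
    return 1 + sum(1 for e, s in enumerate(scores)
                   if s > g and e not in known_true)  # gold never beats itself

# Toy query with 5 entities; true answers are {2, 4}, the gold tail is entity 2.
scores = [0.1, 0.9, 0.7, 0.3, 0.8]   # entity 1 outranks the gold, entity 4 is filtered
print(filtered_rank(scores, gold=2, known_true={2, 4}))   # -> 2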
2023-05-02 17:02:42,371 - wn_one_to_x - [INFO] - [E:20| 0]: Train Loss:0.0039504, Val MRR:0.087, wn_one_to_x
2023-05-02 17:04:09,629 - wn_one_to_x - [INFO] - [E:20| 100]: Train Loss:0.0039654, Val MRR:0.087, wn_one_to_x
2023-05-02 17:05:36,953 - wn_one_to_x - [INFO] - [E:20| 200]: Train Loss:0.0039589, Val MRR:0.087, wn_one_to_x
2023-05-02 17:07:05,288 - wn_one_to_x - [INFO] - [E:20| 300]: Train Loss:0.0039556, Val MRR:0.087, wn_one_to_x
2023-05-02 17:08:33,535 - wn_one_to_x - [INFO] - [E:20| 400]: Train Loss:0.0039569, Val MRR:0.087, wn_one_to_x
2023-05-02 17:10:01,398 - wn_one_to_x - [INFO] - [E:20| 500]: Train Loss:0.0039561, Val MRR:0.087, wn_one_to_x
2023-05-02 17:11:29,828 - wn_one_to_x - [INFO] - [E:20| 600]: Train Loss:0.0039599, Val MRR:0.087, wn_one_to_x
2023-05-02 17:12:57,778 - wn_one_to_x - [INFO] - [E:20| 700]: Train Loss:0.0039585, Val MRR:0.087, wn_one_to_x
2023-05-02 17:14:27,267 - wn_one_to_x - [INFO] - [E:20| 800]: Train Loss:0.0039626, Val MRR:0.087, wn_one_to_x
2023-05-02 17:15:55,276 - wn_one_to_x - [INFO] - [E:20| 900]: Train Loss:0.0039604, Val MRR:0.087, wn_one_to_x
2023-05-02 17:17:23,363 - wn_one_to_x - [INFO] - [E:20| 1000]: Train Loss:0.0039603, Val MRR:0.087, wn_one_to_x
2023-05-02 17:18:50,406 - wn_one_to_x - [INFO] - [E:20| 1100]: Train Loss:0.0039613, Val MRR:0.087, wn_one_to_x
2023-05-02 17:20:17,914 - wn_one_to_x - [INFO] - [E:20| 1200]: Train Loss:0.0039613, Val MRR:0.087, wn_one_to_x
2023-05-02 17:21:44,907 - wn_one_to_x - [INFO] - [E:20| 1300]: Train Loss:0.0039616, Val MRR:0.087, wn_one_to_x
2023-05-02 17:22:33,171 - wn_one_to_x - [INFO] - [Epoch:20]: Training Loss:0.003961
2023-05-02 17:22:33,419 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 17:22:39,385 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 17:22:44,999 - wn_one_to_x - [INFO] - [Evaluating Epoch 20 valid]:
MRR: Tail : 0.11177, Head : 0.09031, Avg : 0.10104
2023-05-02 17:22:47,034 - wn_one_to_x - [INFO] - [Epoch 20]: Training Loss: 0.0039612, Valid MRR: 0.10104,
2023-05-02 17:22:47,918 - wn_one_to_x - [INFO] - [E:21| 0]: Train Loss:0.0038512, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:24:14,574 - wn_one_to_x - [INFO] - [E:21| 100]: Train Loss:0.0038108, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:25:41,092 - wn_one_to_x - [INFO] - [E:21| 200]: Train Loss:0.0038021, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:27:07,745 - wn_one_to_x - [INFO] - [E:21| 300]: Train Loss:0.003802, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:28:34,311 - wn_one_to_x - [INFO] - [E:21| 400]: Train Loss:0.0038077, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:30:00,734 - wn_one_to_x - [INFO] - [E:21| 500]: Train Loss:0.0038062, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:31:27,254 - wn_one_to_x - [INFO] - [E:21| 600]: Train Loss:0.003806, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:32:53,850 - wn_one_to_x - [INFO] - [E:21| 700]: Train Loss:0.0038081, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:34:20,294 - wn_one_to_x - [INFO] - [E:21| 800]: Train Loss:0.0038093, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:35:46,945 - wn_one_to_x - [INFO] - [E:21| 900]: Train Loss:0.0038085, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:37:13,322 - wn_one_to_x - [INFO] - [E:21| 1000]: Train Loss:0.0038081, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:38:39,894 - wn_one_to_x - [INFO] - [E:21| 1100]: Train Loss:0.0038062, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:40:06,348 - wn_one_to_x - [INFO] - [E:21| 1200]: Train Loss:0.0038069, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:41:32,788 - wn_one_to_x - [INFO] - [E:21| 1300]: Train Loss:0.0038067, Val MRR:0.10104, wn_one_to_x
2023-05-02 17:42:21,173 - wn_one_to_x - [INFO] - [Epoch:21]: Training Loss:0.003805
2023-05-02 17:42:21,424 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 17:42:27,244 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 17:42:32,820 - wn_one_to_x - [INFO] - [Evaluating Epoch 21 valid]:
MRR: Tail : 0.12048, Head : 0.09968, Avg : 0.11008
2023-05-02 17:42:34,824 - wn_one_to_x - [INFO] - [Epoch 21]: Training Loss: 0.0038046, Valid MRR: 0.11008,
2023-05-02 17:42:35,696 - wn_one_to_x - [INFO] - [E:22| 0]: Train Loss:0.00372, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:44:02,230 - wn_one_to_x - [INFO] - [E:22| 100]: Train Loss:0.0036319, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:45:28,626 - wn_one_to_x - [INFO] - [E:22| 200]: Train Loss:0.0036371, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:46:55,091 - wn_one_to_x - [INFO] - [E:22| 300]: Train Loss:0.0036347, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:48:21,692 - wn_one_to_x - [INFO] - [E:22| 400]: Train Loss:0.0036423, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:49:48,111 - wn_one_to_x - [INFO] - [E:22| 500]: Train Loss:0.0036458, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:51:14,448 - wn_one_to_x - [INFO] - [E:22| 600]: Train Loss:0.0036491, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:52:41,080 - wn_one_to_x - [INFO] - [E:22| 700]: Train Loss:0.0036561, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:54:07,631 - wn_one_to_x - [INFO] - [E:22| 800]: Train Loss:0.0036545, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:55:33,895 - wn_one_to_x - [INFO] - [E:22| 900]: Train Loss:0.0036561, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:57:00,386 - wn_one_to_x - [INFO] - [E:22| 1000]: Train Loss:0.0036551, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:58:26,747 - wn_one_to_x - [INFO] - [E:22| 1100]: Train Loss:0.0036551, Val MRR:0.11008, wn_one_to_x
2023-05-02 17:59:53,463 - wn_one_to_x - [INFO] - [E:22| 1200]: Train Loss:0.0036553, Val MRR:0.11008, wn_one_to_x
2023-05-02 18:01:20,169 - wn_one_to_x - [INFO] - [E:22| 1300]: Train Loss:0.0036548, Val MRR:0.11008, wn_one_to_x
2023-05-02 18:02:08,367 - wn_one_to_x - [INFO] - [Epoch:22]: Training Loss:0.003655
2023-05-02 18:02:08,612 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 18:02:14,174 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 18:02:19,495 - wn_one_to_x - [INFO] - [Evaluating Epoch 22 valid]:
MRR: Tail : 0.13211, Head : 0.11381, Avg : 0.12296
2023-05-02 18:02:21,177 - wn_one_to_x - [INFO] - [Epoch 22]: Training Loss: 0.0036546, Valid MRR: 0.12296,
2023-05-02 18:02:22,056 - wn_one_to_x - [INFO] - [E:23| 0]: Train Loss:0.0036961, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:03:48,726 - wn_one_to_x - [INFO] - [E:23| 100]: Train Loss:0.0034994, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:05:15,077 - wn_one_to_x - [INFO] - [E:23| 200]: Train Loss:0.0035005, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:06:41,345 - wn_one_to_x - [INFO] - [E:23| 300]: Train Loss:0.0034976, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:08:08,221 - wn_one_to_x - [INFO] - [E:23| 400]: Train Loss:0.0035019, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:09:34,578 - wn_one_to_x - [INFO] - [E:23| 500]: Train Loss:0.0035046, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:11:01,140 - wn_one_to_x - [INFO] - [E:23| 600]: Train Loss:0.0035012, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:12:27,523 - wn_one_to_x - [INFO] - [E:23| 700]: Train Loss:0.0035047, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:13:54,261 - wn_one_to_x - [INFO] - [E:23| 800]: Train Loss:0.0035064, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:15:20,593 - wn_one_to_x - [INFO] - [E:23| 900]: Train Loss:0.003507, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:16:47,361 - wn_one_to_x - [INFO] - [E:23| 1000]: Train Loss:0.0035079, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:18:13,920 - wn_one_to_x - [INFO] - [E:23| 1100]: Train Loss:0.0035103, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:19:40,509 - wn_one_to_x - [INFO] - [E:23| 1200]: Train Loss:0.0035083, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:21:06,966 - wn_one_to_x - [INFO] - [E:23| 1300]: Train Loss:0.0035072, Val MRR:0.12296, wn_one_to_x
2023-05-02 18:21:55,449 - wn_one_to_x - [INFO] - [Epoch:23]: Training Loss:0.003509
2023-05-02 18:21:55,702 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 18:22:01,308 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 18:22:07,568 - wn_one_to_x - [INFO] - [Evaluating Epoch 23 valid]:
MRR: Tail : 0.14371, Head : 0.12316, Avg : 0.13344
2023-05-02 18:22:09,263 - wn_one_to_x - [INFO] - [Epoch 23]: Training Loss: 0.0035093, Valid MRR: 0.13344,
2023-05-02 18:22:10,142 - wn_one_to_x - [INFO] - [E:24| 0]: Train Loss:0.0031617, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:23:36,460 - wn_one_to_x - [INFO] - [E:24| 100]: Train Loss:0.0033385, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:25:02,901 - wn_one_to_x - [INFO] - [E:24| 200]: Train Loss:0.0033499, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:26:29,288 - wn_one_to_x - [INFO] - [E:24| 300]: Train Loss:0.0033479, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:27:55,583 - wn_one_to_x - [INFO] - [E:24| 400]: Train Loss:0.0033584, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:29:25,591 - wn_one_to_x - [INFO] - [E:24| 500]: Train Loss:0.0033619, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:30:53,473 - wn_one_to_x - [INFO] - [E:24| 600]: Train Loss:0.0033655, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:32:21,392 - wn_one_to_x - [INFO] - [E:24| 700]: Train Loss:0.0033645, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:33:50,717 - wn_one_to_x - [INFO] - [E:24| 800]: Train Loss:0.003367, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:35:18,503 - wn_one_to_x - [INFO] - [E:24| 900]: Train Loss:0.0033699, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:36:45,510 - wn_one_to_x - [INFO] - [E:24| 1000]: Train Loss:0.0033722, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:38:13,161 - wn_one_to_x - [INFO] - [E:24| 1100]: Train Loss:0.0033706, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:39:40,692 - wn_one_to_x - [INFO] - [E:24| 1200]: Train Loss:0.0033727, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:41:08,167 - wn_one_to_x - [INFO] - [E:24| 1300]: Train Loss:0.0033716, Val MRR:0.13344, wn_one_to_x
2023-05-02 18:41:57,161 - wn_one_to_x - [INFO] - [Epoch:24]: Training Loss:0.003372
2023-05-02 18:41:57,415 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 18:42:03,037 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 18:42:08,320 - wn_one_to_x - [INFO] - [Evaluating Epoch 24 valid]:
MRR: Tail : 0.1558, Head : 0.13769, Avg : 0.14675
2023-05-02 18:42:10,276 - wn_one_to_x - [INFO] - [Epoch 24]: Training Loss: 0.0033723, Valid MRR: 0.14675,
2023-05-02 18:42:11,246 - wn_one_to_x - [INFO] - [E:25| 0]: Train Loss:0.0034823, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:43:38,828 - wn_one_to_x - [INFO] - [E:25| 100]: Train Loss:0.0032338, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:45:06,584 - wn_one_to_x - [INFO] - [E:25| 200]: Train Loss:0.0032333, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:46:34,432 - wn_one_to_x - [INFO] - [E:25| 300]: Train Loss:0.0032291, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:48:01,946 - wn_one_to_x - [INFO] - [E:25| 400]: Train Loss:0.0032317, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:49:29,698 - wn_one_to_x - [INFO] - [E:25| 500]: Train Loss:0.0032334, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:50:57,530 - wn_one_to_x - [INFO] - [E:25| 600]: Train Loss:0.0032392, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:52:25,282 - wn_one_to_x - [INFO] - [E:25| 700]: Train Loss:0.0032393, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:53:53,088 - wn_one_to_x - [INFO] - [E:25| 800]: Train Loss:0.0032399, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:55:20,555 - wn_one_to_x - [INFO] - [E:25| 900]: Train Loss:0.003239, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:56:48,834 - wn_one_to_x - [INFO] - [E:25| 1000]: Train Loss:0.0032403, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:58:17,051 - wn_one_to_x - [INFO] - [E:25| 1100]: Train Loss:0.0032417, Val MRR:0.14675, wn_one_to_x
2023-05-02 18:59:45,999 - wn_one_to_x - [INFO] - [E:25| 1200]: Train Loss:0.0032409, Val MRR:0.14675, wn_one_to_x
2023-05-02 19:01:13,728 - wn_one_to_x - [INFO] - [E:25| 1300]: Train Loss:0.0032402, Val MRR:0.14675, wn_one_to_x
2023-05-02 19:02:02,466 - wn_one_to_x - [INFO] - [Epoch:25]: Training Loss:0.003241
2023-05-02 19:02:02,709 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 19:02:08,595 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 19:02:14,178 - wn_one_to_x - [INFO] - [Evaluating Epoch 25 valid]:
MRR: Tail : 0.16734, Head : 0.14727, Avg : 0.1573
2023-05-02 19:02:16,311 - wn_one_to_x - [INFO] - [Epoch 25]: Training Loss: 0.0032413, Valid MRR: 0.1573,
2023-05-02 19:02:17,302 - wn_one_to_x - [INFO] - [E:26| 0]: Train Loss:0.003036, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:03:44,622 - wn_one_to_x - [INFO] - [E:26| 100]: Train Loss:0.0031107, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:05:12,683 - wn_one_to_x - [INFO] - [E:26| 200]: Train Loss:0.0031143, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:06:40,654 - wn_one_to_x - [INFO] - [E:26| 300]: Train Loss:0.0031078, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:08:09,595 - wn_one_to_x - [INFO] - [E:26| 400]: Train Loss:0.0031122, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:09:37,399 - wn_one_to_x - [INFO] - [E:26| 500]: Train Loss:0.003113, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:11:05,846 - wn_one_to_x - [INFO] - [E:26| 600]: Train Loss:0.0031151, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:12:33,228 - wn_one_to_x - [INFO] - [E:26| 700]: Train Loss:0.0031157, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:14:00,779 - wn_one_to_x - [INFO] - [E:26| 800]: Train Loss:0.0031162, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:15:29,093 - wn_one_to_x - [INFO] - [E:26| 900]: Train Loss:0.0031153, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:16:57,533 - wn_one_to_x - [INFO] - [E:26| 1000]: Train Loss:0.0031159, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:18:25,198 - wn_one_to_x - [INFO] - [E:26| 1100]: Train Loss:0.0031161, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:19:53,632 - wn_one_to_x - [INFO] - [E:26| 1200]: Train Loss:0.0031152, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:21:21,523 - wn_one_to_x - [INFO] - [E:26| 1300]: Train Loss:0.0031167, Val MRR:0.1573, wn_one_to_x
2023-05-02 19:22:10,203 - wn_one_to_x - [INFO] - [Epoch:26]: Training Loss:0.003117
2023-05-02 19:22:10,444 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 19:22:16,828 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 19:22:22,096 - wn_one_to_x - [INFO] - [Evaluating Epoch 26 valid]:
MRR: Tail : 0.17825, Head : 0.1611, Avg : 0.16968
2023-05-02 19:22:24,208 - wn_one_to_x - [INFO] - [Epoch 26]: Training Loss: 0.0031167, Valid MRR: 0.16968,
2023-05-02 19:22:25,213 - wn_one_to_x - [INFO] - [E:27| 0]: Train Loss:0.0029606, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:23:53,091 - wn_one_to_x - [INFO] - [E:27| 100]: Train Loss:0.0029824, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:25:21,190 - wn_one_to_x - [INFO] - [E:27| 200]: Train Loss:0.0029836, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:26:49,323 - wn_one_to_x - [INFO] - [E:27| 300]: Train Loss:0.0029911, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:28:17,207 - wn_one_to_x - [INFO] - [E:27| 400]: Train Loss:0.0029952, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:29:45,334 - wn_one_to_x - [INFO] - [E:27| 500]: Train Loss:0.0029941, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:31:13,532 - wn_one_to_x - [INFO] - [E:27| 600]: Train Loss:0.0029962, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:32:41,334 - wn_one_to_x - [INFO] - [E:27| 700]: Train Loss:0.0029954, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:34:09,221 - wn_one_to_x - [INFO] - [E:27| 800]: Train Loss:0.002995, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:35:36,345 - wn_one_to_x - [INFO] - [E:27| 900]: Train Loss:0.0029951, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:37:04,844 - wn_one_to_x - [INFO] - [E:27| 1000]: Train Loss:0.0029961, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:38:32,299 - wn_one_to_x - [INFO] - [E:27| 1100]: Train Loss:0.0029957, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:40:00,990 - wn_one_to_x - [INFO] - [E:27| 1200]: Train Loss:0.0029958, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:41:30,285 - wn_one_to_x - [INFO] - [E:27| 1300]: Train Loss:0.0029986, Val MRR:0.16968, wn_one_to_x
2023-05-02 19:42:19,679 - wn_one_to_x - [INFO] - [Epoch:27]: Training Loss:0.002999
2023-05-02 19:42:19,912 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 19:42:25,385 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 19:42:30,709 - wn_one_to_x - [INFO] - [Evaluating Epoch 27 valid]:
MRR: Tail : 0.19466, Head : 0.16753, Avg : 0.1811
2023-05-02 19:42:32,800 - wn_one_to_x - [INFO] - [Epoch 27]: Training Loss: 0.0029993, Valid MRR: 0.1811,
2023-05-02 19:42:33,697 - wn_one_to_x - [INFO] - [E:28| 0]: Train Loss:0.0028866, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:44:01,252 - wn_one_to_x - [INFO] - [E:28| 100]: Train Loss:0.0028807, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:45:29,081 - wn_one_to_x - [INFO] - [E:28| 200]: Train Loss:0.0028705, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:46:58,408 - wn_one_to_x - [INFO] - [E:28| 300]: Train Loss:0.0028754, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:48:27,125 - wn_one_to_x - [INFO] - [E:28| 400]: Train Loss:0.0028769, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:49:55,748 - wn_one_to_x - [INFO] - [E:28| 500]: Train Loss:0.0028794, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:51:24,211 - wn_one_to_x - [INFO] - [E:28| 600]: Train Loss:0.0028805, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:52:52,531 - wn_one_to_x - [INFO] - [E:28| 700]: Train Loss:0.0028797, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:54:20,999 - wn_one_to_x - [INFO] - [E:28| 800]: Train Loss:0.0028818, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:55:48,383 - wn_one_to_x - [INFO] - [E:28| 900]: Train Loss:0.0028846, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:57:16,283 - wn_one_to_x - [INFO] - [E:28| 1000]: Train Loss:0.0028865, Val MRR:0.1811, wn_one_to_x
2023-05-02 19:58:44,199 - wn_one_to_x - [INFO] - [E:28| 1100]: Train Loss:0.0028873, Val MRR:0.1811, wn_one_to_x
2023-05-02 20:00:12,612 - wn_one_to_x - [INFO] - [E:28| 1200]: Train Loss:0.0028878, Val MRR:0.1811, wn_one_to_x
2023-05-02 20:01:40,393 - wn_one_to_x - [INFO] - [E:28| 1300]: Train Loss:0.0028876, Val MRR:0.1811, wn_one_to_x
2023-05-02 20:02:30,190 - wn_one_to_x - [INFO] - [Epoch:28]: Training Loss:0.002888
2023-05-02 20:02:30,877 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 20:02:37,931 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 20:02:43,454 - wn_one_to_x - [INFO] - [Evaluating Epoch 28 valid]:
MRR: Tail : 0.2022, Head : 0.18123, Avg : 0.19172
2023-05-02 20:02:45,562 - wn_one_to_x - [INFO] - [Epoch 28]: Training Loss: 0.0028881, Valid MRR: 0.19172,
2023-05-02 20:02:46,438 - wn_one_to_x - [INFO] - [E:29| 0]: Train Loss:0.0028415, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:04:14,370 - wn_one_to_x - [INFO] - [E:29| 100]: Train Loss:0.0027545, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:05:42,748 - wn_one_to_x - [INFO] - [E:29| 200]: Train Loss:0.002762, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:07:10,583 - wn_one_to_x - [INFO] - [E:29| 300]: Train Loss:0.0027696, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:08:38,373 - wn_one_to_x - [INFO] - [E:29| 400]: Train Loss:0.0027628, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:10:06,157 - wn_one_to_x - [INFO] - [E:29| 500]: Train Loss:0.0027639, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:11:33,976 - wn_one_to_x - [INFO] - [E:29| 600]: Train Loss:0.0027657, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:13:01,779 - wn_one_to_x - [INFO] - [E:29| 700]: Train Loss:0.002768, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:14:29,638 - wn_one_to_x - [INFO] - [E:29| 800]: Train Loss:0.0027695, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:15:57,452 - wn_one_to_x - [INFO] - [E:29| 900]: Train Loss:0.0027713, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:17:25,718 - wn_one_to_x - [INFO] - [E:29| 1000]: Train Loss:0.0027747, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:18:53,309 - wn_one_to_x - [INFO] - [E:29| 1100]: Train Loss:0.0027783, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:20:22,498 - wn_one_to_x - [INFO] - [E:29| 1200]: Train Loss:0.0027784, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:21:50,380 - wn_one_to_x - [INFO] - [E:29| 1300]: Train Loss:0.002781, Val MRR:0.19172, wn_one_to_x
2023-05-02 20:22:39,299 - wn_one_to_x - [INFO] - [Epoch:29]: Training Loss:0.002782
2023-05-02 20:22:39,547 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 20:22:45,868 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 20:22:51,179 - wn_one_to_x - [INFO] - [Evaluating Epoch 29 valid]:
MRR: Tail : 0.21599, Head : 0.19566, Avg : 0.20582
MR: Tail : 2998.0, Head : 5164.9, Avg : 4081.4
Hit-1: Tail : 0.16216, Head : 0.15359, Avg : 0.15788
Hit-3: Tail : 0.23665, Head : 0.21061, Avg : 0.22363
Hit-10: Tail : 0.31971, Head : 0.27258, Avg : 0.29614
2023-05-02 20:22:53,251 - wn_one_to_x - [INFO] - [Epoch 29]: Training Loss: 0.0027817, Valid MRR: 0.20582,
2023-05-02 20:22:54,142 - wn_one_to_x - [INFO] - [E:30| 0]: Train Loss:0.0027621, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:24:22,901 - wn_one_to_x - [INFO] - [E:30| 100]: Train Loss:0.0026627, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:25:50,815 - wn_one_to_x - [INFO] - [E:30| 200]: Train Loss:0.0026508, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:27:20,138 - wn_one_to_x - [INFO] - [E:30| 300]: Train Loss:0.0026501, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:28:47,609 - wn_one_to_x - [INFO] - [E:30| 400]: Train Loss:0.0026535, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:30:16,347 - wn_one_to_x - [INFO] - [E:30| 500]: Train Loss:0.0026587, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:31:44,714 - wn_one_to_x - [INFO] - [E:30| 600]: Train Loss:0.0026637, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:33:12,821 - wn_one_to_x - [INFO] - [E:30| 700]: Train Loss:0.0026664, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:34:40,396 - wn_one_to_x - [INFO] - [E:30| 800]: Train Loss:0.0026705, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:36:08,908 - wn_one_to_x - [INFO] - [E:30| 900]: Train Loss:0.0026737, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:37:37,195 - wn_one_to_x - [INFO] - [E:30| 1000]: Train Loss:0.0026765, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:39:05,012 - wn_one_to_x - [INFO] - [E:30| 1100]: Train Loss:0.0026788, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:40:32,562 - wn_one_to_x - [INFO] - [E:30| 1200]: Train Loss:0.0026811, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:42:00,149 - wn_one_to_x - [INFO] - [E:30| 1300]: Train Loss:0.0026814, Val MRR:0.20582, wn_one_to_x
2023-05-02 20:42:48,814 - wn_one_to_x - [INFO] - [Epoch:30]: Training Loss:0.002683
2023-05-02 20:42:49,062 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 20:42:54,544 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 20:43:01,161 - wn_one_to_x - [INFO] - [Evaluating Epoch 30 valid]:
MRR: Tail : 0.22702, Head : 0.20301, Avg : 0.21501
2023-05-02 20:43:03,136 - wn_one_to_x - [INFO] - [Epoch 30]: Training Loss: 0.0026827, Valid MRR: 0.21501,
2023-05-02 20:43:04,036 - wn_one_to_x - [INFO] - [E:31| 0]: Train Loss:0.0025914, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:44:31,977 - wn_one_to_x - [INFO] - [E:31| 100]: Train Loss:0.0025592, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:46:00,144 - wn_one_to_x - [INFO] - [E:31| 200]: Train Loss:0.0025605, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:47:27,395 - wn_one_to_x - [INFO] - [E:31| 300]: Train Loss:0.0025633, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:48:54,539 - wn_one_to_x - [INFO] - [E:31| 400]: Train Loss:0.0025664, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:50:22,244 - wn_one_to_x - [INFO] - [E:31| 500]: Train Loss:0.0025712, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:51:49,529 - wn_one_to_x - [INFO] - [E:31| 600]: Train Loss:0.0025738, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:53:17,588 - wn_one_to_x - [INFO] - [E:31| 700]: Train Loss:0.002576, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:54:45,201 - wn_one_to_x - [INFO] - [E:31| 800]: Train Loss:0.0025803, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:56:12,595 - wn_one_to_x - [INFO] - [E:31| 900]: Train Loss:0.0025814, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:57:40,391 - wn_one_to_x - [INFO] - [E:31| 1000]: Train Loss:0.002582, Val MRR:0.21501, wn_one_to_x
2023-05-02 20:59:08,695 - wn_one_to_x - [INFO] - [E:31| 1100]: Train Loss:0.0025852, Val MRR:0.21501, wn_one_to_x
2023-05-02 21:00:36,516 - wn_one_to_x - [INFO] - [E:31| 1200]: Train Loss:0.0025881, Val MRR:0.21501, wn_one_to_x
2023-05-02 21:02:05,320 - wn_one_to_x - [INFO] - [E:31| 1300]: Train Loss:0.0025879, Val MRR:0.21501, wn_one_to_x
2023-05-02 21:02:53,714 - wn_one_to_x - [INFO] - [Epoch:31]: Training Loss:0.002588
2023-05-02 21:02:53,958 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 21:02:59,622 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 21:03:04,916 - wn_one_to_x - [INFO] - [Evaluating Epoch 31 valid]:
MRR: Tail : 0.23796, Head : 0.21405, Avg : 0.22601
2023-05-02 21:03:06,979 - wn_one_to_x - [INFO] - [Epoch 31]: Training Loss: 0.0025878, Valid MRR: 0.22601,
2023-05-02 21:03:07,908 - wn_one_to_x - [INFO] - [E:32| 0]: Train Loss:0.0025451, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:04:35,723 - wn_one_to_x - [INFO] - [E:32| 100]: Train Loss:0.002466, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:06:03,582 - wn_one_to_x - [INFO] - [E:32| 200]: Train Loss:0.0024665, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:07:32,340 - wn_one_to_x - [INFO] - [E:32| 300]: Train Loss:0.0024736, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:08:59,142 - wn_one_to_x - [INFO] - [E:32| 400]: Train Loss:0.0024766, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:10:25,768 - wn_one_to_x - [INFO] - [E:32| 500]: Train Loss:0.0024792, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:11:52,396 - wn_one_to_x - [INFO] - [E:32| 600]: Train Loss:0.0024842, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:13:18,821 - wn_one_to_x - [INFO] - [E:32| 700]: Train Loss:0.0024865, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:14:45,267 - wn_one_to_x - [INFO] - [E:32| 800]: Train Loss:0.0024896, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:16:11,848 - wn_one_to_x - [INFO] - [E:32| 900]: Train Loss:0.0024921, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:17:38,455 - wn_one_to_x - [INFO] - [E:32| 1000]: Train Loss:0.0024918, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:19:05,021 - wn_one_to_x - [INFO] - [E:32| 1100]: Train Loss:0.0024943, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:20:31,848 - wn_one_to_x - [INFO] - [E:32| 1200]: Train Loss:0.0024963, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:21:58,480 - wn_one_to_x - [INFO] - [E:32| 1300]: Train Loss:0.0024969, Val MRR:0.22601, wn_one_to_x
2023-05-02 21:22:46,561 - wn_one_to_x - [INFO] - [Epoch:32]: Training Loss:0.002497
2023-05-02 21:22:46,805 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 21:22:52,481 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 21:22:57,907 - wn_one_to_x - [INFO] - [Evaluating Epoch 32 valid]:
MRR: Tail : 0.24664, Head : 0.22165, Avg : 0.23415
2023-05-02 21:23:00,070 - wn_one_to_x - [INFO] - [Epoch 32]: Training Loss: 0.0024969, Valid MRR: 0.23415,
2023-05-02 21:23:00,940 - wn_one_to_x - [INFO] - [E:33| 0]: Train Loss:0.0025767, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:24:27,326 - wn_one_to_x - [INFO] - [E:33| 100]: Train Loss:0.0023995, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:25:53,546 - wn_one_to_x - [INFO] - [E:33| 200]: Train Loss:0.0023981, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:27:20,560 - wn_one_to_x - [INFO] - [E:33| 300]: Train Loss:0.0023949, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:28:46,539 - wn_one_to_x - [INFO] - [E:33| 400]: Train Loss:0.0023996, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:30:13,019 - wn_one_to_x - [INFO] - [E:33| 500]: Train Loss:0.0024023, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:31:39,648 - wn_one_to_x - [INFO] - [E:33| 600]: Train Loss:0.002406, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:33:06,051 - wn_one_to_x - [INFO] - [E:33| 700]: Train Loss:0.0024065, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:34:32,399 - wn_one_to_x - [INFO] - [E:33| 800]: Train Loss:0.0024085, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:35:59,233 - wn_one_to_x - [INFO] - [E:33| 900]: Train Loss:0.0024106, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:37:25,682 - wn_one_to_x - [INFO] - [E:33| 1000]: Train Loss:0.0024113, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:38:52,362 - wn_one_to_x - [INFO] - [E:33| 1100]: Train Loss:0.0024137, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:40:18,649 - wn_one_to_x - [INFO] - [E:33| 1200]: Train Loss:0.0024126, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:41:45,461 - wn_one_to_x - [INFO] - [E:33| 1300]: Train Loss:0.0024161, Val MRR:0.23415, wn_one_to_x
2023-05-02 21:42:33,573 - wn_one_to_x - [INFO] - [Epoch:33]: Training Loss:0.002415
2023-05-02 21:42:33,818 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 21:42:39,383 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 21:42:44,748 - wn_one_to_x - [INFO] - [Evaluating Epoch 33 valid]:
MRR: Tail : 0.25732, Head : 0.23165, Avg : 0.24449
2023-05-02 21:42:46,714 - wn_one_to_x - [INFO] - [Epoch 33]: Training Loss: 0.0024155, Valid MRR: 0.24449,
2023-05-02 21:42:47,603 - wn_one_to_x - [INFO] - [E:34| 0]: Train Loss:0.0023211, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:44:14,485 - wn_one_to_x - [INFO] - [E:34| 100]: Train Loss:0.0023049, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:45:41,077 - wn_one_to_x - [INFO] - [E:34| 200]: Train Loss:0.0023146, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:47:07,296 - wn_one_to_x - [INFO] - [E:34| 300]: Train Loss:0.002317, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:48:33,637 - wn_one_to_x - [INFO] - [E:34| 400]: Train Loss:0.0023204, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:49:59,578 - wn_one_to_x - [INFO] - [E:34| 500]: Train Loss:0.0023192, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:51:26,172 - wn_one_to_x - [INFO] - [E:34| 600]: Train Loss:0.0023203, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:52:52,673 - wn_one_to_x - [INFO] - [E:34| 700]: Train Loss:0.0023218, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:54:19,107 - wn_one_to_x - [INFO] - [E:34| 800]: Train Loss:0.0023219, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:55:45,760 - wn_one_to_x - [INFO] - [E:34| 900]: Train Loss:0.0023252, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:57:12,309 - wn_one_to_x - [INFO] - [E:34| 1000]: Train Loss:0.0023272, Val MRR:0.24449, wn_one_to_x
2023-05-02 21:58:38,564 - wn_one_to_x - [INFO] - [E:34| 1100]: Train Loss:0.0023292, Val MRR:0.24449, wn_one_to_x
2023-05-02 22:00:05,008 - wn_one_to_x - [INFO] - [E:34| 1200]: Train Loss:0.00233, Val MRR:0.24449, wn_one_to_x
2023-05-02 22:01:31,223 - wn_one_to_x - [INFO] - [E:34| 1300]: Train Loss:0.0023332, Val MRR:0.24449, wn_one_to_x
2023-05-02 22:02:19,707 - wn_one_to_x - [INFO] - [Epoch:34]: Training Loss:0.002333
2023-05-02 22:02:19,951 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 22:02:25,598 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 22:02:30,976 - wn_one_to_x - [INFO] - [Evaluating Epoch 34 valid]:
MRR: Tail : 0.26447, Head : 0.24104, Avg : 0.25276
2023-05-02 22:02:33,025 - wn_one_to_x - [INFO] - [Epoch 34]: Training Loss: 0.0023335, Valid MRR: 0.25276,
2023-05-02 22:02:33,901 - wn_one_to_x - [INFO] - [E:35| 0]: Train Loss:0.0022711, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:04:00,120 - wn_one_to_x - [INFO] - [E:35| 100]: Train Loss:0.0022552, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:05:26,378 - wn_one_to_x - [INFO] - [E:35| 200]: Train Loss:0.0022396, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:06:52,822 - wn_one_to_x - [INFO] - [E:35| 300]: Train Loss:0.0022423, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:08:19,213 - wn_one_to_x - [INFO] - [E:35| 400]: Train Loss:0.0022475, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:09:45,610 - wn_one_to_x - [INFO] - [E:35| 500]: Train Loss:0.0022473, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:11:12,252 - wn_one_to_x - [INFO] - [E:35| 600]: Train Loss:0.0022496, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:12:38,727 - wn_one_to_x - [INFO] - [E:35| 700]: Train Loss:0.0022506, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:14:05,273 - wn_one_to_x - [INFO] - [E:35| 800]: Train Loss:0.0022539, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:15:31,642 - wn_one_to_x - [INFO] - [E:35| 900]: Train Loss:0.0022538, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:16:57,879 - wn_one_to_x - [INFO] - [E:35| 1000]: Train Loss:0.0022544, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:18:24,321 - wn_one_to_x - [INFO] - [E:35| 1100]: Train Loss:0.0022544, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:19:50,764 - wn_one_to_x - [INFO] - [E:35| 1200]: Train Loss:0.0022559, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:21:17,604 - wn_one_to_x - [INFO] - [E:35| 1300]: Train Loss:0.0022578, Val MRR:0.25276, wn_one_to_x
2023-05-02 22:22:06,283 - wn_one_to_x - [INFO] - [Epoch:35]: Training Loss:0.002259
2023-05-02 22:22:06,526 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 22:22:12,174 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 22:22:17,780 - wn_one_to_x - [INFO] - [Evaluating Epoch 35 valid]:
MRR: Tail : 0.27532, Head : 0.24558, Avg : 0.26045
2023-05-02 22:22:19,766 - wn_one_to_x - [INFO] - [Epoch 35]: Training Loss: 0.002259, Valid MRR: 0.26045,
2023-05-02 22:22:20,644 - wn_one_to_x - [INFO] - [E:36| 0]: Train Loss:0.0022471, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:23:47,307 - wn_one_to_x - [INFO] - [E:36| 100]: Train Loss:0.0021525, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:25:14,118 - wn_one_to_x - [INFO] - [E:36| 200]: Train Loss:0.0021539, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:26:40,676 - wn_one_to_x - [INFO] - [E:36| 300]: Train Loss:0.0021643, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:28:07,190 - wn_one_to_x - [INFO] - [E:36| 400]: Train Loss:0.0021722, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:29:33,808 - wn_one_to_x - [INFO] - [E:36| 500]: Train Loss:0.0021701, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:31:00,239 - wn_one_to_x - [INFO] - [E:36| 600]: Train Loss:0.002176, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:32:26,673 - wn_one_to_x - [INFO] - [E:36| 700]: Train Loss:0.0021784, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:33:52,714 - wn_one_to_x - [INFO] - [E:36| 800]: Train Loss:0.0021784, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:35:19,200 - wn_one_to_x - [INFO] - [E:36| 900]: Train Loss:0.0021793, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:36:45,658 - wn_one_to_x - [INFO] - [E:36| 1000]: Train Loss:0.0021807, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:38:12,077 - wn_one_to_x - [INFO] - [E:36| 1100]: Train Loss:0.0021843, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:39:38,389 - wn_one_to_x - [INFO] - [E:36| 1200]: Train Loss:0.0021851, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:41:04,476 - wn_one_to_x - [INFO] - [E:36| 1300]: Train Loss:0.0021869, Val MRR:0.26045, wn_one_to_x
2023-05-02 22:41:52,781 - wn_one_to_x - [INFO] - [Epoch:36]: Training Loss:0.002188
2023-05-02 22:41:53,029 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 22:41:58,844 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 22:42:04,230 - wn_one_to_x - [INFO] - [Evaluating Epoch 36 valid]:
MRR: Tail : 0.28097, Head : 0.25398, Avg : 0.26748
2023-05-02 22:42:07,189 - wn_one_to_x - [INFO] - [Epoch 36]: Training Loss: 0.0021876, Valid MRR: 0.26748,
2023-05-02 22:42:08,139 - wn_one_to_x - [INFO] - [E:37| 0]: Train Loss:0.0020678, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:43:34,700 - wn_one_to_x - [INFO] - [E:37| 100]: Train Loss:0.0020992, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:45:00,883 - wn_one_to_x - [INFO] - [E:37| 200]: Train Loss:0.0021075, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:46:26,744 - wn_one_to_x - [INFO] - [E:37| 300]: Train Loss:0.0021063, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:47:53,316 - wn_one_to_x - [INFO] - [E:37| 400]: Train Loss:0.0021087, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:49:19,713 - wn_one_to_x - [INFO] - [E:37| 500]: Train Loss:0.0021113, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:50:46,196 - wn_one_to_x - [INFO] - [E:37| 600]: Train Loss:0.0021141, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:52:12,857 - wn_one_to_x - [INFO] - [E:37| 700]: Train Loss:0.0021125, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:53:39,422 - wn_one_to_x - [INFO] - [E:37| 800]: Train Loss:0.0021125, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:55:06,060 - wn_one_to_x - [INFO] - [E:37| 900]: Train Loss:0.0021148, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:56:32,707 - wn_one_to_x - [INFO] - [E:37| 1000]: Train Loss:0.0021166, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:57:59,016 - wn_one_to_x - [INFO] - [E:37| 1100]: Train Loss:0.0021182, Val MRR:0.26748, wn_one_to_x
2023-05-02 22:59:25,442 - wn_one_to_x - [INFO] - [E:37| 1200]: Train Loss:0.0021208, Val MRR:0.26748, wn_one_to_x
2023-05-02 23:00:51,632 - wn_one_to_x - [INFO] - [E:37| 1300]: Train Loss:0.0021206, Val MRR:0.26748, wn_one_to_x
2023-05-02 23:01:39,722 - wn_one_to_x - [INFO] - [Epoch:37]: Training Loss:0.002121
2023-05-02 23:01:39,968 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 23:01:45,455 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 23:01:50,807 - wn_one_to_x - [INFO] - [Evaluating Epoch 37 valid]:
MRR: Tail : 0.28935, Head : 0.26236, Avg : 0.27586
2023-05-02 23:01:52,719 - wn_one_to_x - [INFO] - [Epoch 37]: Training Loss: 0.0021208, Valid MRR: 0.27586,
2023-05-02 23:01:53,603 - wn_one_to_x - [INFO] - [E:38| 0]: Train Loss:0.0020754, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:03:20,098 - wn_one_to_x - [INFO] - [E:38| 100]: Train Loss:0.0020325, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:04:46,698 - wn_one_to_x - [INFO] - [E:38| 200]: Train Loss:0.0020337, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:06:12,750 - wn_one_to_x - [INFO] - [E:38| 300]: Train Loss:0.002031, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:07:39,267 - wn_one_to_x - [INFO] - [E:38| 400]: Train Loss:0.0020365, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:09:05,539 - wn_one_to_x - [INFO] - [E:38| 500]: Train Loss:0.0020378, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:10:32,194 - wn_one_to_x - [INFO] - [E:38| 600]: Train Loss:0.0020419, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:11:58,781 - wn_one_to_x - [INFO] - [E:38| 700]: Train Loss:0.0020438, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:13:24,868 - wn_one_to_x - [INFO] - [E:38| 800]: Train Loss:0.002045, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:14:51,101 - wn_one_to_x - [INFO] - [E:38| 900]: Train Loss:0.0020466, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:16:17,687 - wn_one_to_x - [INFO] - [E:38| 1000]: Train Loss:0.0020501, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:17:44,288 - wn_one_to_x - [INFO] - [E:38| 1100]: Train Loss:0.0020525, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:19:10,822 - wn_one_to_x - [INFO] - [E:38| 1200]: Train Loss:0.0020567, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:20:37,166 - wn_one_to_x - [INFO] - [E:38| 1300]: Train Loss:0.0020586, Val MRR:0.27586, wn_one_to_x
2023-05-02 23:21:25,666 - wn_one_to_x - [INFO] - [Epoch:38]: Training Loss:0.002058
2023-05-02 23:21:25,914 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 23:21:31,614 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 23:21:37,234 - wn_one_to_x - [INFO] - [Evaluating Epoch 38 valid]:
MRR: Tail : 0.29696, Head : 0.27315, Avg : 0.28505
2023-05-02 23:21:42,399 - wn_one_to_x - [INFO] - [Epoch 38]: Training Loss: 0.0020584, Valid MRR: 0.28505,
2023-05-02 23:21:43,277 - wn_one_to_x - [INFO] - [E:39| 0]: Train Loss:0.0019397, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:23:10,120 - wn_one_to_x - [INFO] - [E:39| 100]: Train Loss:0.0019586, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:24:36,711 - wn_one_to_x - [INFO] - [E:39| 200]: Train Loss:0.0019665, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:26:03,375 - wn_one_to_x - [INFO] - [E:39| 300]: Train Loss:0.0019727, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:27:30,010 - wn_one_to_x - [INFO] - [E:39| 400]: Train Loss:0.0019827, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:28:56,525 - wn_one_to_x - [INFO] - [E:39| 500]: Train Loss:0.0019866, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:30:22,650 - wn_one_to_x - [INFO] - [E:39| 600]: Train Loss:0.0019901, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:31:49,367 - wn_one_to_x - [INFO] - [E:39| 700]: Train Loss:0.0019905, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:33:15,993 - wn_one_to_x - [INFO] - [E:39| 800]: Train Loss:0.0019914, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:34:42,527 - wn_one_to_x - [INFO] - [E:39| 900]: Train Loss:0.0019919, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:36:08,857 - wn_one_to_x - [INFO] - [E:39| 1000]: Train Loss:0.0019935, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:37:35,727 - wn_one_to_x - [INFO] - [E:39| 1100]: Train Loss:0.0019941, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:39:02,382 - wn_one_to_x - [INFO] - [E:39| 1200]: Train Loss:0.0019978, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:40:28,958 - wn_one_to_x - [INFO] - [E:39| 1300]: Train Loss:0.0019986, Val MRR:0.28505, wn_one_to_x
2023-05-02 23:41:17,173 - wn_one_to_x - [INFO] - [Epoch:39]: Training Loss:0.002
2023-05-02 23:41:17,416 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-02 23:41:22,962 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-02 23:41:28,559 - wn_one_to_x - [INFO] - [Evaluating Epoch 39 valid]:
MRR: Tail : 0.30397, Head : 0.27834, Avg : 0.29116
MR: Tail : 3127.6, Head : 4513.2, Avg : 3820.4
Hit-1: Tail : 0.25181, Head : 0.2327, Avg : 0.24225
Hit-3: Tail : 0.32927, Head : 0.30125, Avg : 0.31526
Hit-10: Tail : 0.39848, Head : 0.35959, Avg : 0.37904
2023-05-02 23:41:30,398 - wn_one_to_x - [INFO] - [Epoch 39]: Training Loss: 0.0019998, Valid MRR: 0.29116,
2023-05-02 23:41:31,277 - wn_one_to_x - [INFO] - [E:40| 0]: Train Loss:0.0018405, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:42:57,511 - wn_one_to_x - [INFO] - [E:40| 100]: Train Loss:0.0019119, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:44:24,110 - wn_one_to_x - [INFO] - [E:40| 200]: Train Loss:0.0019135, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:45:50,069 - wn_one_to_x - [INFO] - [E:40| 300]: Train Loss:0.0019109, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:47:16,344 - wn_one_to_x - [INFO] - [E:40| 400]: Train Loss:0.0019165, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:48:42,833 - wn_one_to_x - [INFO] - [E:40| 500]: Train Loss:0.0019202, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:50:09,262 - wn_one_to_x - [INFO] - [E:40| 600]: Train Loss:0.0019241, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:51:35,959 - wn_one_to_x - [INFO] - [E:40| 700]: Train Loss:0.0019264, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:53:02,705 - wn_one_to_x - [INFO] - [E:40| 800]: Train Loss:0.0019298, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:54:29,294 - wn_one_to_x - [INFO] - [E:40| 900]: Train Loss:0.0019325, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:55:55,770 - wn_one_to_x - [INFO] - [E:40| 1000]: Train Loss:0.0019342, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:57:22,561 - wn_one_to_x - [INFO] - [E:40| 1100]: Train Loss:0.0019343, Val MRR:0.29116, wn_one_to_x
2023-05-02 23:58:49,101 - wn_one_to_x - [INFO] - [E:40| 1200]: Train Loss:0.0019375, Val MRR:0.29116, wn_one_to_x
2023-05-03 00:00:15,814 - wn_one_to_x - [INFO] - [E:40| 1300]: Train Loss:0.00194, Val MRR:0.29116, wn_one_to_x
2023-05-03 00:01:04,418 - wn_one_to_x - [INFO] - [Epoch:40]: Training Loss:0.001941
2023-05-03 00:01:04,664 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 00:01:10,236 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 00:01:15,684 - wn_one_to_x - [INFO] - [Evaluating Epoch 40 valid]:
MRR: Tail : 0.31179, Head : 0.28696, Avg : 0.29937
2023-05-03 00:01:17,376 - wn_one_to_x - [INFO] - [Epoch 40]: Training Loss: 0.0019408, Valid MRR: 0.29937,
2023-05-03 00:01:18,321 - wn_one_to_x - [INFO] - [E:41| 0]: Train Loss:0.0018176, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:02:44,660 - wn_one_to_x - [INFO] - [E:41| 100]: Train Loss:0.0018397, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:04:11,250 - wn_one_to_x - [INFO] - [E:41| 200]: Train Loss:0.0018548, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:05:37,443 - wn_one_to_x - [INFO] - [E:41| 300]: Train Loss:0.0018562, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:07:03,988 - wn_one_to_x - [INFO] - [E:41| 400]: Train Loss:0.0018634, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:08:30,282 - wn_one_to_x - [INFO] - [E:41| 500]: Train Loss:0.0018686, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:09:56,754 - wn_one_to_x - [INFO] - [E:41| 600]: Train Loss:0.0018718, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:11:23,310 - wn_one_to_x - [INFO] - [E:41| 700]: Train Loss:0.0018727, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:12:49,683 - wn_one_to_x - [INFO] - [E:41| 800]: Train Loss:0.0018733, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:14:15,845 - wn_one_to_x - [INFO] - [E:41| 900]: Train Loss:0.0018761, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:15:42,578 - wn_one_to_x - [INFO] - [E:41| 1000]: Train Loss:0.0018789, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:17:09,255 - wn_one_to_x - [INFO] - [E:41| 1100]: Train Loss:0.0018791, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:18:35,913 - wn_one_to_x - [INFO] - [E:41| 1200]: Train Loss:0.0018819, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:20:02,747 - wn_one_to_x - [INFO] - [E:41| 1300]: Train Loss:0.0018851, Val MRR:0.29937, wn_one_to_x
2023-05-03 00:20:51,348 - wn_one_to_x - [INFO] - [Epoch:41]: Training Loss:0.001886
2023-05-03 00:20:51,593 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 00:20:57,165 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 00:21:02,490 - wn_one_to_x - [INFO] - [Evaluating Epoch 41 valid]:
MRR: Tail : 0.31283, Head : 0.29537, Avg : 0.3041
2023-05-03 00:21:04,182 - wn_one_to_x - [INFO] - [Epoch 41]: Training Loss: 0.0018864, Valid MRR: 0.3041,
2023-05-03 00:21:05,056 - wn_one_to_x - [INFO] - [E:42| 0]: Train Loss:0.0016679, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:22:31,616 - wn_one_to_x - [INFO] - [E:42| 100]: Train Loss:0.0017893, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:23:58,116 - wn_one_to_x - [INFO] - [E:42| 200]: Train Loss:0.0017988, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:25:24,679 - wn_one_to_x - [INFO] - [E:42| 300]: Train Loss:0.0017993, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:26:51,244 - wn_one_to_x - [INFO] - [E:42| 400]: Train Loss:0.0018049, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:28:17,623 - wn_one_to_x - [INFO] - [E:42| 500]: Train Loss:0.0018086, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:29:44,154 - wn_one_to_x - [INFO] - [E:42| 600]: Train Loss:0.0018152, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:31:10,763 - wn_one_to_x - [INFO] - [E:42| 700]: Train Loss:0.001819, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:32:37,362 - wn_one_to_x - [INFO] - [E:42| 800]: Train Loss:0.0018225, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:34:04,089 - wn_one_to_x - [INFO] - [E:42| 900]: Train Loss:0.0018278, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:35:30,840 - wn_one_to_x - [INFO] - [E:42| 1000]: Train Loss:0.001831, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:36:57,333 - wn_one_to_x - [INFO] - [E:42| 1100]: Train Loss:0.0018314, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:38:23,625 - wn_one_to_x - [INFO] - [E:42| 1200]: Train Loss:0.0018324, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:39:50,328 - wn_one_to_x - [INFO] - [E:42| 1300]: Train Loss:0.0018358, Val MRR:0.3041, wn_one_to_x
2023-05-03 00:40:38,675 - wn_one_to_x - [INFO] - [Epoch:42]: Training Loss:0.001837
2023-05-03 00:40:38,919 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 00:40:44,480 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 00:40:50,018 - wn_one_to_x - [INFO] - [Evaluating Epoch 42 valid]:
MRR: Tail : 0.32172, Head : 0.30183, Avg : 0.31177
2023-05-03 00:40:51,813 - wn_one_to_x - [INFO] - [Epoch 42]: Training Loss: 0.0018372, Valid MRR: 0.31177,
2023-05-03 00:40:52,687 - wn_one_to_x - [INFO] - [E:43| 0]: Train Loss:0.0017226, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:42:18,661 - wn_one_to_x - [INFO] - [E:43| 100]: Train Loss:0.0017564, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:43:44,939 - wn_one_to_x - [INFO] - [E:43| 200]: Train Loss:0.00176, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:45:11,571 - wn_one_to_x - [INFO] - [E:43| 300]: Train Loss:0.0017578, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:46:37,643 - wn_one_to_x - [INFO] - [E:43| 400]: Train Loss:0.0017616, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:48:04,224 - wn_one_to_x - [INFO] - [E:43| 500]: Train Loss:0.0017714, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:49:30,877 - wn_one_to_x - [INFO] - [E:43| 600]: Train Loss:0.0017727, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:50:57,529 - wn_one_to_x - [INFO] - [E:43| 700]: Train Loss:0.0017746, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:52:24,237 - wn_one_to_x - [INFO] - [E:43| 800]: Train Loss:0.0017783, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:53:50,679 - wn_one_to_x - [INFO] - [E:43| 900]: Train Loss:0.0017795, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:55:17,109 - wn_one_to_x - [INFO] - [E:43| 1000]: Train Loss:0.0017809, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:56:43,670 - wn_one_to_x - [INFO] - [E:43| 1100]: Train Loss:0.001782, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:58:10,429 - wn_one_to_x - [INFO] - [E:43| 1200]: Train Loss:0.0017837, Val MRR:0.31177, wn_one_to_x
2023-05-03 00:59:36,929 - wn_one_to_x - [INFO] - [E:43| 1300]: Train Loss:0.0017868, Val MRR:0.31177, wn_one_to_x
2023-05-03 01:00:25,075 - wn_one_to_x - [INFO] - [Epoch:43]: Training Loss:0.001787
2023-05-03 01:00:25,319 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 01:00:30,956 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 01:00:36,225 - wn_one_to_x - [INFO] - [Evaluating Epoch 43 valid]:
MRR: Tail : 0.3282, Head : 0.312, Avg : 0.3201
2023-05-03 01:00:38,002 - wn_one_to_x - [INFO] - [Epoch 43]: Training Loss: 0.0017872, Valid MRR: 0.3201,
2023-05-03 01:00:38,886 - wn_one_to_x - [INFO] - [E:44| 0]: Train Loss:0.0017567, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:02:05,507 - wn_one_to_x - [INFO] - [E:44| 100]: Train Loss:0.001711, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:03:32,160 - wn_one_to_x - [INFO] - [E:44| 200]: Train Loss:0.0017113, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:04:58,528 - wn_one_to_x - [INFO] - [E:44| 300]: Train Loss:0.0017173, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:06:24,944 - wn_one_to_x - [INFO] - [E:44| 400]: Train Loss:0.0017196, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:07:51,116 - wn_one_to_x - [INFO] - [E:44| 500]: Train Loss:0.0017233, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:09:17,814 - wn_one_to_x - [INFO] - [E:44| 600]: Train Loss:0.0017261, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:10:44,314 - wn_one_to_x - [INFO] - [E:44| 700]: Train Loss:0.0017279, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:12:11,267 - wn_one_to_x - [INFO] - [E:44| 800]: Train Loss:0.00173, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:13:37,648 - wn_one_to_x - [INFO] - [E:44| 900]: Train Loss:0.0017324, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:15:04,048 - wn_one_to_x - [INFO] - [E:44| 1000]: Train Loss:0.0017343, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:16:30,557 - wn_one_to_x - [INFO] - [E:44| 1100]: Train Loss:0.0017366, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:17:56,761 - wn_one_to_x - [INFO] - [E:44| 1200]: Train Loss:0.0017382, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:19:23,408 - wn_one_to_x - [INFO] - [E:44| 1300]: Train Loss:0.0017406, Val MRR:0.3201, wn_one_to_x
2023-05-03 01:20:11,507 - wn_one_to_x - [INFO] - [Epoch:44]: Training Loss:0.001742
2023-05-03 01:20:11,765 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 01:20:17,344 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 01:20:22,620 - wn_one_to_x - [INFO] - [Evaluating Epoch 44 valid]:
MRR: Tail : 0.33347, Head : 0.31995, Avg : 0.32671
2023-05-03 01:20:24,321 - wn_one_to_x - [INFO] - [Epoch 44]: Training Loss: 0.0017423, Valid MRR: 0.32671,
2023-05-03 01:20:25,206 - wn_one_to_x - [INFO] - [E:45| 0]: Train Loss:0.0016158, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:21:51,449 - wn_one_to_x - [INFO] - [E:45| 100]: Train Loss:0.0016786, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:23:17,805 - wn_one_to_x - [INFO] - [E:45| 200]: Train Loss:0.0016814, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:24:44,886 - wn_one_to_x - [INFO] - [E:45| 300]: Train Loss:0.0016811, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:26:11,389 - wn_one_to_x - [INFO] - [E:45| 400]: Train Loss:0.0016836, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:27:37,993 - wn_one_to_x - [INFO] - [E:45| 500]: Train Loss:0.0016857, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:29:04,350 - wn_one_to_x - [INFO] - [E:45| 600]: Train Loss:0.0016848, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:30:30,950 - wn_one_to_x - [INFO] - [E:45| 700]: Train Loss:0.0016884, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:31:57,786 - wn_one_to_x - [INFO] - [E:45| 800]: Train Loss:0.001689, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:33:24,703 - wn_one_to_x - [INFO] - [E:45| 900]: Train Loss:0.0016911, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:34:51,274 - wn_one_to_x - [INFO] - [E:45| 1000]: Train Loss:0.001693, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:36:18,202 - wn_one_to_x - [INFO] - [E:45| 1100]: Train Loss:0.001695, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:37:44,948 - wn_one_to_x - [INFO] - [E:45| 1200]: Train Loss:0.0016969, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:39:11,444 - wn_one_to_x - [INFO] - [E:45| 1300]: Train Loss:0.0016985, Val MRR:0.32671, wn_one_to_x
2023-05-03 01:39:59,730 - wn_one_to_x - [INFO] - [Epoch:45]: Training Loss:0.0017
2023-05-03 01:39:59,973 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 01:40:05,475 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 01:40:10,730 - wn_one_to_x - [INFO] - [Evaluating Epoch 45 valid]:
MRR: Tail : 0.34294, Head : 0.32253, Avg : 0.33273
2023-05-03 01:40:12,569 - wn_one_to_x - [INFO] - [Epoch 45]: Training Loss: 0.0017004, Valid MRR: 0.33273,
2023-05-03 01:40:13,451 - wn_one_to_x - [INFO] - [E:46| 0]: Train Loss:0.0015336, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:41:40,009 - wn_one_to_x - [INFO] - [E:46| 100]: Train Loss:0.0016394, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:43:06,391 - wn_one_to_x - [INFO] - [E:46| 200]: Train Loss:0.0016353, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:44:32,929 - wn_one_to_x - [INFO] - [E:46| 300]: Train Loss:0.0016366, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:45:59,332 - wn_one_to_x - [INFO] - [E:46| 400]: Train Loss:0.001636, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:47:25,595 - wn_one_to_x - [INFO] - [E:46| 500]: Train Loss:0.0016388, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:48:52,075 - wn_one_to_x - [INFO] - [E:46| 600]: Train Loss:0.0016439, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:50:18,654 - wn_one_to_x - [INFO] - [E:46| 700]: Train Loss:0.0016469, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:51:45,901 - wn_one_to_x - [INFO] - [E:46| 800]: Train Loss:0.0016455, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:53:12,880 - wn_one_to_x - [INFO] - [E:46| 900]: Train Loss:0.0016462, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:54:39,101 - wn_one_to_x - [INFO] - [E:46| 1000]: Train Loss:0.001649, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:56:06,171 - wn_one_to_x - [INFO] - [E:46| 1100]: Train Loss:0.0016505, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:57:32,720 - wn_one_to_x - [INFO] - [E:46| 1200]: Train Loss:0.0016527, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:58:59,372 - wn_one_to_x - [INFO] - [E:46| 1300]: Train Loss:0.0016555, Val MRR:0.33273, wn_one_to_x
2023-05-03 01:59:48,034 - wn_one_to_x - [INFO] - [Epoch:46]: Training Loss:0.001656
2023-05-03 01:59:48,278 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 01:59:53,811 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 01:59:59,071 - wn_one_to_x - [INFO] - [Evaluating Epoch 46 valid]:
MRR: Tail : 0.344, Head : 0.32703, Avg : 0.33551
2023-05-03 02:00:02,127 - wn_one_to_x - [INFO] - [Epoch 46]: Training Loss: 0.0016561, Valid MRR: 0.33551,
2023-05-03 02:00:03,014 - wn_one_to_x - [INFO] - [E:47| 0]: Train Loss:0.001608, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:01:29,844 - wn_one_to_x - [INFO] - [E:47| 100]: Train Loss:0.0015922, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:02:56,669 - wn_one_to_x - [INFO] - [E:47| 200]: Train Loss:0.001601, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:04:23,807 - wn_one_to_x - [INFO] - [E:47| 300]: Train Loss:0.0016006, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:05:50,497 - wn_one_to_x - [INFO] - [E:47| 400]: Train Loss:0.0016022, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:07:17,291 - wn_one_to_x - [INFO] - [E:47| 500]: Train Loss:0.0016042, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:08:43,976 - wn_one_to_x - [INFO] - [E:47| 600]: Train Loss:0.0016036, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:10:10,500 - wn_one_to_x - [INFO] - [E:47| 700]: Train Loss:0.0016044, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:11:37,143 - wn_one_to_x - [INFO] - [E:47| 800]: Train Loss:0.0016083, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:13:03,820 - wn_one_to_x - [INFO] - [E:47| 900]: Train Loss:0.0016101, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:14:30,529 - wn_one_to_x - [INFO] - [E:47| 1000]: Train Loss:0.0016095, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:15:57,038 - wn_one_to_x - [INFO] - [E:47| 1100]: Train Loss:0.0016109, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:17:24,332 - wn_one_to_x - [INFO] - [E:47| 1200]: Train Loss:0.0016126, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:18:50,957 - wn_one_to_x - [INFO] - [E:47| 1300]: Train Loss:0.0016138, Val MRR:0.33551, wn_one_to_x
2023-05-03 02:19:39,600 - wn_one_to_x - [INFO] - [Epoch:47]: Training Loss:0.001615
2023-05-03 02:19:39,846 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 02:19:45,397 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 02:19:50,945 - wn_one_to_x - [INFO] - [Evaluating Epoch 47 valid]:
MRR: Tail : 0.35084, Head : 0.33698, Avg : 0.34391
2023-05-03 02:19:52,943 - wn_one_to_x - [INFO] - [Epoch 47]: Training Loss: 0.0016148, Valid MRR: 0.34391,
2023-05-03 02:19:53,841 - wn_one_to_x - [INFO] - [E:48| 0]: Train Loss:0.0015313, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:21:20,636 - wn_one_to_x - [INFO] - [E:48| 100]: Train Loss:0.0015546, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:22:47,747 - wn_one_to_x - [INFO] - [E:48| 200]: Train Loss:0.0015595, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:24:15,081 - wn_one_to_x - [INFO] - [E:48| 300]: Train Loss:0.0015597, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:25:41,768 - wn_one_to_x - [INFO] - [E:48| 400]: Train Loss:0.001564, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:27:08,035 - wn_one_to_x - [INFO] - [E:48| 500]: Train Loss:0.0015649, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:28:34,814 - wn_one_to_x - [INFO] - [E:48| 600]: Train Loss:0.0015675, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:30:02,447 - wn_one_to_x - [INFO] - [E:48| 700]: Train Loss:0.0015679, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:31:29,016 - wn_one_to_x - [INFO] - [E:48| 800]: Train Loss:0.0015697, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:32:56,134 - wn_one_to_x - [INFO] - [E:48| 900]: Train Loss:0.0015726, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:34:22,858 - wn_one_to_x - [INFO] - [E:48| 1000]: Train Loss:0.0015733, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:35:49,319 - wn_one_to_x - [INFO] - [E:48| 1100]: Train Loss:0.0015757, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:37:15,595 - wn_one_to_x - [INFO] - [E:48| 1200]: Train Loss:0.001577, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:38:42,164 - wn_one_to_x - [INFO] - [E:48| 1300]: Train Loss:0.0015793, Val MRR:0.34391, wn_one_to_x
2023-05-03 02:39:30,758 - wn_one_to_x - [INFO] - [Epoch:48]: Training Loss:0.00158
2023-05-03 02:39:31,002 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 02:39:36,481 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 02:39:42,577 - wn_one_to_x - [INFO] - [Evaluating Epoch 48 valid]:
MRR: Tail : 0.35996, Head : 0.34089, Avg : 0.35043
2023-05-03 02:39:44,257 - wn_one_to_x - [INFO] - [Epoch 48]: Training Loss: 0.0015798, Valid MRR: 0.35043,
2023-05-03 02:39:45,126 - wn_one_to_x - [INFO] - [E:49| 0]: Train Loss:0.0014739, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:41:11,536 - wn_one_to_x - [INFO] - [E:49| 100]: Train Loss:0.0015263, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:42:37,903 - wn_one_to_x - [INFO] - [E:49| 200]: Train Loss:0.0015246, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:44:05,133 - wn_one_to_x - [INFO] - [E:49| 300]: Train Loss:0.0015309, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:45:31,684 - wn_one_to_x - [INFO] - [E:49| 400]: Train Loss:0.0015295, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:46:58,265 - wn_one_to_x - [INFO] - [E:49| 500]: Train Loss:0.0015289, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:48:24,825 - wn_one_to_x - [INFO] - [E:49| 600]: Train Loss:0.0015293, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:49:51,586 - wn_one_to_x - [INFO] - [E:49| 700]: Train Loss:0.0015318, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:51:18,607 - wn_one_to_x - [INFO] - [E:49| 800]: Train Loss:0.0015336, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:52:45,076 - wn_one_to_x - [INFO] - [E:49| 900]: Train Loss:0.0015375, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:54:11,564 - wn_one_to_x - [INFO] - [E:49| 1000]: Train Loss:0.0015403, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:55:38,553 - wn_one_to_x - [INFO] - [E:49| 1100]: Train Loss:0.0015415, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:57:06,117 - wn_one_to_x - [INFO] - [E:49| 1200]: Train Loss:0.0015429, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:58:33,279 - wn_one_to_x - [INFO] - [E:49| 1300]: Train Loss:0.0015447, Val MRR:0.35043, wn_one_to_x
2023-05-03 02:59:21,930 - wn_one_to_x - [INFO] - [Epoch:49]: Training Loss:0.001544
2023-05-03 02:59:22,178 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 02:59:27,940 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 02:59:33,327 - wn_one_to_x - [INFO] - [Evaluating Epoch 49 valid]:
MRR: Tail : 0.36345, Head : 0.35065, Avg : 0.35705
MR: Tail : 3251.9, Head : 4190.9, Avg : 3721.4
Hit-1: Tail : 0.31246, Head : 0.31015, Avg : 0.31131
Hit-3: Tail : 0.39288, Head : 0.3708, Avg : 0.38184
Hit-10: Tail : 0.45221, Head : 0.42452, Avg : 0.43837
2023-05-03 02:59:35,104 - wn_one_to_x - [INFO] - [Epoch 49]: Training Loss: 0.0015445, Valid MRR: 0.35705,
2023-05-03 02:59:35,992 - wn_one_to_x - [INFO] - [E:50| 0]: Train Loss:0.0016138, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:01:02,734 - wn_one_to_x - [INFO] - [E:50| 100]: Train Loss:0.0014922, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:02:28,876 - wn_one_to_x - [INFO] - [E:50| 200]: Train Loss:0.0014838, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:03:55,293 - wn_one_to_x - [INFO] - [E:50| 300]: Train Loss:0.0014904, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:05:21,505 - wn_one_to_x - [INFO] - [E:50| 400]: Train Loss:0.0014907, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:06:48,440 - wn_one_to_x - [INFO] - [E:50| 500]: Train Loss:0.0014935, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:08:14,964 - wn_one_to_x - [INFO] - [E:50| 600]: Train Loss:0.0014945, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:09:41,587 - wn_one_to_x - [INFO] - [E:50| 700]: Train Loss:0.001498, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:11:08,057 - wn_one_to_x - [INFO] - [E:50| 800]: Train Loss:0.0014979, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:12:34,559 - wn_one_to_x - [INFO] - [E:50| 900]: Train Loss:0.0015, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:14:01,123 - wn_one_to_x - [INFO] - [E:50| 1000]: Train Loss:0.0015017, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:15:27,714 - wn_one_to_x - [INFO] - [E:50| 1100]: Train Loss:0.0015033, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:16:54,615 - wn_one_to_x - [INFO] - [E:50| 1200]: Train Loss:0.0015055, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:18:21,176 - wn_one_to_x - [INFO] - [E:50| 1300]: Train Loss:0.0015071, Val MRR:0.35705, wn_one_to_x
2023-05-03 03:19:09,379 - wn_one_to_x - [INFO] - [Epoch:50]: Training Loss:0.001508
2023-05-03 03:19:09,625 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 03:19:15,171 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 03:19:21,196 - wn_one_to_x - [INFO] - [Evaluating Epoch 50 valid]:
MRR: Tail : 0.36869, Head : 0.35186, Avg : 0.36027
2023-05-03 03:19:23,022 - wn_one_to_x - [INFO] - [Epoch 50]: Training Loss: 0.0015082, Valid MRR: 0.36027,
2023-05-03 03:19:23,938 - wn_one_to_x - [INFO] - [E:51| 0]: Train Loss:0.0014084, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:20:50,868 - wn_one_to_x - [INFO] - [E:51| 100]: Train Loss:0.0014478, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:22:17,745 - wn_one_to_x - [INFO] - [E:51| 200]: Train Loss:0.0014486, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:23:44,091 - wn_one_to_x - [INFO] - [E:51| 300]: Train Loss:0.0014502, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:25:10,849 - wn_one_to_x - [INFO] - [E:51| 400]: Train Loss:0.0014567, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:26:37,326 - wn_one_to_x - [INFO] - [E:51| 500]: Train Loss:0.0014562, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:28:03,908 - wn_one_to_x - [INFO] - [E:51| 600]: Train Loss:0.0014583, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:29:30,240 - wn_one_to_x - [INFO] - [E:51| 700]: Train Loss:0.0014606, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:30:57,221 - wn_one_to_x - [INFO] - [E:51| 800]: Train Loss:0.0014629, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:32:23,156 - wn_one_to_x - [INFO] - [E:51| 900]: Train Loss:0.001466, Val MRR:0.36027, wn_one_to_x
2023-05-03 03:33:49,805 - wn_one_to_x - [INFO] - [E:51| 1000]: Train Loss:0.0014685, Val MRR:0.36027, wn_one_to_x
|
|
2023-05-03 03:35:16,113 - wn_one_to_x - [INFO] - [E:51| 1100]: Train Loss:0.0014715, Val MRR:0.36027, wn_one_to_x
|
|
2023-05-03 03:36:42,813 - wn_one_to_x - [INFO] - [E:51| 1200]: Train Loss:0.0014721, Val MRR:0.36027, wn_one_to_x
|
|
2023-05-03 03:38:09,493 - wn_one_to_x - [INFO] - [E:51| 1300]: Train Loss:0.0014744, Val MRR:0.36027, wn_one_to_x
|
|
2023-05-03 03:38:57,924 - wn_one_to_x - [INFO] - [Epoch:51]: Training Loss:0.001475
|
|
|
|
2023-05-03 03:38:58,170 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 03:39:03,806 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 03:39:09,512 - wn_one_to_x - [INFO] - [Evaluating Epoch 51 valid]:
|
|
MRR: Tail : 0.36915, Head : 0.35482, Avg : 0.36199
|
|
|
|
2023-05-03 03:39:11,230 - wn_one_to_x - [INFO] - [Epoch 51]: Training Loss: 0.0014747, Valid MRR: 0.36199,
|
|
|
|
|
|
|
|
2023-05-03 03:39:12,108 - wn_one_to_x - [INFO] - [E:52| 0]: Train Loss:0.0015237, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:40:38,561 - wn_one_to_x - [INFO] - [E:52| 100]: Train Loss:0.0014378, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:42:05,158 - wn_one_to_x - [INFO] - [E:52| 200]: Train Loss:0.0014284, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:43:31,980 - wn_one_to_x - [INFO] - [E:52| 300]: Train Loss:0.0014274, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:44:58,653 - wn_one_to_x - [INFO] - [E:52| 400]: Train Loss:0.0014273, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:46:24,840 - wn_one_to_x - [INFO] - [E:52| 500]: Train Loss:0.0014303, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:47:51,279 - wn_one_to_x - [INFO] - [E:52| 600]: Train Loss:0.0014324, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:49:17,881 - wn_one_to_x - [INFO] - [E:52| 700]: Train Loss:0.0014345, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:50:44,656 - wn_one_to_x - [INFO] - [E:52| 800]: Train Loss:0.0014348, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:52:11,436 - wn_one_to_x - [INFO] - [E:52| 900]: Train Loss:0.0014362, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:53:38,040 - wn_one_to_x - [INFO] - [E:52| 1000]: Train Loss:0.001439, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:55:04,801 - wn_one_to_x - [INFO] - [E:52| 1100]: Train Loss:0.0014406, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:56:31,239 - wn_one_to_x - [INFO] - [E:52| 1200]: Train Loss:0.0014433, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:57:57,829 - wn_one_to_x - [INFO] - [E:52| 1300]: Train Loss:0.0014445, Val MRR:0.36199, wn_one_to_x
|
|
2023-05-03 03:58:46,200 - wn_one_to_x - [INFO] - [Epoch:52]: Training Loss:0.001445
|
|
|
|
2023-05-03 03:58:46,445 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 03:58:52,517 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 03:58:59,177 - wn_one_to_x - [INFO] - [Evaluating Epoch 52 valid]:
|
|
MRR: Tail : 0.37737, Head : 0.35972, Avg : 0.36854
|
|
|
|
2023-05-03 03:59:00,873 - wn_one_to_x - [INFO] - [Epoch 52]: Training Loss: 0.0014455, Valid MRR: 0.36854,
|
|
|
|
|
|
|
|
2023-05-03 03:59:01,748 - wn_one_to_x - [INFO] - [E:53| 0]: Train Loss:0.0013252, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:00:28,549 - wn_one_to_x - [INFO] - [E:53| 100]: Train Loss:0.001393, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:01:55,222 - wn_one_to_x - [INFO] - [E:53| 200]: Train Loss:0.0013868, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:03:21,800 - wn_one_to_x - [INFO] - [E:53| 300]: Train Loss:0.0013898, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:04:48,320 - wn_one_to_x - [INFO] - [E:53| 400]: Train Loss:0.0013934, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:06:15,282 - wn_one_to_x - [INFO] - [E:53| 500]: Train Loss:0.0013974, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:07:41,814 - wn_one_to_x - [INFO] - [E:53| 600]: Train Loss:0.0014002, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:09:08,343 - wn_one_to_x - [INFO] - [E:53| 700]: Train Loss:0.0014001, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:10:34,858 - wn_one_to_x - [INFO] - [E:53| 800]: Train Loss:0.001401, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:12:01,207 - wn_one_to_x - [INFO] - [E:53| 900]: Train Loss:0.0014031, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:13:27,611 - wn_one_to_x - [INFO] - [E:53| 1000]: Train Loss:0.0014052, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:14:54,469 - wn_one_to_x - [INFO] - [E:53| 1100]: Train Loss:0.0014063, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:16:20,899 - wn_one_to_x - [INFO] - [E:53| 1200]: Train Loss:0.001409, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:17:47,061 - wn_one_to_x - [INFO] - [E:53| 1300]: Train Loss:0.0014119, Val MRR:0.36854, wn_one_to_x
|
|
2023-05-03 04:18:35,289 - wn_one_to_x - [INFO] - [Epoch:53]: Training Loss:0.001413
|
|
|
|
2023-05-03 04:18:35,535 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 04:18:41,038 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 04:18:46,301 - wn_one_to_x - [INFO] - [Evaluating Epoch 53 valid]:
|
|
MRR: Tail : 0.37922, Head : 0.36436, Avg : 0.37179
|
|
|
|
2023-05-03 04:18:48,219 - wn_one_to_x - [INFO] - [Epoch 53]: Training Loss: 0.0014131, Valid MRR: 0.37179,
|
|
|
|
|
|
|
|
2023-05-03 04:18:49,100 - wn_one_to_x - [INFO] - [E:54| 0]: Train Loss:0.0014769, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:20:15,758 - wn_one_to_x - [INFO] - [E:54| 100]: Train Loss:0.0013593, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:21:42,407 - wn_one_to_x - [INFO] - [E:54| 200]: Train Loss:0.0013584, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:23:08,868 - wn_one_to_x - [INFO] - [E:54| 300]: Train Loss:0.0013635, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:24:35,689 - wn_one_to_x - [INFO] - [E:54| 400]: Train Loss:0.0013657, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:26:02,185 - wn_one_to_x - [INFO] - [E:54| 500]: Train Loss:0.0013693, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:27:28,569 - wn_one_to_x - [INFO] - [E:54| 600]: Train Loss:0.0013698, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:28:55,138 - wn_one_to_x - [INFO] - [E:54| 700]: Train Loss:0.0013725, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:30:22,286 - wn_one_to_x - [INFO] - [E:54| 800]: Train Loss:0.0013756, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:31:48,966 - wn_one_to_x - [INFO] - [E:54| 900]: Train Loss:0.0013769, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:33:15,307 - wn_one_to_x - [INFO] - [E:54| 1000]: Train Loss:0.0013788, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:34:41,894 - wn_one_to_x - [INFO] - [E:54| 1100]: Train Loss:0.0013806, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:36:08,579 - wn_one_to_x - [INFO] - [E:54| 1200]: Train Loss:0.0013824, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:37:35,260 - wn_one_to_x - [INFO] - [E:54| 1300]: Train Loss:0.0013839, Val MRR:0.37179, wn_one_to_x
|
|
2023-05-03 04:38:23,474 - wn_one_to_x - [INFO] - [Epoch:54]: Training Loss:0.001385
|
|
|
|
2023-05-03 04:38:23,717 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 04:38:29,369 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 04:38:34,697 - wn_one_to_x - [INFO] - [Evaluating Epoch 54 valid]:
|
|
MRR: Tail : 0.39048, Head : 0.36413, Avg : 0.3773
|
|
|
|
2023-05-03 04:38:36,450 - wn_one_to_x - [INFO] - [Epoch 54]: Training Loss: 0.0013852, Valid MRR: 0.3773,
|
|
|
|
|
|
|
|
2023-05-03 04:38:37,328 - wn_one_to_x - [INFO] - [E:55| 0]: Train Loss:0.0012801, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:40:03,998 - wn_one_to_x - [INFO] - [E:55| 100]: Train Loss:0.0013368, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:41:30,678 - wn_one_to_x - [INFO] - [E:55| 200]: Train Loss:0.0013386, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:42:57,520 - wn_one_to_x - [INFO] - [E:55| 300]: Train Loss:0.0013421, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:44:24,107 - wn_one_to_x - [INFO] - [E:55| 400]: Train Loss:0.001343, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:45:51,577 - wn_one_to_x - [INFO] - [E:55| 500]: Train Loss:0.0013444, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:47:18,557 - wn_one_to_x - [INFO] - [E:55| 600]: Train Loss:0.0013456, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:48:45,499 - wn_one_to_x - [INFO] - [E:55| 700]: Train Loss:0.0013475, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:50:12,933 - wn_one_to_x - [INFO] - [E:55| 800]: Train Loss:0.0013492, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:51:39,746 - wn_one_to_x - [INFO] - [E:55| 900]: Train Loss:0.0013507, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:53:08,976 - wn_one_to_x - [INFO] - [E:55| 1000]: Train Loss:0.001353, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:54:37,017 - wn_one_to_x - [INFO] - [E:55| 1100]: Train Loss:0.0013544, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:56:04,723 - wn_one_to_x - [INFO] - [E:55| 1200]: Train Loss:0.0013552, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:57:33,328 - wn_one_to_x - [INFO] - [E:55| 1300]: Train Loss:0.0013571, Val MRR:0.3773, wn_one_to_x
|
|
2023-05-03 04:58:22,862 - wn_one_to_x - [INFO] - [Epoch:55]: Training Loss:0.001357
|
|
|
|
2023-05-03 04:58:23,113 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 04:58:28,689 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 04:58:34,068 - wn_one_to_x - [INFO] - [Evaluating Epoch 55 valid]:
|
|
MRR: Tail : 0.38773, Head : 0.37197, Avg : 0.37985
|
|
|
|
2023-05-03 04:58:36,190 - wn_one_to_x - [INFO] - [Epoch 55]: Training Loss: 0.0013572, Valid MRR: 0.37985,
|
|
|
|
|
|
|
|
2023-05-03 04:58:37,077 - wn_one_to_x - [INFO] - [E:56| 0]: Train Loss:0.0012209, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:00:05,240 - wn_one_to_x - [INFO] - [E:56| 100]: Train Loss:0.0013055, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:01:34,091 - wn_one_to_x - [INFO] - [E:56| 200]: Train Loss:0.0013138, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:03:02,072 - wn_one_to_x - [INFO] - [E:56| 300]: Train Loss:0.0013089, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:04:29,523 - wn_one_to_x - [INFO] - [E:56| 400]: Train Loss:0.0013095, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:05:57,541 - wn_one_to_x - [INFO] - [E:56| 500]: Train Loss:0.0013122, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:07:25,598 - wn_one_to_x - [INFO] - [E:56| 600]: Train Loss:0.0013142, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:08:53,806 - wn_one_to_x - [INFO] - [E:56| 700]: Train Loss:0.0013168, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:10:25,228 - wn_one_to_x - [INFO] - [E:56| 800]: Train Loss:0.0013185, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:11:53,286 - wn_one_to_x - [INFO] - [E:56| 900]: Train Loss:0.0013224, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:13:20,766 - wn_one_to_x - [INFO] - [E:56| 1000]: Train Loss:0.0013244, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:14:47,447 - wn_one_to_x - [INFO] - [E:56| 1100]: Train Loss:0.0013267, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:16:15,023 - wn_one_to_x - [INFO] - [E:56| 1200]: Train Loss:0.0013282, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:17:41,776 - wn_one_to_x - [INFO] - [E:56| 1300]: Train Loss:0.0013287, Val MRR:0.37985, wn_one_to_x
|
|
2023-05-03 05:18:31,251 - wn_one_to_x - [INFO] - [Epoch:56]: Training Loss:0.00133
|
|
|
|
2023-05-03 05:18:32,061 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 05:18:37,792 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 05:18:43,565 - wn_one_to_x - [INFO] - [Evaluating Epoch 56 valid]:
|
|
MRR: Tail : 0.39114, Head : 0.3751, Avg : 0.38312
|
|
|
|
2023-05-03 05:18:45,297 - wn_one_to_x - [INFO] - [Epoch 56]: Training Loss: 0.0013299, Valid MRR: 0.38312,
|
|
|
|
|
|
|
|
2023-05-03 05:18:46,183 - wn_one_to_x - [INFO] - [E:57| 0]: Train Loss:0.001165, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:20:13,757 - wn_one_to_x - [INFO] - [E:57| 100]: Train Loss:0.0012732, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:21:40,688 - wn_one_to_x - [INFO] - [E:57| 200]: Train Loss:0.0012806, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:23:08,357 - wn_one_to_x - [INFO] - [E:57| 300]: Train Loss:0.001286, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:24:35,592 - wn_one_to_x - [INFO] - [E:57| 400]: Train Loss:0.00129, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:26:02,843 - wn_one_to_x - [INFO] - [E:57| 500]: Train Loss:0.0012887, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:27:29,369 - wn_one_to_x - [INFO] - [E:57| 600]: Train Loss:0.0012926, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:28:55,818 - wn_one_to_x - [INFO] - [E:57| 700]: Train Loss:0.0012938, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:30:23,643 - wn_one_to_x - [INFO] - [E:57| 800]: Train Loss:0.0012955, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:31:50,242 - wn_one_to_x - [INFO] - [E:57| 900]: Train Loss:0.0012973, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:33:17,595 - wn_one_to_x - [INFO] - [E:57| 1000]: Train Loss:0.001299, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:34:44,721 - wn_one_to_x - [INFO] - [E:57| 1100]: Train Loss:0.0013013, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:36:11,606 - wn_one_to_x - [INFO] - [E:57| 1200]: Train Loss:0.0013023, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:37:38,214 - wn_one_to_x - [INFO] - [E:57| 1300]: Train Loss:0.0013039, Val MRR:0.38312, wn_one_to_x
|
|
2023-05-03 05:38:26,549 - wn_one_to_x - [INFO] - [Epoch:57]: Training Loss:0.001305
|
|
|
|
2023-05-03 05:38:26,798 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 05:38:32,312 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 05:38:37,942 - wn_one_to_x - [INFO] - [Evaluating Epoch 57 valid]:
|
|
MRR: Tail : 0.39879, Head : 0.38055, Avg : 0.38967
|
|
|
|
2023-05-03 05:38:39,851 - wn_one_to_x - [INFO] - [Epoch 57]: Training Loss: 0.0013055, Valid MRR: 0.38967,
|
|
|
|
|
|
|
|
2023-05-03 05:38:40,735 - wn_one_to_x - [INFO] - [E:58| 0]: Train Loss:0.0011955, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:40:07,225 - wn_one_to_x - [INFO] - [E:58| 100]: Train Loss:0.0012699, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:41:33,662 - wn_one_to_x - [INFO] - [E:58| 200]: Train Loss:0.0012666, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:43:00,271 - wn_one_to_x - [INFO] - [E:58| 300]: Train Loss:0.0012661, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:44:27,444 - wn_one_to_x - [INFO] - [E:58| 400]: Train Loss:0.0012634, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:45:53,773 - wn_one_to_x - [INFO] - [E:58| 500]: Train Loss:0.0012652, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:47:20,452 - wn_one_to_x - [INFO] - [E:58| 600]: Train Loss:0.0012657, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:48:47,077 - wn_one_to_x - [INFO] - [E:58| 700]: Train Loss:0.0012672, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:50:14,894 - wn_one_to_x - [INFO] - [E:58| 800]: Train Loss:0.0012699, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:51:42,757 - wn_one_to_x - [INFO] - [E:58| 900]: Train Loss:0.0012711, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:53:09,454 - wn_one_to_x - [INFO] - [E:58| 1000]: Train Loss:0.0012713, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:54:36,847 - wn_one_to_x - [INFO] - [E:58| 1100]: Train Loss:0.0012737, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:56:03,493 - wn_one_to_x - [INFO] - [E:58| 1200]: Train Loss:0.0012744, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:57:30,292 - wn_one_to_x - [INFO] - [E:58| 1300]: Train Loss:0.0012769, Val MRR:0.38967, wn_one_to_x
|
|
2023-05-03 05:58:19,585 - wn_one_to_x - [INFO] - [Epoch:58]: Training Loss:0.001278
|
|
|
|
2023-05-03 05:58:19,829 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 05:58:25,411 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 05:58:30,707 - wn_one_to_x - [INFO] - [Evaluating Epoch 58 valid]:
|
|
MRR: Tail : 0.40259, Head : 0.38092, Avg : 0.39176
|
|
|
|
2023-05-03 05:58:32,538 - wn_one_to_x - [INFO] - [Epoch 58]: Training Loss: 0.0012782, Valid MRR: 0.39176,
|
|
|
|
|
|
|
|
2023-05-03 05:58:33,413 - wn_one_to_x - [INFO] - [E:59| 0]: Train Loss:0.0012599, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 05:59:59,697 - wn_one_to_x - [INFO] - [E:59| 100]: Train Loss:0.0012375, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:01:26,210 - wn_one_to_x - [INFO] - [E:59| 200]: Train Loss:0.00124, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:02:55,188 - wn_one_to_x - [INFO] - [E:59| 300]: Train Loss:0.0012423, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:04:22,388 - wn_one_to_x - [INFO] - [E:59| 400]: Train Loss:0.001244, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:05:51,270 - wn_one_to_x - [INFO] - [E:59| 500]: Train Loss:0.0012424, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:07:18,750 - wn_one_to_x - [INFO] - [E:59| 600]: Train Loss:0.0012428, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:08:46,719 - wn_one_to_x - [INFO] - [E:59| 700]: Train Loss:0.0012454, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:10:14,586 - wn_one_to_x - [INFO] - [E:59| 800]: Train Loss:0.0012473, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:11:42,110 - wn_one_to_x - [INFO] - [E:59| 900]: Train Loss:0.0012486, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:13:10,084 - wn_one_to_x - [INFO] - [E:59| 1000]: Train Loss:0.0012507, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:14:37,913 - wn_one_to_x - [INFO] - [E:59| 1100]: Train Loss:0.0012527, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:16:06,195 - wn_one_to_x - [INFO] - [E:59| 1200]: Train Loss:0.001255, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:17:35,192 - wn_one_to_x - [INFO] - [E:59| 1300]: Train Loss:0.0012569, Val MRR:0.39176, wn_one_to_x
|
|
2023-05-03 06:18:24,629 - wn_one_to_x - [INFO] - [Epoch:59]: Training Loss:0.001258
|
|
|
|
2023-05-03 06:18:24,901 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 06:18:30,722 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 06:18:36,099 - wn_one_to_x - [INFO] - [Evaluating Epoch 59 valid]:
|
|
MRR: Tail : 0.40345, Head : 0.38284, Avg : 0.39315
|
|
MR: Tail : 3308.4, Head : 3991.4, Avg : 3649.9
|
|
Hit-1: Tail : 0.3586, Head : 0.34575, Avg : 0.35218
|
|
Hit-3: Tail : 0.43013, Head : 0.4031, Avg : 0.41661
|
|
Hit-10: Tail : 0.47693, Head : 0.44529, Avg : 0.46111
|
|
2023-05-03 06:18:37,974 - wn_one_to_x - [INFO] - [Epoch 59]: Training Loss: 0.001258, Valid MRR: 0.39315,
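
Here the log prints the full validation metric set rather than MRR alone: MRR (mean reciprocal rank), MR (mean rank, lower is better), and Hit-1/3/10 (the fraction of queries whose correct entity ranks within the cutoff), each reported for tail queries, head queries, and their average. All of them derive from the same per-query rank; a small runnable sketch of the arithmetic, where `ranks` is a hypothetical toy list, not data from this run:

    # Illustrative: the metrics above as functions of 1-indexed ranks of
    # the correct entity for the queries on one side (tail or head).
    def summarize(ranks):
        n = len(ranks)
        return {
            "MRR": sum(1.0 / r for r in ranks) / n,      # mean reciprocal rank
            "MR": sum(ranks) / n,                        # mean rank
            "Hit-1": sum(r <= 1 for r in ranks) / n,     # share ranked first
            "Hit-3": sum(r <= 3 for r in ranks) / n,
            "Hit-10": sum(r <= 10 for r in ranks) / n,
        }

    print(summarize([1, 2, 7, 12000]))  # toy ranks; logged "Avg" is the
                                        # mean of the tail and head values
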

2023-05-03 06:18:38,849 - wn_one_to_x - [INFO] - [E:60| 0]: Train Loss:0.0011581, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:20:07,161 - wn_one_to_x - [INFO] - [E:60| 100]: Train Loss:0.0012028, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:21:35,541 - wn_one_to_x - [INFO] - [E:60| 200]: Train Loss:0.0012137, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:23:03,802 - wn_one_to_x - [INFO] - [E:60| 300]: Train Loss:0.0012173, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:24:34,340 - wn_one_to_x - [INFO] - [E:60| 400]: Train Loss:0.0012194, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:26:05,271 - wn_one_to_x - [INFO] - [E:60| 500]: Train Loss:0.0012225, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:27:35,175 - wn_one_to_x - [INFO] - [E:60| 600]: Train Loss:0.0012223, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:29:03,475 - wn_one_to_x - [INFO] - [E:60| 700]: Train Loss:0.0012249, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:30:31,592 - wn_one_to_x - [INFO] - [E:60| 800]: Train Loss:0.0012261, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:31:59,165 - wn_one_to_x - [INFO] - [E:60| 900]: Train Loss:0.0012275, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:33:27,623 - wn_one_to_x - [INFO] - [E:60| 1000]: Train Loss:0.0012309, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:34:55,851 - wn_one_to_x - [INFO] - [E:60| 1100]: Train Loss:0.001233, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:36:24,154 - wn_one_to_x - [INFO] - [E:60| 1200]: Train Loss:0.0012358, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:37:54,256 - wn_one_to_x - [INFO] - [E:60| 1300]: Train Loss:0.0012372, Val MRR:0.39315, wn_one_to_x
2023-05-03 06:38:43,480 - wn_one_to_x - [INFO] - [Epoch:60]: Training Loss:0.001238

2023-05-03 06:38:44,113 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 06:38:51,469 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 06:38:57,232 - wn_one_to_x - [INFO] - [Evaluating Epoch 60 valid]:
MRR: Tail : 0.40603, Head : 0.38835, Avg : 0.39719

2023-05-03 06:38:58,985 - wn_one_to_x - [INFO] - [Epoch 60]: Training Loss: 0.0012377, Valid MRR: 0.39719,

2023-05-03 06:38:59,872 - wn_one_to_x - [INFO] - [E:61| 0]: Train Loss:0.0012041, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:40:30,790 - wn_one_to_x - [INFO] - [E:61| 100]: Train Loss:0.001194, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:42:00,125 - wn_one_to_x - [INFO] - [E:61| 200]: Train Loss:0.0011987, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:43:28,932 - wn_one_to_x - [INFO] - [E:61| 300]: Train Loss:0.0011999, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:44:57,724 - wn_one_to_x - [INFO] - [E:61| 400]: Train Loss:0.0012012, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:46:25,946 - wn_one_to_x - [INFO] - [E:61| 500]: Train Loss:0.0012014, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:47:54,102 - wn_one_to_x - [INFO] - [E:61| 600]: Train Loss:0.0012047, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:49:25,365 - wn_one_to_x - [INFO] - [E:61| 700]: Train Loss:0.0012069, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:50:54,721 - wn_one_to_x - [INFO] - [E:61| 800]: Train Loss:0.0012078, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:52:23,787 - wn_one_to_x - [INFO] - [E:61| 900]: Train Loss:0.0012092, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:53:53,789 - wn_one_to_x - [INFO] - [E:61| 1000]: Train Loss:0.0012094, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:55:23,147 - wn_one_to_x - [INFO] - [E:61| 1100]: Train Loss:0.0012116, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:56:51,431 - wn_one_to_x - [INFO] - [E:61| 1200]: Train Loss:0.0012127, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:58:21,446 - wn_one_to_x - [INFO] - [E:61| 1300]: Train Loss:0.0012142, Val MRR:0.39719, wn_one_to_x
2023-05-03 06:59:11,746 - wn_one_to_x - [INFO] - [Epoch:61]: Training Loss:0.001215

2023-05-03 06:59:11,993 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 06:59:18,693 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 06:59:24,460 - wn_one_to_x - [INFO] - [Evaluating Epoch 61 valid]:
MRR: Tail : 0.40718, Head : 0.38935, Avg : 0.39826

2023-05-03 06:59:26,240 - wn_one_to_x - [INFO] - [Epoch 61]: Training Loss: 0.0012148, Valid MRR: 0.39826,

2023-05-03 06:59:27,117 - wn_one_to_x - [INFO] - [E:62| 0]: Train Loss:0.0011254, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:00:56,185 - wn_one_to_x - [INFO] - [E:62| 100]: Train Loss:0.0011734, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:02:25,780 - wn_one_to_x - [INFO] - [E:62| 200]: Train Loss:0.0011777, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:03:54,725 - wn_one_to_x - [INFO] - [E:62| 300]: Train Loss:0.0011798, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:05:23,091 - wn_one_to_x - [INFO] - [E:62| 400]: Train Loss:0.0011799, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:06:50,336 - wn_one_to_x - [INFO] - [E:62| 500]: Train Loss:0.001182, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:08:17,500 - wn_one_to_x - [INFO] - [E:62| 600]: Train Loss:0.0011833, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:09:44,532 - wn_one_to_x - [INFO] - [E:62| 700]: Train Loss:0.001185, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:11:11,718 - wn_one_to_x - [INFO] - [E:62| 800]: Train Loss:0.0011871, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:12:38,778 - wn_one_to_x - [INFO] - [E:62| 900]: Train Loss:0.0011873, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:14:05,768 - wn_one_to_x - [INFO] - [E:62| 1000]: Train Loss:0.0011892, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:15:32,097 - wn_one_to_x - [INFO] - [E:62| 1100]: Train Loss:0.0011897, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:16:58,719 - wn_one_to_x - [INFO] - [E:62| 1200]: Train Loss:0.0011915, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:18:25,481 - wn_one_to_x - [INFO] - [E:62| 1300]: Train Loss:0.0011928, Val MRR:0.39826, wn_one_to_x
2023-05-03 07:19:13,732 - wn_one_to_x - [INFO] - [Epoch:62]: Training Loss:0.001193

2023-05-03 07:19:13,977 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 07:19:19,467 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 07:19:24,802 - wn_one_to_x - [INFO] - [Evaluating Epoch 62 valid]:
MRR: Tail : 0.40932, Head : 0.39267, Avg : 0.40099

2023-05-03 07:19:26,551 - wn_one_to_x - [INFO] - [Epoch 62]: Training Loss: 0.0011934, Valid MRR: 0.40099,

2023-05-03 07:19:27,423 - wn_one_to_x - [INFO] - [E:63| 0]: Train Loss:0.0012219, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:20:53,725 - wn_one_to_x - [INFO] - [E:63| 100]: Train Loss:0.0011601, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:22:20,238 - wn_one_to_x - [INFO] - [E:63| 200]: Train Loss:0.0011566, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:23:47,035 - wn_one_to_x - [INFO] - [E:63| 300]: Train Loss:0.0011592, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:25:13,378 - wn_one_to_x - [INFO] - [E:63| 400]: Train Loss:0.001164, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:26:39,929 - wn_one_to_x - [INFO] - [E:63| 500]: Train Loss:0.001165, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:28:06,324 - wn_one_to_x - [INFO] - [E:63| 600]: Train Loss:0.0011662, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:29:33,091 - wn_one_to_x - [INFO] - [E:63| 700]: Train Loss:0.0011684, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:30:59,791 - wn_one_to_x - [INFO] - [E:63| 800]: Train Loss:0.0011696, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:32:26,249 - wn_one_to_x - [INFO] - [E:63| 900]: Train Loss:0.0011708, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:33:52,698 - wn_one_to_x - [INFO] - [E:63| 1000]: Train Loss:0.0011731, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:35:19,086 - wn_one_to_x - [INFO] - [E:63| 1100]: Train Loss:0.0011736, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:36:45,719 - wn_one_to_x - [INFO] - [E:63| 1200]: Train Loss:0.0011752, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:38:12,461 - wn_one_to_x - [INFO] - [E:63| 1300]: Train Loss:0.0011767, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:39:00,504 - wn_one_to_x - [INFO] - [Epoch:63]: Training Loss:0.001178

2023-05-03 07:39:00,751 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 07:39:07,067 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 07:39:12,388 - wn_one_to_x - [INFO] - [Evaluating Epoch 63 valid]:
MRR: Tail : 0.40932, Head : 0.39024, Avg : 0.39978

2023-05-03 07:39:12,388 - wn_one_to_x - [INFO] - [Epoch 63]: Training Loss: 0.0011778, Valid MRR: 0.40099,

2023-05-03 07:39:13,275 - wn_one_to_x - [INFO] - [E:64| 0]: Train Loss:0.0011999, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:40:39,811 - wn_one_to_x - [INFO] - [E:64| 100]: Train Loss:0.0011425, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:42:06,227 - wn_one_to_x - [INFO] - [E:64| 200]: Train Loss:0.0011422, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:43:32,650 - wn_one_to_x - [INFO] - [E:64| 300]: Train Loss:0.0011438, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:44:59,386 - wn_one_to_x - [INFO] - [E:64| 400]: Train Loss:0.0011442, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:46:25,945 - wn_one_to_x - [INFO] - [E:64| 500]: Train Loss:0.0011467, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:47:52,901 - wn_one_to_x - [INFO] - [E:64| 600]: Train Loss:0.0011475, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:49:19,548 - wn_one_to_x - [INFO] - [E:64| 700]: Train Loss:0.0011487, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:50:45,922 - wn_one_to_x - [INFO] - [E:64| 800]: Train Loss:0.0011502, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:52:12,625 - wn_one_to_x - [INFO] - [E:64| 900]: Train Loss:0.0011511, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:53:39,104 - wn_one_to_x - [INFO] - [E:64| 1000]: Train Loss:0.0011521, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:55:06,001 - wn_one_to_x - [INFO] - [E:64| 1100]: Train Loss:0.0011538, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:56:32,745 - wn_one_to_x - [INFO] - [E:64| 1200]: Train Loss:0.0011552, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:57:59,250 - wn_one_to_x - [INFO] - [E:64| 1300]: Train Loss:0.0011561, Val MRR:0.40099, wn_one_to_x
2023-05-03 07:58:47,816 - wn_one_to_x - [INFO] - [Epoch:64]: Training Loss:0.001157

2023-05-03 07:58:48,062 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 07:58:53,578 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 07:58:58,909 - wn_one_to_x - [INFO] - [Evaluating Epoch 64 valid]:
MRR: Tail : 0.40832, Head : 0.39415, Avg : 0.40124

2023-05-03 07:59:00,607 - wn_one_to_x - [INFO] - [Epoch 64]: Training Loss: 0.0011569, Valid MRR: 0.40124,

2023-05-03 07:59:01,494 - wn_one_to_x - [INFO] - [E:65| 0]: Train Loss:0.001059, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:00:28,052 - wn_one_to_x - [INFO] - [E:65| 100]: Train Loss:0.0011189, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:01:54,648 - wn_one_to_x - [INFO] - [E:65| 200]: Train Loss:0.0011232, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:03:21,328 - wn_one_to_x - [INFO] - [E:65| 300]: Train Loss:0.0011248, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:04:47,909 - wn_one_to_x - [INFO] - [E:65| 400]: Train Loss:0.0011273, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:06:14,633 - wn_one_to_x - [INFO] - [E:65| 500]: Train Loss:0.0011272, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:07:41,088 - wn_one_to_x - [INFO] - [E:65| 600]: Train Loss:0.0011298, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:09:07,704 - wn_one_to_x - [INFO] - [E:65| 700]: Train Loss:0.0011307, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:10:34,523 - wn_one_to_x - [INFO] - [E:65| 800]: Train Loss:0.0011322, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:12:00,951 - wn_one_to_x - [INFO] - [E:65| 900]: Train Loss:0.0011341, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:13:27,811 - wn_one_to_x - [INFO] - [E:65| 1000]: Train Loss:0.0011345, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:14:54,551 - wn_one_to_x - [INFO] - [E:65| 1100]: Train Loss:0.0011367, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:16:21,270 - wn_one_to_x - [INFO] - [E:65| 1200]: Train Loss:0.0011392, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:17:47,937 - wn_one_to_x - [INFO] - [E:65| 1300]: Train Loss:0.0011405, Val MRR:0.40124, wn_one_to_x
2023-05-03 08:18:36,267 - wn_one_to_x - [INFO] - [Epoch:65]: Training Loss:0.001141

2023-05-03 08:18:36,514 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 08:18:42,113 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 08:18:47,984 - wn_one_to_x - [INFO] - [Evaluating Epoch 65 valid]:
MRR: Tail : 0.41336, Head : 0.39462, Avg : 0.40399

2023-05-03 08:18:49,755 - wn_one_to_x - [INFO] - [Epoch 65]: Training Loss: 0.0011412, Valid MRR: 0.40399,

2023-05-03 08:18:50,637 - wn_one_to_x - [INFO] - [E:66| 0]: Train Loss:0.0010456, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:20:17,420 - wn_one_to_x - [INFO] - [E:66| 100]: Train Loss:0.0011014, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:21:44,080 - wn_one_to_x - [INFO] - [E:66| 200]: Train Loss:0.0011023, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:23:10,481 - wn_one_to_x - [INFO] - [E:66| 300]: Train Loss:0.0011056, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:24:37,407 - wn_one_to_x - [INFO] - [E:66| 400]: Train Loss:0.0011097, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:26:04,035 - wn_one_to_x - [INFO] - [E:66| 500]: Train Loss:0.0011115, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:27:30,588 - wn_one_to_x - [INFO] - [E:66| 600]: Train Loss:0.0011131, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:28:56,835 - wn_one_to_x - [INFO] - [E:66| 700]: Train Loss:0.0011136, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:30:23,185 - wn_one_to_x - [INFO] - [E:66| 800]: Train Loss:0.0011144, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:31:50,085 - wn_one_to_x - [INFO] - [E:66| 900]: Train Loss:0.0011164, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:33:17,021 - wn_one_to_x - [INFO] - [E:66| 1000]: Train Loss:0.0011176, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:34:46,213 - wn_one_to_x - [INFO] - [E:66| 1100]: Train Loss:0.0011192, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:36:15,381 - wn_one_to_x - [INFO] - [E:66| 1200]: Train Loss:0.0011209, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:37:43,161 - wn_one_to_x - [INFO] - [E:66| 1300]: Train Loss:0.0011212, Val MRR:0.40399, wn_one_to_x
2023-05-03 08:38:32,207 - wn_one_to_x - [INFO] - [Epoch:66]: Training Loss:0.001122

2023-05-03 08:38:32,452 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 08:38:39,156 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 08:38:47,229 - wn_one_to_x - [INFO] - [Evaluating Epoch 66 valid]:
MRR: Tail : 0.41434, Head : 0.39661, Avg : 0.40547

2023-05-03 08:38:49,170 - wn_one_to_x - [INFO] - [Epoch 66]: Training Loss: 0.0011223, Valid MRR: 0.40547,

2023-05-03 08:38:50,042 - wn_one_to_x - [INFO] - [E:67| 0]: Train Loss:0.0010973, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:40:17,386 - wn_one_to_x - [INFO] - [E:67| 100]: Train Loss:0.0010865, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:41:45,723 - wn_one_to_x - [INFO] - [E:67| 200]: Train Loss:0.0010914, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:43:12,843 - wn_one_to_x - [INFO] - [E:67| 300]: Train Loss:0.0010911, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:44:40,553 - wn_one_to_x - [INFO] - [E:67| 400]: Train Loss:0.0010924, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:46:08,061 - wn_one_to_x - [INFO] - [E:67| 500]: Train Loss:0.0010955, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:47:35,567 - wn_one_to_x - [INFO] - [E:67| 600]: Train Loss:0.0010971, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:49:02,544 - wn_one_to_x - [INFO] - [E:67| 700]: Train Loss:0.0010977, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:50:30,026 - wn_one_to_x - [INFO] - [E:67| 800]: Train Loss:0.0010993, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:51:57,533 - wn_one_to_x - [INFO] - [E:67| 900]: Train Loss:0.0011013, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:53:24,308 - wn_one_to_x - [INFO] - [E:67| 1000]: Train Loss:0.0011019, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:54:51,885 - wn_one_to_x - [INFO] - [E:67| 1100]: Train Loss:0.0011032, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:56:19,028 - wn_one_to_x - [INFO] - [E:67| 1200]: Train Loss:0.0011041, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:57:46,283 - wn_one_to_x - [INFO] - [E:67| 1300]: Train Loss:0.0011049, Val MRR:0.40547, wn_one_to_x
2023-05-03 08:58:34,813 - wn_one_to_x - [INFO] - [Epoch:67]: Training Loss:0.001106

2023-05-03 08:58:35,072 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 08:58:43,600 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 08:58:51,288 - wn_one_to_x - [INFO] - [Evaluating Epoch 67 valid]:
MRR: Tail : 0.4149, Head : 0.39702, Avg : 0.40596

2023-05-03 08:58:53,295 - wn_one_to_x - [INFO] - [Epoch 67]: Training Loss: 0.0011055, Valid MRR: 0.40596,

2023-05-03 08:58:54,188 - wn_one_to_x - [INFO] - [E:68| 0]: Train Loss:0.001113, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:00:22,597 - wn_one_to_x - [INFO] - [E:68| 100]: Train Loss:0.001073, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:01:50,221 - wn_one_to_x - [INFO] - [E:68| 200]: Train Loss:0.0010733, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:03:17,617 - wn_one_to_x - [INFO] - [E:68| 300]: Train Loss:0.0010753, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:04:45,791 - wn_one_to_x - [INFO] - [E:68| 400]: Train Loss:0.0010788, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:06:13,481 - wn_one_to_x - [INFO] - [E:68| 500]: Train Loss:0.0010793, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:07:41,885 - wn_one_to_x - [INFO] - [E:68| 600]: Train Loss:0.0010801, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:09:10,308 - wn_one_to_x - [INFO] - [E:68| 700]: Train Loss:0.0010818, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:10:38,322 - wn_one_to_x - [INFO] - [E:68| 800]: Train Loss:0.0010826, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:12:05,848 - wn_one_to_x - [INFO] - [E:68| 900]: Train Loss:0.001084, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:13:32,961 - wn_one_to_x - [INFO] - [E:68| 1000]: Train Loss:0.0010862, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:15:00,484 - wn_one_to_x - [INFO] - [E:68| 1100]: Train Loss:0.0010876, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:16:27,736 - wn_one_to_x - [INFO] - [E:68| 1200]: Train Loss:0.0010891, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:17:54,966 - wn_one_to_x - [INFO] - [E:68| 1300]: Train Loss:0.00109, Val MRR:0.40596, wn_one_to_x
2023-05-03 09:18:43,812 - wn_one_to_x - [INFO] - [Epoch:68]: Training Loss:0.001091

2023-05-03 09:18:44,054 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 09:18:49,887 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 09:18:55,325 - wn_one_to_x - [INFO] - [Evaluating Epoch 68 valid]:
MRR: Tail : 0.41601, Head : 0.39656, Avg : 0.40628

2023-05-03 09:18:57,198 - wn_one_to_x - [INFO] - [Epoch 68]: Training Loss: 0.001091, Valid MRR: 0.40628,

2023-05-03 09:18:58,085 - wn_one_to_x - [INFO] - [E:69| 0]: Train Loss:0.0010975, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:20:25,104 - wn_one_to_x - [INFO] - [E:69| 100]: Train Loss:0.0010641, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:21:53,341 - wn_one_to_x - [INFO] - [E:69| 200]: Train Loss:0.0010643, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:23:20,770 - wn_one_to_x - [INFO] - [E:69| 300]: Train Loss:0.0010664, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:24:48,218 - wn_one_to_x - [INFO] - [E:69| 400]: Train Loss:0.0010678, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:26:16,632 - wn_one_to_x - [INFO] - [E:69| 500]: Train Loss:0.0010694, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:27:44,547 - wn_one_to_x - [INFO] - [E:69| 600]: Train Loss:0.0010696, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:29:11,347 - wn_one_to_x - [INFO] - [E:69| 700]: Train Loss:0.0010711, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:30:39,007 - wn_one_to_x - [INFO] - [E:69| 800]: Train Loss:0.0010729, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:32:06,464 - wn_one_to_x - [INFO] - [E:69| 900]: Train Loss:0.0010751, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:33:33,895 - wn_one_to_x - [INFO] - [E:69| 1000]: Train Loss:0.001076, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:35:01,554 - wn_one_to_x - [INFO] - [E:69| 1100]: Train Loss:0.0010772, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:36:28,704 - wn_one_to_x - [INFO] - [E:69| 1200]: Train Loss:0.001078, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:37:56,811 - wn_one_to_x - [INFO] - [E:69| 1300]: Train Loss:0.0010788, Val MRR:0.40628, wn_one_to_x
2023-05-03 09:38:45,487 - wn_one_to_x - [INFO] - [Epoch:69]: Training Loss:0.001079

2023-05-03 09:38:45,732 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 09:38:51,242 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 09:38:56,555 - wn_one_to_x - [INFO] - [Evaluating Epoch 69 valid]:
MRR: Tail : 0.42024, Head : 0.40091, Avg : 0.41058
MR: Tail : 3586.9, Head : 3931.0, Avg : 3759.0
Hit-1: Tail : 0.38332, Head : 0.36717, Avg : 0.37525
Hit-3: Tail : 0.44001, Head : 0.42024, Avg : 0.43013
Hit-10: Tail : 0.47891, Head : 0.45847, Avg : 0.46869

2023-05-03 09:38:58,502 - wn_one_to_x - [INFO] - [Epoch 69]: Training Loss: 0.0010792, Valid MRR: 0.41058,

2023-05-03 09:38:59,436 - wn_one_to_x - [INFO] - [E:70| 0]: Train Loss:0.0010346, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:40:28,075 - wn_one_to_x - [INFO] - [E:70| 100]: Train Loss:0.0010436, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:41:55,696 - wn_one_to_x - [INFO] - [E:70| 200]: Train Loss:0.0010469, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:43:22,873 - wn_one_to_x - [INFO] - [E:70| 300]: Train Loss:0.0010492, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:44:50,499 - wn_one_to_x - [INFO] - [E:70| 400]: Train Loss:0.0010513, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:46:18,039 - wn_one_to_x - [INFO] - [E:70| 500]: Train Loss:0.0010517, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:47:44,869 - wn_one_to_x - [INFO] - [E:70| 600]: Train Loss:0.0010541, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:49:12,027 - wn_one_to_x - [INFO] - [E:70| 700]: Train Loss:0.0010563, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:50:40,211 - wn_one_to_x - [INFO] - [E:70| 800]: Train Loss:0.0010579, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:52:07,927 - wn_one_to_x - [INFO] - [E:70| 900]: Train Loss:0.0010598, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:53:35,519 - wn_one_to_x - [INFO] - [E:70| 1000]: Train Loss:0.0010614, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:55:03,115 - wn_one_to_x - [INFO] - [E:70| 1100]: Train Loss:0.0010623, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:56:29,093 - wn_one_to_x - [INFO] - [E:70| 1200]: Train Loss:0.0010636, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:57:55,729 - wn_one_to_x - [INFO] - [E:70| 1300]: Train Loss:0.0010646, Val MRR:0.41058, wn_one_to_x
2023-05-03 09:58:44,030 - wn_one_to_x - [INFO] - [Epoch:70]: Training Loss:0.001065

2023-05-03 09:58:44,275 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 09:58:50,789 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 09:58:56,119 - wn_one_to_x - [INFO] - [Evaluating Epoch 70 valid]:
MRR: Tail : 0.42137, Head : 0.40061, Avg : 0.41099

2023-05-03 09:58:57,932 - wn_one_to_x - [INFO] - [Epoch 70]: Training Loss: 0.0010655, Valid MRR: 0.41099,

2023-05-03 09:58:58,802 - wn_one_to_x - [INFO] - [E:71| 0]: Train Loss:0.0010233, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:00:25,016 - wn_one_to_x - [INFO] - [E:71| 100]: Train Loss:0.0010333, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:01:51,282 - wn_one_to_x - [INFO] - [E:71| 200]: Train Loss:0.0010377, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:03:17,797 - wn_one_to_x - [INFO] - [E:71| 300]: Train Loss:0.0010404, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:04:44,264 - wn_one_to_x - [INFO] - [E:71| 400]: Train Loss:0.0010412, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:06:10,396 - wn_one_to_x - [INFO] - [E:71| 500]: Train Loss:0.0010422, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:07:37,573 - wn_one_to_x - [INFO] - [E:71| 600]: Train Loss:0.001044, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:09:03,647 - wn_one_to_x - [INFO] - [E:71| 700]: Train Loss:0.0010443, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:10:30,009 - wn_one_to_x - [INFO] - [E:71| 800]: Train Loss:0.0010447, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:11:56,916 - wn_one_to_x - [INFO] - [E:71| 900]: Train Loss:0.0010463, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:13:23,204 - wn_one_to_x - [INFO] - [E:71| 1000]: Train Loss:0.0010477, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:14:49,632 - wn_one_to_x - [INFO] - [E:71| 1100]: Train Loss:0.0010488, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:16:16,208 - wn_one_to_x - [INFO] - [E:71| 1200]: Train Loss:0.0010496, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:17:42,939 - wn_one_to_x - [INFO] - [E:71| 1300]: Train Loss:0.0010511, Val MRR:0.41099, wn_one_to_x
2023-05-03 10:18:31,280 - wn_one_to_x - [INFO] - [Epoch:71]: Training Loss:0.001052

2023-05-03 10:18:31,522 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 10:18:36,983 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 10:18:42,631 - wn_one_to_x - [INFO] - [Evaluating Epoch 71 valid]:
MRR: Tail : 0.42256, Head : 0.40135, Avg : 0.41195

2023-05-03 10:18:44,528 - wn_one_to_x - [INFO] - [Epoch 71]: Training Loss: 0.0010517, Valid MRR: 0.41195,

2023-05-03 10:18:45,443 - wn_one_to_x - [INFO] - [E:72| 0]: Train Loss:0.0010276, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:20:11,473 - wn_one_to_x - [INFO] - [E:72| 100]: Train Loss:0.0010235, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:21:37,801 - wn_one_to_x - [INFO] - [E:72| 200]: Train Loss:0.0010285, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:23:04,178 - wn_one_to_x - [INFO] - [E:72| 300]: Train Loss:0.0010294, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:24:30,177 - wn_one_to_x - [INFO] - [E:72| 400]: Train Loss:0.0010296, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:25:56,433 - wn_one_to_x - [INFO] - [E:72| 500]: Train Loss:0.0010307, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:27:22,690 - wn_one_to_x - [INFO] - [E:72| 600]: Train Loss:0.0010309, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:28:48,885 - wn_one_to_x - [INFO] - [E:72| 700]: Train Loss:0.0010326, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:30:16,045 - wn_one_to_x - [INFO] - [E:72| 800]: Train Loss:0.0010343, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:31:42,754 - wn_one_to_x - [INFO] - [E:72| 900]: Train Loss:0.0010358, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:33:09,549 - wn_one_to_x - [INFO] - [E:72| 1000]: Train Loss:0.0010375, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:34:36,060 - wn_one_to_x - [INFO] - [E:72| 1100]: Train Loss:0.0010382, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:36:02,352 - wn_one_to_x - [INFO] - [E:72| 1200]: Train Loss:0.0010385, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:37:28,765 - wn_one_to_x - [INFO] - [E:72| 1300]: Train Loss:0.0010395, Val MRR:0.41195, wn_one_to_x
2023-05-03 10:38:17,143 - wn_one_to_x - [INFO] - [Epoch:72]: Training Loss:0.00104

2023-05-03 10:38:17,389 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 10:38:22,874 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 10:38:28,127 - wn_one_to_x - [INFO] - [Evaluating Epoch 72 valid]:
MRR: Tail : 0.42525, Head : 0.40348, Avg : 0.41437

2023-05-03 10:38:30,006 - wn_one_to_x - [INFO] - [Epoch 72]: Training Loss: 0.00104, Valid MRR: 0.41437,

2023-05-03 10:38:30,948 - wn_one_to_x - [INFO] - [E:73| 0]: Train Loss:0.0010375, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:39:57,175 - wn_one_to_x - [INFO] - [E:73| 100]: Train Loss:0.0010148, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:41:23,939 - wn_one_to_x - [INFO] - [E:73| 200]: Train Loss:0.0010185, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:42:51,065 - wn_one_to_x - [INFO] - [E:73| 300]: Train Loss:0.0010171, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:44:17,325 - wn_one_to_x - [INFO] - [E:73| 400]: Train Loss:0.0010196, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:45:43,739 - wn_one_to_x - [INFO] - [E:73| 500]: Train Loss:0.0010204, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:47:09,907 - wn_one_to_x - [INFO] - [E:73| 600]: Train Loss:0.0010219, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:48:36,346 - wn_one_to_x - [INFO] - [E:73| 700]: Train Loss:0.0010231, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:50:02,653 - wn_one_to_x - [INFO] - [E:73| 800]: Train Loss:0.0010245, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:51:28,838 - wn_one_to_x - [INFO] - [E:73| 900]: Train Loss:0.0010251, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:52:55,505 - wn_one_to_x - [INFO] - [E:73| 1000]: Train Loss:0.0010269, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:54:21,607 - wn_one_to_x - [INFO] - [E:73| 1100]: Train Loss:0.0010283, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:55:48,248 - wn_one_to_x - [INFO] - [E:73| 1200]: Train Loss:0.001029, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:57:14,671 - wn_one_to_x - [INFO] - [E:73| 1300]: Train Loss:0.0010297, Val MRR:0.41437, wn_one_to_x
2023-05-03 10:58:02,917 - wn_one_to_x - [INFO] - [Epoch:73]: Training Loss:0.00103

2023-05-03 10:58:03,161 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 10:58:08,643 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 10:58:13,900 - wn_one_to_x - [INFO] - [Evaluating Epoch 73 valid]:
MRR: Tail : 0.426, Head : 0.40615, Avg : 0.41608

2023-05-03 10:58:15,623 - wn_one_to_x - [INFO] - [Epoch 73]: Training Loss: 0.00103, Valid MRR: 0.41608,

2023-05-03 10:58:16,504 - wn_one_to_x - [INFO] - [E:74| 0]: Train Loss:0.00099527, Val MRR:0.41608, wn_one_to_x
2023-05-03 10:59:43,043 - wn_one_to_x - [INFO] - [E:74| 100]: Train Loss:0.0010003, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:01:09,435 - wn_one_to_x - [INFO] - [E:74| 200]: Train Loss:0.0010041, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:02:35,867 - wn_one_to_x - [INFO] - [E:74| 300]: Train Loss:0.0010076, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:04:02,185 - wn_one_to_x - [INFO] - [E:74| 400]: Train Loss:0.0010079, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:05:28,577 - wn_one_to_x - [INFO] - [E:74| 500]: Train Loss:0.0010087, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:06:55,051 - wn_one_to_x - [INFO] - [E:74| 600]: Train Loss:0.0010103, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:08:21,519 - wn_one_to_x - [INFO] - [E:74| 700]: Train Loss:0.0010114, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:09:48,277 - wn_one_to_x - [INFO] - [E:74| 800]: Train Loss:0.0010126, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:11:14,787 - wn_one_to_x - [INFO] - [E:74| 900]: Train Loss:0.0010143, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:12:41,512 - wn_one_to_x - [INFO] - [E:74| 1000]: Train Loss:0.0010161, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:14:08,675 - wn_one_to_x - [INFO] - [E:74| 1100]: Train Loss:0.0010169, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:15:35,653 - wn_one_to_x - [INFO] - [E:74| 1200]: Train Loss:0.0010179, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:17:02,098 - wn_one_to_x - [INFO] - [E:74| 1300]: Train Loss:0.0010189, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:17:50,355 - wn_one_to_x - [INFO] - [Epoch:74]: Training Loss:0.001019

2023-05-03 11:17:50,597 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 11:17:56,210 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 11:18:02,237 - wn_one_to_x - [INFO] - [Evaluating Epoch 74 valid]:
MRR: Tail : 0.4233, Head : 0.40504, Avg : 0.41417

2023-05-03 11:18:02,238 - wn_one_to_x - [INFO] - [Epoch 74]: Training Loss: 0.0010192, Valid MRR: 0.41608,
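
Note the bookkeeping from this point on: the "Evaluating" block above reports this epoch's average MRR (0.41417), yet the "[Epoch 74]" summary and the following batch lines keep echoing Valid MRR: 0.41608. The field evidently tracks the best validation MRR seen so far (set at epoch 73), not the current epoch's score. A runnable sketch of that best-so-far tracking (the loop is hypothetical, not the run's actual code; the epoch MRRs are copied from the log):

    # Best-so-far validation tracking as suggested by the log: the trailing
    # "Valid MRR" stops changing when an epoch fails to improve on 0.41608.
    epoch_mrrs = {73: 0.41608, 74: 0.41417, 75: 0.41207,
                  76: 0.41532, 77: 0.41445, 78: 0.41481}
    best = 0.0
    for epoch, mrr in sorted(epoch_mrrs.items()):
        if mrr > best:
            best = mrr  # a new best would typically also trigger a checkpoint save
        print(f"[Epoch {epoch}]: Valid MRR: {best},")
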

2023-05-03 11:18:03,113 - wn_one_to_x - [INFO] - [E:75| 0]: Train Loss:0.00098469, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:19:29,372 - wn_one_to_x - [INFO] - [E:75| 100]: Train Loss:0.00099334, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:20:55,762 - wn_one_to_x - [INFO] - [E:75| 200]: Train Loss:0.00099498, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:22:22,328 - wn_one_to_x - [INFO] - [E:75| 300]: Train Loss:0.00099518, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:23:48,702 - wn_one_to_x - [INFO] - [E:75| 400]: Train Loss:0.00099815, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:25:15,283 - wn_one_to_x - [INFO] - [E:75| 500]: Train Loss:0.00099935, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:26:41,729 - wn_one_to_x - [INFO] - [E:75| 600]: Train Loss:0.0010018, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:28:08,415 - wn_one_to_x - [INFO] - [E:75| 700]: Train Loss:0.0010028, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:29:34,693 - wn_one_to_x - [INFO] - [E:75| 800]: Train Loss:0.0010033, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:31:01,051 - wn_one_to_x - [INFO] - [E:75| 900]: Train Loss:0.0010036, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:32:27,340 - wn_one_to_x - [INFO] - [E:75| 1000]: Train Loss:0.0010039, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:33:53,861 - wn_one_to_x - [INFO] - [E:75| 1100]: Train Loss:0.0010042, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:35:19,856 - wn_one_to_x - [INFO] - [E:75| 1200]: Train Loss:0.0010055, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:36:45,483 - wn_one_to_x - [INFO] - [E:75| 1300]: Train Loss:0.0010065, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:37:34,526 - wn_one_to_x - [INFO] - [Epoch:75]: Training Loss:0.001007

2023-05-03 11:37:34,773 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 11:37:40,275 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 11:37:45,534 - wn_one_to_x - [INFO] - [Evaluating Epoch 75 valid]:
MRR: Tail : 0.42145, Head : 0.40269, Avg : 0.41207

2023-05-03 11:37:45,534 - wn_one_to_x - [INFO] - [Epoch 75]: Training Loss: 0.0010074, Valid MRR: 0.41608,

2023-05-03 11:37:46,399 - wn_one_to_x - [INFO] - [E:76| 0]: Train Loss:0.00097469, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:39:12,260 - wn_one_to_x - [INFO] - [E:76| 100]: Train Loss:0.00098572, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:40:38,524 - wn_one_to_x - [INFO] - [E:76| 200]: Train Loss:0.00098446, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:42:04,727 - wn_one_to_x - [INFO] - [E:76| 300]: Train Loss:0.00098457, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:43:30,825 - wn_one_to_x - [INFO] - [E:76| 400]: Train Loss:0.00098628, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:44:57,009 - wn_one_to_x - [INFO] - [E:76| 500]: Train Loss:0.00098774, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:46:23,797 - wn_one_to_x - [INFO] - [E:76| 600]: Train Loss:0.00098906, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:47:49,913 - wn_one_to_x - [INFO] - [E:76| 700]: Train Loss:0.00099007, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:49:15,895 - wn_one_to_x - [INFO] - [E:76| 800]: Train Loss:0.0009913, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:50:42,438 - wn_one_to_x - [INFO] - [E:76| 900]: Train Loss:0.00099305, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:52:08,724 - wn_one_to_x - [INFO] - [E:76| 1000]: Train Loss:0.00099427, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:53:34,754 - wn_one_to_x - [INFO] - [E:76| 1100]: Train Loss:0.0009951, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:55:01,231 - wn_one_to_x - [INFO] - [E:76| 1200]: Train Loss:0.00099563, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:56:27,144 - wn_one_to_x - [INFO] - [E:76| 1300]: Train Loss:0.00099728, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:57:15,461 - wn_one_to_x - [INFO] - [Epoch:76]: Training Loss:0.0009979

2023-05-03 11:57:15,701 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 11:57:21,351 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 11:57:27,087 - wn_one_to_x - [INFO] - [Evaluating Epoch 76 valid]:
MRR: Tail : 0.42562, Head : 0.40502, Avg : 0.41532

2023-05-03 11:57:27,087 - wn_one_to_x - [INFO] - [Epoch 76]: Training Loss: 0.00099786, Valid MRR: 0.41608,

2023-05-03 11:57:27,965 - wn_one_to_x - [INFO] - [E:77| 0]: Train Loss:0.0010142, Val MRR:0.41608, wn_one_to_x
2023-05-03 11:58:54,625 - wn_one_to_x - [INFO] - [E:77| 100]: Train Loss:0.00097788, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:00:21,791 - wn_one_to_x - [INFO] - [E:77| 200]: Train Loss:0.00097874, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:01:48,360 - wn_one_to_x - [INFO] - [E:77| 300]: Train Loss:0.00098194, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:03:14,685 - wn_one_to_x - [INFO] - [E:77| 400]: Train Loss:0.00098343, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:04:41,052 - wn_one_to_x - [INFO] - [E:77| 500]: Train Loss:0.00098433, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:06:07,080 - wn_one_to_x - [INFO] - [E:77| 600]: Train Loss:0.00098531, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:07:33,259 - wn_one_to_x - [INFO] - [E:77| 700]: Train Loss:0.00098696, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:08:59,081 - wn_one_to_x - [INFO] - [E:77| 800]: Train Loss:0.00098795, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:10:25,520 - wn_one_to_x - [INFO] - [E:77| 900]: Train Loss:0.00098813, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:11:51,913 - wn_one_to_x - [INFO] - [E:77| 1000]: Train Loss:0.00098866, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:13:18,545 - wn_one_to_x - [INFO] - [E:77| 1100]: Train Loss:0.00098967, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:14:45,012 - wn_one_to_x - [INFO] - [E:77| 1200]: Train Loss:0.00099032, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:16:11,613 - wn_one_to_x - [INFO] - [E:77| 1300]: Train Loss:0.00099134, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:17:00,857 - wn_one_to_x - [INFO] - [Epoch:77]: Training Loss:0.0009912

2023-05-03 12:17:01,285 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 12:17:07,728 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 12:17:14,165 - wn_one_to_x - [INFO] - [Evaluating Epoch 77 valid]:
MRR: Tail : 0.42359, Head : 0.40532, Avg : 0.41445

2023-05-03 12:17:14,387 - wn_one_to_x - [INFO] - [Epoch 77]: Training Loss: 0.00099119, Valid MRR: 0.41608,

2023-05-03 12:17:15,324 - wn_one_to_x - [INFO] - [E:78| 0]: Train Loss:0.00095552, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:18:43,290 - wn_one_to_x - [INFO] - [E:78| 100]: Train Loss:0.00097386, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:20:09,421 - wn_one_to_x - [INFO] - [E:78| 200]: Train Loss:0.00096992, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:21:35,950 - wn_one_to_x - [INFO] - [E:78| 300]: Train Loss:0.0009694, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:23:02,851 - wn_one_to_x - [INFO] - [E:78| 400]: Train Loss:0.00097154, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:24:28,810 - wn_one_to_x - [INFO] - [E:78| 500]: Train Loss:0.00097337, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:25:55,088 - wn_one_to_x - [INFO] - [E:78| 600]: Train Loss:0.00097327, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:27:21,109 - wn_one_to_x - [INFO] - [E:78| 700]: Train Loss:0.00097493, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:28:47,306 - wn_one_to_x - [INFO] - [E:78| 800]: Train Loss:0.00097631, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:30:13,639 - wn_one_to_x - [INFO] - [E:78| 900]: Train Loss:0.0009765, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:31:39,883 - wn_one_to_x - [INFO] - [E:78| 1000]: Train Loss:0.00097763, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:33:05,819 - wn_one_to_x - [INFO] - [E:78| 1100]: Train Loss:0.00097876, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:34:32,159 - wn_one_to_x - [INFO] - [E:78| 1200]: Train Loss:0.00097983, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:35:58,393 - wn_one_to_x - [INFO] - [E:78| 1300]: Train Loss:0.00098072, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:36:46,650 - wn_one_to_x - [INFO] - [Epoch:78]: Training Loss:0.0009815

2023-05-03 12:36:46,889 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 12:36:52,532 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 12:36:57,800 - wn_one_to_x - [INFO] - [Evaluating Epoch 78 valid]:
MRR: Tail : 0.42419, Head : 0.40543, Avg : 0.41481

2023-05-03 12:36:57,801 - wn_one_to_x - [INFO] - [Epoch 78]: Training Loss: 0.0009815, Valid MRR: 0.41608,

2023-05-03 12:36:58,707 - wn_one_to_x - [INFO] - [E:79| 0]: Train Loss:0.00093749, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:38:25,204 - wn_one_to_x - [INFO] - [E:79| 100]: Train Loss:0.00095972, Val MRR:0.41608, wn_one_to_x
2023-05-03 12:39:51,427 - wn_one_to_x - [INFO] - [E:79| 200]: Train Loss:0.00096353, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:41:17,401 - wn_one_to_x - [INFO] - [E:79| 300]: Train Loss:0.00096423, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:42:43,854 - wn_one_to_x - [INFO] - [E:79| 400]: Train Loss:0.00096577, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:44:09,820 - wn_one_to_x - [INFO] - [E:79| 500]: Train Loss:0.00096669, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:45:36,633 - wn_one_to_x - [INFO] - [E:79| 600]: Train Loss:0.00096791, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:47:03,165 - wn_one_to_x - [INFO] - [E:79| 700]: Train Loss:0.00096914, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:48:28,994 - wn_one_to_x - [INFO] - [E:79| 800]: Train Loss:0.00097032, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:49:55,535 - wn_one_to_x - [INFO] - [E:79| 900]: Train Loss:0.00097067, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:51:21,647 - wn_one_to_x - [INFO] - [E:79| 1000]: Train Loss:0.00097173, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:52:47,725 - wn_one_to_x - [INFO] - [E:79| 1100]: Train Loss:0.00097276, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:54:14,448 - wn_one_to_x - [INFO] - [E:79| 1200]: Train Loss:0.00097354, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:55:40,583 - wn_one_to_x - [INFO] - [E:79| 1300]: Train Loss:0.00097497, Val MRR:0.41608, wn_one_to_x
|
|
2023-05-03 12:56:28,797 - wn_one_to_x - [INFO] - [Epoch:79]: Training Loss:0.0009755
|
|
|
|
2023-05-03 12:56:29,040 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 12:56:34,627 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 12:56:39,892 - wn_one_to_x - [INFO] - [Evaluating Epoch 79 valid]:
|
|
MRR: Tail : 0.42811, Head : 0.40735, Avg : 0.41773
|
|
MR: Tail : 3912.7, Head : 3935.3, Avg : 3924.0
|
|
Hit-1: Tail : 0.39618, Head : 0.37574, Avg : 0.38596
|
|
Hit-3: Tail : 0.44331, Head : 0.42287, Avg : 0.43309
|
|
Hit-10: Tail : 0.48187, Head : 0.46012, Avg : 0.471
|
|
2023-05-03 12:56:41,665 - wn_one_to_x - [INFO] - [Epoch 79]: Training Loss: 0.0009755, Valid MRR: 0.41773,
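
For reference, the block above reports the standard link-prediction ranking metrics: MRR is the mean reciprocal rank of the correct entity over all validation queries, MR is the mean raw rank, and Hit-k is the fraction of queries whose correct entity ranks within the top k. A minimal sketch of these definitions (the standard formulas, not this repository's evaluation code):

def ranking_metrics(ranks, ks=(1, 3, 10)):
    """MRR, MR and Hit-k from the rank of the true entity for each query."""
    n = len(ranks)
    out = {'MRR': sum(1.0 / r for r in ranks) / n,
           'MR': sum(ranks) / n}
    for k in ks:
        out[f'Hit-{k}'] = sum(r <= k for r in ranks) / n
    return out

# Three queries whose true entity ranked 1st, 4th and 20th:
print(ranking_metrics([1, 4, 20]))
# -> MRR 0.433..., MR 8.33..., Hit-1 0.333..., Hit-3 0.333..., Hit-10 0.667...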

2023-05-03 12:56:42,551 - wn_one_to_x - [INFO] - [E:80| 0]: Train Loss:0.0010059, Val MRR:0.41773, wn_one_to_x
2023-05-03 12:58:08,742 - wn_one_to_x - [INFO] - [E:80| 100]: Train Loss:0.00095357, Val MRR:0.41773, wn_one_to_x
2023-05-03 12:59:34,827 - wn_one_to_x - [INFO] - [E:80| 200]: Train Loss:0.00095509, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:01:01,226 - wn_one_to_x - [INFO] - [E:80| 300]: Train Loss:0.0009558, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:02:27,452 - wn_one_to_x - [INFO] - [E:80| 400]: Train Loss:0.00095691, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:03:53,687 - wn_one_to_x - [INFO] - [E:80| 500]: Train Loss:0.00095792, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:05:19,967 - wn_one_to_x - [INFO] - [E:80| 600]: Train Loss:0.00095903, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:06:46,443 - wn_one_to_x - [INFO] - [E:80| 700]: Train Loss:0.00096053, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:08:13,646 - wn_one_to_x - [INFO] - [E:80| 800]: Train Loss:0.0009611, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:09:39,783 - wn_one_to_x - [INFO] - [E:80| 900]: Train Loss:0.00096242, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:11:05,574 - wn_one_to_x - [INFO] - [E:80| 1000]: Train Loss:0.00096228, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:12:31,991 - wn_one_to_x - [INFO] - [E:80| 1100]: Train Loss:0.00096287, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:13:58,290 - wn_one_to_x - [INFO] - [E:80| 1200]: Train Loss:0.00096344, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:15:24,403 - wn_one_to_x - [INFO] - [E:80| 1300]: Train Loss:0.00096435, Val MRR:0.41773, wn_one_to_x
2023-05-03 13:16:12,661 - wn_one_to_x - [INFO] - [Epoch:80]: Training Loss:0.000965

2023-05-03 13:16:12,905 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 13:16:18,524 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 13:16:23,773 - wn_one_to_x - [INFO] - [Evaluating Epoch 80 valid]:
MRR: Tail : 0.42922, Head : 0.40769, Avg : 0.41846

2023-05-03 13:16:25,685 - wn_one_to_x - [INFO] - [Epoch 80]: Training Loss: 0.00096505, Valid MRR: 0.41846,

2023-05-03 13:16:26,562 - wn_one_to_x - [INFO] - [E:81| 0]: Train Loss:0.00092343, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:17:52,683 - wn_one_to_x - [INFO] - [E:81| 100]: Train Loss:0.00095358, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:19:18,887 - wn_one_to_x - [INFO] - [E:81| 200]: Train Loss:0.00094968, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:20:45,150 - wn_one_to_x - [INFO] - [E:81| 300]: Train Loss:0.00094949, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:22:11,424 - wn_one_to_x - [INFO] - [E:81| 400]: Train Loss:0.00095086, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:23:37,186 - wn_one_to_x - [INFO] - [E:81| 500]: Train Loss:0.00095301, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:25:03,639 - wn_one_to_x - [INFO] - [E:81| 600]: Train Loss:0.00095403, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:26:29,959 - wn_one_to_x - [INFO] - [E:81| 700]: Train Loss:0.00095495, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:27:56,102 - wn_one_to_x - [INFO] - [E:81| 800]: Train Loss:0.00095629, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:29:22,266 - wn_one_to_x - [INFO] - [E:81| 900]: Train Loss:0.00095643, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:30:49,515 - wn_one_to_x - [INFO] - [E:81| 1000]: Train Loss:0.00095684, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:32:16,028 - wn_one_to_x - [INFO] - [E:81| 1100]: Train Loss:0.00095765, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:33:42,742 - wn_one_to_x - [INFO] - [E:81| 1200]: Train Loss:0.00095859, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:35:09,125 - wn_one_to_x - [INFO] - [E:81| 1300]: Train Loss:0.0009598, Val MRR:0.41846, wn_one_to_x
2023-05-03 13:35:57,461 - wn_one_to_x - [INFO] - [Epoch:81]: Training Loss:0.0009602

2023-05-03 13:35:57,706 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 13:36:03,237 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 13:36:08,520 - wn_one_to_x - [INFO] - [Evaluating Epoch 81 valid]:
MRR: Tail : 0.42882, Head : 0.40892, Avg : 0.41887

2023-05-03 13:36:10,340 - wn_one_to_x - [INFO] - [Epoch 81]: Training Loss: 0.00096021, Valid MRR: 0.41887,

2023-05-03 13:36:11,225 - wn_one_to_x - [INFO] - [E:82| 0]: Train Loss:0.0009004, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:37:37,806 - wn_one_to_x - [INFO] - [E:82| 100]: Train Loss:0.00093909, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:39:03,914 - wn_one_to_x - [INFO] - [E:82| 200]: Train Loss:0.00094252, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:40:30,020 - wn_one_to_x - [INFO] - [E:82| 300]: Train Loss:0.00094269, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:41:56,576 - wn_one_to_x - [INFO] - [E:82| 400]: Train Loss:0.00094492, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:43:22,895 - wn_one_to_x - [INFO] - [E:82| 500]: Train Loss:0.00094521, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:44:49,173 - wn_one_to_x - [INFO] - [E:82| 600]: Train Loss:0.00094622, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:46:15,522 - wn_one_to_x - [INFO] - [E:82| 700]: Train Loss:0.00094611, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:47:41,838 - wn_one_to_x - [INFO] - [E:82| 800]: Train Loss:0.00094657, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:49:08,533 - wn_one_to_x - [INFO] - [E:82| 900]: Train Loss:0.00094753, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:50:34,793 - wn_one_to_x - [INFO] - [E:82| 1000]: Train Loss:0.00094835, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:52:02,207 - wn_one_to_x - [INFO] - [E:82| 1100]: Train Loss:0.00094928, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:53:29,425 - wn_one_to_x - [INFO] - [E:82| 1200]: Train Loss:0.00095032, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:54:55,820 - wn_one_to_x - [INFO] - [E:82| 1300]: Train Loss:0.00095131, Val MRR:0.41887, wn_one_to_x
2023-05-03 13:55:43,886 - wn_one_to_x - [INFO] - [Epoch:82]: Training Loss:0.0009519

2023-05-03 13:55:44,130 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 13:55:49,788 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 13:55:55,061 - wn_one_to_x - [INFO] - [Evaluating Epoch 82 valid]:
MRR: Tail : 0.42903, Head : 0.41037, Avg : 0.4197

2023-05-03 13:55:57,202 - wn_one_to_x - [INFO] - [Epoch 82]: Training Loss: 0.00095194, Valid MRR: 0.4197,

2023-05-03 13:55:58,072 - wn_one_to_x - [INFO] - [E:83| 0]: Train Loss:0.00091192, Val MRR:0.4197, wn_one_to_x
2023-05-03 13:57:25,009 - wn_one_to_x - [INFO] - [E:83| 100]: Train Loss:0.0009354, Val MRR:0.4197, wn_one_to_x
2023-05-03 13:58:51,286 - wn_one_to_x - [INFO] - [E:83| 200]: Train Loss:0.00093658, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:00:17,339 - wn_one_to_x - [INFO] - [E:83| 300]: Train Loss:0.00093613, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:01:43,756 - wn_one_to_x - [INFO] - [E:83| 400]: Train Loss:0.00093662, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:03:09,836 - wn_one_to_x - [INFO] - [E:83| 500]: Train Loss:0.00093678, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:04:37,652 - wn_one_to_x - [INFO] - [E:83| 600]: Train Loss:0.00093748, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:06:03,630 - wn_one_to_x - [INFO] - [E:83| 700]: Train Loss:0.00093812, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:07:30,211 - wn_one_to_x - [INFO] - [E:83| 800]: Train Loss:0.00093873, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:08:56,701 - wn_one_to_x - [INFO] - [E:83| 900]: Train Loss:0.00093943, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:10:22,774 - wn_one_to_x - [INFO] - [E:83| 1000]: Train Loss:0.00094061, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:11:48,875 - wn_one_to_x - [INFO] - [E:83| 1100]: Train Loss:0.00094182, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:13:15,083 - wn_one_to_x - [INFO] - [E:83| 1200]: Train Loss:0.00094271, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:14:40,909 - wn_one_to_x - [INFO] - [E:83| 1300]: Train Loss:0.00094382, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:15:29,390 - wn_one_to_x - [INFO] - [Epoch:83]: Training Loss:0.0009443

2023-05-03 14:15:29,635 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 14:15:35,089 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 14:15:40,353 - wn_one_to_x - [INFO] - [Evaluating Epoch 83 valid]:
MRR: Tail : 0.42761, Head : 0.41072, Avg : 0.41917

2023-05-03 14:15:40,354 - wn_one_to_x - [INFO] - [Epoch 83]: Training Loss: 0.00094427, Valid MRR: 0.4197,

2023-05-03 14:15:41,227 - wn_one_to_x - [INFO] - [E:84| 0]: Train Loss:0.0009279, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:17:07,518 - wn_one_to_x - [INFO] - [E:84| 100]: Train Loss:0.00093179, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:18:33,258 - wn_one_to_x - [INFO] - [E:84| 200]: Train Loss:0.00093361, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:19:59,180 - wn_one_to_x - [INFO] - [E:84| 300]: Train Loss:0.00093524, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:21:25,210 - wn_one_to_x - [INFO] - [E:84| 400]: Train Loss:0.00093602, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:22:51,470 - wn_one_to_x - [INFO] - [E:84| 500]: Train Loss:0.00093773, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:24:17,685 - wn_one_to_x - [INFO] - [E:84| 600]: Train Loss:0.0009374, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:25:43,753 - wn_one_to_x - [INFO] - [E:84| 700]: Train Loss:0.00093812, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:27:09,724 - wn_one_to_x - [INFO] - [E:84| 800]: Train Loss:0.00093869, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:28:35,903 - wn_one_to_x - [INFO] - [E:84| 900]: Train Loss:0.0009391, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:30:02,015 - wn_one_to_x - [INFO] - [E:84| 1000]: Train Loss:0.00093853, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:31:28,484 - wn_one_to_x - [INFO] - [E:84| 1100]: Train Loss:0.00093949, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:32:54,666 - wn_one_to_x - [INFO] - [E:84| 1200]: Train Loss:0.00094007, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:34:21,268 - wn_one_to_x - [INFO] - [E:84| 1300]: Train Loss:0.0009401, Val MRR:0.4197, wn_one_to_x
2023-05-03 14:35:09,521 - wn_one_to_x - [INFO] - [Epoch:84]: Training Loss:0.0009407

2023-05-03 14:35:09,754 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 14:35:15,329 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 14:35:20,687 - wn_one_to_x - [INFO] - [Evaluating Epoch 84 valid]:
MRR: Tail : 0.42994, Head : 0.40979, Avg : 0.41986

2023-05-03 14:35:22,441 - wn_one_to_x - [INFO] - [Epoch 84]: Training Loss: 0.00094069, Valid MRR: 0.41986,

2023-05-03 14:35:23,316 - wn_one_to_x - [INFO] - [E:85| 0]: Train Loss:0.00091859, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:36:50,546 - wn_one_to_x - [INFO] - [E:85| 100]: Train Loss:0.00092342, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:38:16,434 - wn_one_to_x - [INFO] - [E:85| 200]: Train Loss:0.00092367, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:39:42,695 - wn_one_to_x - [INFO] - [E:85| 300]: Train Loss:0.00092566, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:41:09,064 - wn_one_to_x - [INFO] - [E:85| 400]: Train Loss:0.0009274, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:42:35,237 - wn_one_to_x - [INFO] - [E:85| 500]: Train Loss:0.0009289, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:44:01,941 - wn_one_to_x - [INFO] - [E:85| 600]: Train Loss:0.00093083, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:45:27,783 - wn_one_to_x - [INFO] - [E:85| 700]: Train Loss:0.0009314, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:46:54,212 - wn_one_to_x - [INFO] - [E:85| 800]: Train Loss:0.00093282, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:48:20,226 - wn_one_to_x - [INFO] - [E:85| 900]: Train Loss:0.00093369, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:49:46,519 - wn_one_to_x - [INFO] - [E:85| 1000]: Train Loss:0.00093385, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:51:12,717 - wn_one_to_x - [INFO] - [E:85| 1100]: Train Loss:0.00093436, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:52:41,193 - wn_one_to_x - [INFO] - [E:85| 1200]: Train Loss:0.00093468, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:54:07,164 - wn_one_to_x - [INFO] - [E:85| 1300]: Train Loss:0.00093514, Val MRR:0.41986, wn_one_to_x
2023-05-03 14:54:55,352 - wn_one_to_x - [INFO] - [Epoch:85]: Training Loss:0.0009354

2023-05-03 14:54:55,599 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 14:55:01,114 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 14:55:06,612 - wn_one_to_x - [INFO] - [Evaluating Epoch 85 valid]:
MRR: Tail : 0.4269, Head : 0.41318, Avg : 0.42004

2023-05-03 14:55:08,395 - wn_one_to_x - [INFO] - [Epoch 85]: Training Loss: 0.0009354, Valid MRR: 0.42004,

2023-05-03 14:55:09,289 - wn_one_to_x - [INFO] - [E:86| 0]: Train Loss:0.00092648, Val MRR:0.42004, wn_one_to_x
2023-05-03 14:56:35,135 - wn_one_to_x - [INFO] - [E:86| 100]: Train Loss:0.00092177, Val MRR:0.42004, wn_one_to_x
2023-05-03 14:58:01,752 - wn_one_to_x - [INFO] - [E:86| 200]: Train Loss:0.00092114, Val MRR:0.42004, wn_one_to_x
2023-05-03 14:59:28,281 - wn_one_to_x - [INFO] - [E:86| 300]: Train Loss:0.00092152, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:00:54,541 - wn_one_to_x - [INFO] - [E:86| 400]: Train Loss:0.0009212, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:02:20,873 - wn_one_to_x - [INFO] - [E:86| 500]: Train Loss:0.00092135, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:03:47,500 - wn_one_to_x - [INFO] - [E:86| 600]: Train Loss:0.00092269, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:05:14,225 - wn_one_to_x - [INFO] - [E:86| 700]: Train Loss:0.00092402, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:06:40,310 - wn_one_to_x - [INFO] - [E:86| 800]: Train Loss:0.0009246, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:08:06,205 - wn_one_to_x - [INFO] - [E:86| 900]: Train Loss:0.00092564, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:09:32,012 - wn_one_to_x - [INFO] - [E:86| 1000]: Train Loss:0.00092637, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:10:58,131 - wn_one_to_x - [INFO] - [E:86| 1100]: Train Loss:0.00092721, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:12:24,514 - wn_one_to_x - [INFO] - [E:86| 1200]: Train Loss:0.00092772, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:13:50,902 - wn_one_to_x - [INFO] - [E:86| 1300]: Train Loss:0.00092825, Val MRR:0.42004, wn_one_to_x
2023-05-03 15:14:39,063 - wn_one_to_x - [INFO] - [Epoch:86]: Training Loss:0.0009286

2023-05-03 15:14:39,305 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 15:14:44,782 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 15:14:50,241 - wn_one_to_x - [INFO] - [Evaluating Epoch 86 valid]:
MRR: Tail : 0.43214, Head : 0.41221, Avg : 0.42218

2023-05-03 15:14:52,031 - wn_one_to_x - [INFO] - [Epoch 86]: Training Loss: 0.00092861, Valid MRR: 0.42218,

2023-05-03 15:14:52,948 - wn_one_to_x - [INFO] - [E:87| 0]: Train Loss:0.00090584, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:16:18,616 - wn_one_to_x - [INFO] - [E:87| 100]: Train Loss:0.00091426, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:17:44,366 - wn_one_to_x - [INFO] - [E:87| 200]: Train Loss:0.00091609, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:19:10,422 - wn_one_to_x - [INFO] - [E:87| 300]: Train Loss:0.00091577, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:20:36,342 - wn_one_to_x - [INFO] - [E:87| 400]: Train Loss:0.00091821, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:22:02,212 - wn_one_to_x - [INFO] - [E:87| 500]: Train Loss:0.00091923, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:23:28,624 - wn_one_to_x - [INFO] - [E:87| 600]: Train Loss:0.00092027, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:24:54,271 - wn_one_to_x - [INFO] - [E:87| 700]: Train Loss:0.00092126, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:26:20,330 - wn_one_to_x - [INFO] - [E:87| 800]: Train Loss:0.00092168, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:27:46,494 - wn_one_to_x - [INFO] - [E:87| 900]: Train Loss:0.00092285, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:29:12,558 - wn_one_to_x - [INFO] - [E:87| 1000]: Train Loss:0.00092389, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:30:38,050 - wn_one_to_x - [INFO] - [E:87| 1100]: Train Loss:0.00092436, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:32:03,804 - wn_one_to_x - [INFO] - [E:87| 1200]: Train Loss:0.00092494, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:33:29,574 - wn_one_to_x - [INFO] - [E:87| 1300]: Train Loss:0.00092561, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:34:17,658 - wn_one_to_x - [INFO] - [Epoch:87]: Training Loss:0.0009257

2023-05-03 15:34:17,909 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 15:34:23,870 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 15:34:29,197 - wn_one_to_x - [INFO] - [Evaluating Epoch 87 valid]:
MRR: Tail : 0.42745, Head : 0.41103, Avg : 0.41924

2023-05-03 15:34:29,197 - wn_one_to_x - [INFO] - [Epoch 87]: Training Loss: 0.00092573, Valid MRR: 0.42218,

2023-05-03 15:34:30,143 - wn_one_to_x - [INFO] - [E:88| 0]: Train Loss:0.00088307, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:35:56,534 - wn_one_to_x - [INFO] - [E:88| 100]: Train Loss:0.00091316, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:37:22,677 - wn_one_to_x - [INFO] - [E:88| 200]: Train Loss:0.00091215, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:38:49,050 - wn_one_to_x - [INFO] - [E:88| 300]: Train Loss:0.00091307, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:40:15,235 - wn_one_to_x - [INFO] - [E:88| 400]: Train Loss:0.00091353, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:41:42,089 - wn_one_to_x - [INFO] - [E:88| 500]: Train Loss:0.00091523, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:43:08,054 - wn_one_to_x - [INFO] - [E:88| 600]: Train Loss:0.00091628, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:44:34,121 - wn_one_to_x - [INFO] - [E:88| 700]: Train Loss:0.00091654, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:46:00,331 - wn_one_to_x - [INFO] - [E:88| 800]: Train Loss:0.00091705, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:47:26,331 - wn_one_to_x - [INFO] - [E:88| 900]: Train Loss:0.00091776, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:48:52,211 - wn_one_to_x - [INFO] - [E:88| 1000]: Train Loss:0.00091833, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:50:18,189 - wn_one_to_x - [INFO] - [E:88| 1100]: Train Loss:0.00091856, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:51:44,410 - wn_one_to_x - [INFO] - [E:88| 1200]: Train Loss:0.00091904, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:53:10,514 - wn_one_to_x - [INFO] - [E:88| 1300]: Train Loss:0.00091966, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:53:58,431 - wn_one_to_x - [INFO] - [Epoch:88]: Training Loss:0.0009198

2023-05-03 15:53:58,677 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 15:54:04,196 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 15:54:09,506 - wn_one_to_x - [INFO] - [Evaluating Epoch 88 valid]:
MRR: Tail : 0.43134, Head : 0.41302, Avg : 0.42218

2023-05-03 15:54:09,506 - wn_one_to_x - [INFO] - [Epoch 88]: Training Loss: 0.00091985, Valid MRR: 0.42218,

2023-05-03 15:54:10,379 - wn_one_to_x - [INFO] - [E:89| 0]: Train Loss:0.00086436, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:55:36,924 - wn_one_to_x - [INFO] - [E:89| 100]: Train Loss:0.00090345, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:57:02,728 - wn_one_to_x - [INFO] - [E:89| 200]: Train Loss:0.00090633, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:58:28,881 - wn_one_to_x - [INFO] - [E:89| 300]: Train Loss:0.00090591, Val MRR:0.42218, wn_one_to_x
2023-05-03 15:59:55,390 - wn_one_to_x - [INFO] - [E:89| 400]: Train Loss:0.00090708, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:01:21,358 - wn_one_to_x - [INFO] - [E:89| 500]: Train Loss:0.00090858, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:02:47,714 - wn_one_to_x - [INFO] - [E:89| 600]: Train Loss:0.00090956, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:04:14,010 - wn_one_to_x - [INFO] - [E:89| 700]: Train Loss:0.00091038, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:05:39,755 - wn_one_to_x - [INFO] - [E:89| 800]: Train Loss:0.00091067, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:07:05,684 - wn_one_to_x - [INFO] - [E:89| 900]: Train Loss:0.00091087, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:08:31,665 - wn_one_to_x - [INFO] - [E:89| 1000]: Train Loss:0.00091149, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:09:57,873 - wn_one_to_x - [INFO] - [E:89| 1100]: Train Loss:0.00091203, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:11:23,780 - wn_one_to_x - [INFO] - [E:89| 1200]: Train Loss:0.0009126, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:12:49,701 - wn_one_to_x - [INFO] - [E:89| 1300]: Train Loss:0.00091336, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:13:37,866 - wn_one_to_x - [INFO] - [Epoch:89]: Training Loss:0.0009136

2023-05-03 16:13:38,109 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 16:13:43,573 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 16:13:48,829 - wn_one_to_x - [INFO] - [Evaluating Epoch 89 valid]:
MRR: Tail : 0.42672, Head : 0.4109, Avg : 0.41881
MR: Tail : 3838.6, Head : 3951.1, Avg : 3894.9
Hit-1: Tail : 0.39651, Head : 0.38134, Avg : 0.38893
Hit-3: Tail : 0.43902, Head : 0.42518, Avg : 0.4321
Hit-10: Tail : 0.47825, Head : 0.46407, Avg : 0.47116

2023-05-03 16:13:48,829 - wn_one_to_x - [INFO] - [Epoch 89]: Training Loss: 0.00091361, Valid MRR: 0.42218,
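
The Tail_Batch and Head_Batch steps above are the two directions of the ranking evaluation: each validation triple (h, r, t) is scored once against every candidate tail entity and once against every candidate head entity, and the reported Avg is the mean of the two sides. A hedged sketch of that loop, where `score` stands in for the trained model and the filtering of other known true triples that a filtered evaluation would apply is omitted:

import torch

def rank_of(scores, target):
    # rank = 1 + number of candidates scored strictly above the true entity
    return int((scores > scores[target]).sum()) + 1

def mrr_both_sides(score, triples, num_entities):
    ents = torch.arange(num_entities)
    tail_ranks = [rank_of(score(h, r, ents), t) for h, r, t in triples]  # Tail_Batch
    head_ranks = [rank_of(score(ents, r, t), h) for h, r, t in triples]  # Head_Batch

    def mrr(ranks):
        return sum(1.0 / r for r in ranks) / len(ranks)

    return {'Tail': mrr(tail_ranks), 'Head': mrr(head_ranks),
            'Avg': 0.5 * (mrr(tail_ranks) + mrr(head_ranks))}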

2023-05-03 16:13:49,710 - wn_one_to_x - [INFO] - [E:90| 0]: Train Loss:0.00087851, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:15:15,939 - wn_one_to_x - [INFO] - [E:90| 100]: Train Loss:0.00090192, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:16:41,955 - wn_one_to_x - [INFO] - [E:90| 200]: Train Loss:0.00090395, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:18:07,735 - wn_one_to_x - [INFO] - [E:90| 300]: Train Loss:0.00090377, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:19:33,764 - wn_one_to_x - [INFO] - [E:90| 400]: Train Loss:0.00090361, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:21:00,753 - wn_one_to_x - [INFO] - [E:90| 500]: Train Loss:0.00090471, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:22:27,268 - wn_one_to_x - [INFO] - [E:90| 600]: Train Loss:0.0009064, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:23:53,312 - wn_one_to_x - [INFO] - [E:90| 700]: Train Loss:0.00090652, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:25:19,815 - wn_one_to_x - [INFO] - [E:90| 800]: Train Loss:0.00090681, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:26:46,510 - wn_one_to_x - [INFO] - [E:90| 900]: Train Loss:0.00090792, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:28:12,636 - wn_one_to_x - [INFO] - [E:90| 1000]: Train Loss:0.00090815, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:29:38,816 - wn_one_to_x - [INFO] - [E:90| 1100]: Train Loss:0.00090848, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:31:04,863 - wn_one_to_x - [INFO] - [E:90| 1200]: Train Loss:0.00090929, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:32:30,710 - wn_one_to_x - [INFO] - [E:90| 1300]: Train Loss:0.0009101, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:33:18,531 - wn_one_to_x - [INFO] - [Epoch:90]: Training Loss:0.0009107

2023-05-03 16:33:18,774 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 16:33:24,322 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 16:33:29,664 - wn_one_to_x - [INFO] - [Evaluating Epoch 90 valid]:
MRR: Tail : 0.42751, Head : 0.41064, Avg : 0.41907

2023-05-03 16:33:29,664 - wn_one_to_x - [INFO] - [Epoch 90]: Training Loss: 0.0009107, Valid MRR: 0.42218,

2023-05-03 16:33:30,533 - wn_one_to_x - [INFO] - [E:91| 0]: Train Loss:0.00092397, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:34:56,468 - wn_one_to_x - [INFO] - [E:91| 100]: Train Loss:0.00089986, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:36:22,809 - wn_one_to_x - [INFO] - [E:91| 200]: Train Loss:0.00090033, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:37:48,541 - wn_one_to_x - [INFO] - [E:91| 300]: Train Loss:0.00089997, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:39:14,692 - wn_one_to_x - [INFO] - [E:91| 400]: Train Loss:0.00090042, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:40:40,984 - wn_one_to_x - [INFO] - [E:91| 500]: Train Loss:0.00090126, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:42:06,503 - wn_one_to_x - [INFO] - [E:91| 600]: Train Loss:0.00090159, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:43:32,450 - wn_one_to_x - [INFO] - [E:91| 700]: Train Loss:0.00090225, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:44:58,503 - wn_one_to_x - [INFO] - [E:91| 800]: Train Loss:0.0009028, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:46:25,208 - wn_one_to_x - [INFO] - [E:91| 900]: Train Loss:0.0009033, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:47:51,060 - wn_one_to_x - [INFO] - [E:91| 1000]: Train Loss:0.00090332, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:49:17,587 - wn_one_to_x - [INFO] - [E:91| 1100]: Train Loss:0.00090404, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:50:43,416 - wn_one_to_x - [INFO] - [E:91| 1200]: Train Loss:0.0009049, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:52:09,099 - wn_one_to_x - [INFO] - [E:91| 1300]: Train Loss:0.00090587, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:52:57,292 - wn_one_to_x - [INFO] - [Epoch:91]: Training Loss:0.0009061

2023-05-03 16:52:57,540 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 16:53:03,049 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 16:53:08,386 - wn_one_to_x - [INFO] - [Evaluating Epoch 91 valid]:
MRR: Tail : 0.42877, Head : 0.41105, Avg : 0.41991

2023-05-03 16:53:08,387 - wn_one_to_x - [INFO] - [Epoch 91]: Training Loss: 0.00090605, Valid MRR: 0.42218,

2023-05-03 16:53:09,250 - wn_one_to_x - [INFO] - [E:92| 0]: Train Loss:0.00090663, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:54:35,243 - wn_one_to_x - [INFO] - [E:92| 100]: Train Loss:0.00089017, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:56:01,451 - wn_one_to_x - [INFO] - [E:92| 200]: Train Loss:0.00089003, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:57:27,636 - wn_one_to_x - [INFO] - [E:92| 300]: Train Loss:0.00089256, Val MRR:0.42218, wn_one_to_x
2023-05-03 16:58:53,606 - wn_one_to_x - [INFO] - [E:92| 400]: Train Loss:0.00089425, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:00:19,491 - wn_one_to_x - [INFO] - [E:92| 500]: Train Loss:0.00089496, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:01:54,820 - wn_one_to_x - [INFO] - [E:92| 600]: Train Loss:0.0008953, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:04:20,823 - wn_one_to_x - [INFO] - [E:92| 700]: Train Loss:0.00089608, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:05:46,890 - wn_one_to_x - [INFO] - [E:92| 800]: Train Loss:0.00089691, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:07:12,271 - wn_one_to_x - [INFO] - [E:92| 900]: Train Loss:0.00089774, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:08:39,254 - wn_one_to_x - [INFO] - [E:92| 1000]: Train Loss:0.00089826, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:10:05,367 - wn_one_to_x - [INFO] - [E:92| 1100]: Train Loss:0.0008994, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:11:31,227 - wn_one_to_x - [INFO] - [E:92| 1200]: Train Loss:0.00089991, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:12:57,527 - wn_one_to_x - [INFO] - [E:92| 1300]: Train Loss:0.00090062, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:13:45,600 - wn_one_to_x - [INFO] - [Epoch:92]: Training Loss:0.0009007

2023-05-03 17:13:45,844 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 17:13:51,393 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 17:13:56,704 - wn_one_to_x - [INFO] - [Evaluating Epoch 92 valid]:
MRR: Tail : 0.42905, Head : 0.41093, Avg : 0.41999

2023-05-03 17:13:56,704 - wn_one_to_x - [INFO] - [Epoch 92]: Training Loss: 0.00090071, Valid MRR: 0.42218,

2023-05-03 17:13:57,583 - wn_one_to_x - [INFO] - [E:93| 0]: Train Loss:0.00088662, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:15:23,560 - wn_one_to_x - [INFO] - [E:93| 100]: Train Loss:0.00089016, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:16:49,348 - wn_one_to_x - [INFO] - [E:93| 200]: Train Loss:0.00089209, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:18:15,781 - wn_one_to_x - [INFO] - [E:93| 300]: Train Loss:0.00089231, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:19:41,606 - wn_one_to_x - [INFO] - [E:93| 400]: Train Loss:0.000893, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:21:07,816 - wn_one_to_x - [INFO] - [E:93| 500]: Train Loss:0.00089361, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:22:33,968 - wn_one_to_x - [INFO] - [E:93| 600]: Train Loss:0.00089385, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:24:00,682 - wn_one_to_x - [INFO] - [E:93| 700]: Train Loss:0.00089428, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:25:27,551 - wn_one_to_x - [INFO] - [E:93| 800]: Train Loss:0.00089505, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:26:53,627 - wn_one_to_x - [INFO] - [E:93| 900]: Train Loss:0.00089522, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:28:19,288 - wn_one_to_x - [INFO] - [E:93| 1000]: Train Loss:0.00089526, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:29:45,943 - wn_one_to_x - [INFO] - [E:93| 1100]: Train Loss:0.00089589, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:31:12,456 - wn_one_to_x - [INFO] - [E:93| 1200]: Train Loss:0.00089634, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:32:38,603 - wn_one_to_x - [INFO] - [E:93| 1300]: Train Loss:0.00089689, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:33:26,714 - wn_one_to_x - [INFO] - [Epoch:93]: Training Loss:0.0008969

2023-05-03 17:33:26,958 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 17:33:32,435 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 17:33:37,740 - wn_one_to_x - [INFO] - [Evaluating Epoch 93 valid]:
MRR: Tail : 0.42832, Head : 0.40983, Avg : 0.41907

2023-05-03 17:33:37,741 - wn_one_to_x - [INFO] - [Epoch 93]: Training Loss: 0.00089692, Valid MRR: 0.42218,

2023-05-03 17:33:38,608 - wn_one_to_x - [INFO] - [E:94| 0]: Train Loss:0.00086044, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:35:04,508 - wn_one_to_x - [INFO] - [E:94| 100]: Train Loss:0.00089184, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:36:30,633 - wn_one_to_x - [INFO] - [E:94| 200]: Train Loss:0.00088695, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:37:56,707 - wn_one_to_x - [INFO] - [E:94| 300]: Train Loss:0.00088799, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:39:23,014 - wn_one_to_x - [INFO] - [E:94| 400]: Train Loss:0.00088845, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:40:49,048 - wn_one_to_x - [INFO] - [E:94| 500]: Train Loss:0.00088804, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:42:15,226 - wn_one_to_x - [INFO] - [E:94| 600]: Train Loss:0.00088853, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:43:41,110 - wn_one_to_x - [INFO] - [E:94| 700]: Train Loss:0.00088942, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:45:07,002 - wn_one_to_x - [INFO] - [E:94| 800]: Train Loss:0.0008904, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:46:33,041 - wn_one_to_x - [INFO] - [E:94| 900]: Train Loss:0.00089138, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:47:59,643 - wn_one_to_x - [INFO] - [E:94| 1000]: Train Loss:0.00089218, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:49:25,258 - wn_one_to_x - [INFO] - [E:94| 1100]: Train Loss:0.00089253, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:50:51,816 - wn_one_to_x - [INFO] - [E:94| 1200]: Train Loss:0.00089292, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:52:17,511 - wn_one_to_x - [INFO] - [E:94| 1300]: Train Loss:0.0008935, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:53:05,752 - wn_one_to_x - [INFO] - [Epoch:94]: Training Loss:0.0008938

2023-05-03 17:53:06,229 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 17:53:11,831 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 17:53:17,093 - wn_one_to_x - [INFO] - [Evaluating Epoch 94 valid]:
MRR: Tail : 0.42729, Head : 0.40939, Avg : 0.41834

2023-05-03 17:53:17,094 - wn_one_to_x - [INFO] - [Epoch 94]: Training Loss: 0.00089385, Valid MRR: 0.42218,

2023-05-03 17:53:17,979 - wn_one_to_x - [INFO] - [E:95| 0]: Train Loss:0.00087543, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:54:44,183 - wn_one_to_x - [INFO] - [E:95| 100]: Train Loss:0.00088424, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:56:10,020 - wn_one_to_x - [INFO] - [E:95| 200]: Train Loss:0.00088651, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:57:35,680 - wn_one_to_x - [INFO] - [E:95| 300]: Train Loss:0.00088803, Val MRR:0.42218, wn_one_to_x
2023-05-03 17:59:01,516 - wn_one_to_x - [INFO] - [E:95| 400]: Train Loss:0.00088718, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:00:27,714 - wn_one_to_x - [INFO] - [E:95| 500]: Train Loss:0.00088776, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:01:53,985 - wn_one_to_x - [INFO] - [E:95| 600]: Train Loss:0.00088717, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:03:19,913 - wn_one_to_x - [INFO] - [E:95| 700]: Train Loss:0.00088757, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:04:45,797 - wn_one_to_x - [INFO] - [E:95| 800]: Train Loss:0.00088732, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:06:12,012 - wn_one_to_x - [INFO] - [E:95| 900]: Train Loss:0.0008876, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:07:38,807 - wn_one_to_x - [INFO] - [E:95| 1000]: Train Loss:0.00088782, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:09:05,353 - wn_one_to_x - [INFO] - [E:95| 1100]: Train Loss:0.0008883, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:10:32,194 - wn_one_to_x - [INFO] - [E:95| 1200]: Train Loss:0.00088882, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:11:58,341 - wn_one_to_x - [INFO] - [E:95| 1300]: Train Loss:0.00088974, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:12:47,045 - wn_one_to_x - [INFO] - [Epoch:95]: Training Loss:0.0008901

2023-05-03 18:12:47,448 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 18:12:52,940 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 18:12:58,284 - wn_one_to_x - [INFO] - [Evaluating Epoch 95 valid]:
MRR: Tail : 0.42993, Head : 0.41074, Avg : 0.42034

2023-05-03 18:12:58,284 - wn_one_to_x - [INFO] - [Epoch 95]: Training Loss: 0.00089008, Valid MRR: 0.42218,

2023-05-03 18:12:59,160 - wn_one_to_x - [INFO] - [E:96| 0]: Train Loss:0.00089069, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:14:24,969 - wn_one_to_x - [INFO] - [E:96| 100]: Train Loss:0.00088238, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:15:51,315 - wn_one_to_x - [INFO] - [E:96| 200]: Train Loss:0.00088214, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:17:17,340 - wn_one_to_x - [INFO] - [E:96| 300]: Train Loss:0.00088238, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:18:43,243 - wn_one_to_x - [INFO] - [E:96| 400]: Train Loss:0.0008819, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:20:09,421 - wn_one_to_x - [INFO] - [E:96| 500]: Train Loss:0.00088296, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:21:35,593 - wn_one_to_x - [INFO] - [E:96| 600]: Train Loss:0.00088322, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:23:02,130 - wn_one_to_x - [INFO] - [E:96| 700]: Train Loss:0.00088412, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:24:28,558 - wn_one_to_x - [INFO] - [E:96| 800]: Train Loss:0.00088411, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:25:54,161 - wn_one_to_x - [INFO] - [E:96| 900]: Train Loss:0.00088473, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:27:20,216 - wn_one_to_x - [INFO] - [E:96| 1000]: Train Loss:0.00088542, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:28:45,929 - wn_one_to_x - [INFO] - [E:96| 1100]: Train Loss:0.00088584, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:30:11,696 - wn_one_to_x - [INFO] - [E:96| 1200]: Train Loss:0.00088603, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:31:37,960 - wn_one_to_x - [INFO] - [E:96| 1300]: Train Loss:0.00088649, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:32:26,115 - wn_one_to_x - [INFO] - [Epoch:96]: Training Loss:0.0008864

2023-05-03 18:32:26,355 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 18:32:31,856 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 18:32:37,110 - wn_one_to_x - [INFO] - [Evaluating Epoch 96 valid]:
MRR: Tail : 0.43078, Head : 0.41065, Avg : 0.42071

2023-05-03 18:32:37,110 - wn_one_to_x - [INFO] - [Epoch 96]: Training Loss: 0.00088636, Valid MRR: 0.42218,

2023-05-03 18:32:37,976 - wn_one_to_x - [INFO] - [E:97| 0]: Train Loss:0.00087165, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:34:04,845 - wn_one_to_x - [INFO] - [E:97| 100]: Train Loss:0.00087655, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:35:30,779 - wn_one_to_x - [INFO] - [E:97| 200]: Train Loss:0.00087685, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:36:57,042 - wn_one_to_x - [INFO] - [E:97| 300]: Train Loss:0.000876, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:38:23,178 - wn_one_to_x - [INFO] - [E:97| 400]: Train Loss:0.00087588, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:39:49,542 - wn_one_to_x - [INFO] - [E:97| 500]: Train Loss:0.00087656, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:41:15,549 - wn_one_to_x - [INFO] - [E:97| 600]: Train Loss:0.00087784, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:42:42,115 - wn_one_to_x - [INFO] - [E:97| 700]: Train Loss:0.00087931, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:44:08,652 - wn_one_to_x - [INFO] - [E:97| 800]: Train Loss:0.00087988, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:45:34,500 - wn_one_to_x - [INFO] - [E:97| 900]: Train Loss:0.00088063, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:47:00,914 - wn_one_to_x - [INFO] - [E:97| 1000]: Train Loss:0.00088131, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:48:27,025 - wn_one_to_x - [INFO] - [E:97| 1100]: Train Loss:0.00088183, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:49:52,962 - wn_one_to_x - [INFO] - [E:97| 1200]: Train Loss:0.00088249, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:51:18,728 - wn_one_to_x - [INFO] - [E:97| 1300]: Train Loss:0.00088282, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:52:06,918 - wn_one_to_x - [INFO] - [Epoch:97]: Training Loss:0.000883

2023-05-03 18:52:07,164 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 18:52:12,620 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 18:52:17,901 - wn_one_to_x - [INFO] - [Evaluating Epoch 97 valid]:
MRR: Tail : 0.43031, Head : 0.40855, Avg : 0.41943

2023-05-03 18:52:17,901 - wn_one_to_x - [INFO] - [Epoch 97]: Training Loss: 0.00088304, Valid MRR: 0.42218,

2023-05-03 18:52:18,772 - wn_one_to_x - [INFO] - [E:98| 0]: Train Loss:0.0008771, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:53:45,179 - wn_one_to_x - [INFO] - [E:98| 100]: Train Loss:0.00087281, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:55:11,644 - wn_one_to_x - [INFO] - [E:98| 200]: Train Loss:0.00087281, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:56:38,166 - wn_one_to_x - [INFO] - [E:98| 300]: Train Loss:0.00087362, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:58:05,244 - wn_one_to_x - [INFO] - [E:98| 400]: Train Loss:0.00087409, Val MRR:0.42218, wn_one_to_x
2023-05-03 18:59:31,263 - wn_one_to_x - [INFO] - [E:98| 500]: Train Loss:0.00087481, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:00:57,571 - wn_one_to_x - [INFO] - [E:98| 600]: Train Loss:0.00087541, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:02:23,760 - wn_one_to_x - [INFO] - [E:98| 700]: Train Loss:0.00087577, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:03:50,063 - wn_one_to_x - [INFO] - [E:98| 800]: Train Loss:0.00087675, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:05:15,761 - wn_one_to_x - [INFO] - [E:98| 900]: Train Loss:0.00087712, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:06:41,809 - wn_one_to_x - [INFO] - [E:98| 1000]: Train Loss:0.00087789, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:08:08,011 - wn_one_to_x - [INFO] - [E:98| 1100]: Train Loss:0.00087872, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:09:34,238 - wn_one_to_x - [INFO] - [E:98| 1200]: Train Loss:0.0008794, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:11:00,003 - wn_one_to_x - [INFO] - [E:98| 1300]: Train Loss:0.00087973, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:11:48,137 - wn_one_to_x - [INFO] - [Epoch:98]: Training Loss:0.0008799

2023-05-03 19:11:48,380 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 19:11:54,036 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 19:11:59,327 - wn_one_to_x - [INFO] - [Evaluating Epoch 98 valid]:
MRR: Tail : 0.4309, Head : 0.41092, Avg : 0.42091

2023-05-03 19:11:59,328 - wn_one_to_x - [INFO] - [Epoch 98]: Training Loss: 0.00087986, Valid MRR: 0.42218,

2023-05-03 19:12:00,201 - wn_one_to_x - [INFO] - [E:99| 0]: Train Loss:0.00085252, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:13:26,884 - wn_one_to_x - [INFO] - [E:99| 100]: Train Loss:0.00087403, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:14:52,771 - wn_one_to_x - [INFO] - [E:99| 200]: Train Loss:0.00087229, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:16:18,971 - wn_one_to_x - [INFO] - [E:99| 300]: Train Loss:0.00087347, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:17:45,677 - wn_one_to_x - [INFO] - [E:99| 400]: Train Loss:0.00087351, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:19:12,162 - wn_one_to_x - [INFO] - [E:99| 500]: Train Loss:0.0008742, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:20:38,091 - wn_one_to_x - [INFO] - [E:99| 600]: Train Loss:0.000874, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:22:04,069 - wn_one_to_x - [INFO] - [E:99| 700]: Train Loss:0.00087413, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:23:30,613 - wn_one_to_x - [INFO] - [E:99| 800]: Train Loss:0.00087483, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:24:57,184 - wn_one_to_x - [INFO] - [E:99| 900]: Train Loss:0.00087532, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:26:22,926 - wn_one_to_x - [INFO] - [E:99| 1000]: Train Loss:0.00087588, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:27:49,048 - wn_one_to_x - [INFO] - [E:99| 1100]: Train Loss:0.00087649, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:29:15,370 - wn_one_to_x - [INFO] - [E:99| 1200]: Train Loss:0.00087698, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:30:41,576 - wn_one_to_x - [INFO] - [E:99| 1300]: Train Loss:0.00087749, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:31:29,511 - wn_one_to_x - [INFO] - [Epoch:99]: Training Loss:0.0008776

2023-05-03 19:31:29,750 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 19:31:35,293 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 19:31:40,575 - wn_one_to_x - [INFO] - [Evaluating Epoch 99 valid]:
MRR: Tail : 0.42793, Head : 0.40873, Avg : 0.41833
MR: Tail : 4176.7, Head : 4018.6, Avg : 4097.7
Hit-1: Tail : 0.40079, Head : 0.38003, Avg : 0.39041
Hit-3: Tail : 0.43606, Head : 0.4209, Avg : 0.42848
Hit-10: Tail : 0.47858, Head : 0.45946, Avg : 0.46902

2023-05-03 19:31:40,575 - wn_one_to_x - [INFO] - [Epoch 99]: Training Loss: 0.00087759, Valid MRR: 0.42218,
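
MR and Hit-k appear only every tenth epoch in this stretch (79, 89, 99); the intermediate evaluations log MRR alone. The best MRR has not moved from 0.42218 since epoch 86, the kind of plateau that is often handled with a patience counter. A hedged sketch of such early stopping, illustrative rather than taken from this script:

class EarlyStopper:
    """Stop when the best validation MRR has not improved for `patience` epochs."""
    def __init__(self, patience=25):
        self.patience = patience
        self.best = 0.0
        self.bad_epochs = 0

    def step(self, valid_mrr):
        if valid_mrr > self.best:
            self.best, self.bad_epochs = valid_mrr, 0
        else:
            self.bad_epochs += 1
        return self.bad_epochs >= self.patience  # True -> stop training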

2023-05-03 19:31:41,451 - wn_one_to_x - [INFO] - [E:100| 0]: Train Loss:0.00084191, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:33:07,862 - wn_one_to_x - [INFO] - [E:100| 100]: Train Loss:0.00086717, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:34:34,312 - wn_one_to_x - [INFO] - [E:100| 200]: Train Loss:0.00086741, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:36:00,850 - wn_one_to_x - [INFO] - [E:100| 300]: Train Loss:0.00086741, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:37:27,051 - wn_one_to_x - [INFO] - [E:100| 400]: Train Loss:0.00086741, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:38:54,028 - wn_one_to_x - [INFO] - [E:100| 500]: Train Loss:0.00086873, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:40:19,847 - wn_one_to_x - [INFO] - [E:100| 600]: Train Loss:0.00086936, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:41:45,691 - wn_one_to_x - [INFO] - [E:100| 700]: Train Loss:0.00087019, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:43:11,827 - wn_one_to_x - [INFO] - [E:100| 800]: Train Loss:0.00087067, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:44:37,775 - wn_one_to_x - [INFO] - [E:100| 900]: Train Loss:0.00087111, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:46:03,967 - wn_one_to_x - [INFO] - [E:100| 1000]: Train Loss:0.00087164, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:47:29,937 - wn_one_to_x - [INFO] - [E:100| 1100]: Train Loss:0.00087204, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:48:55,999 - wn_one_to_x - [INFO] - [E:100| 1200]: Train Loss:0.00087249, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:50:21,944 - wn_one_to_x - [INFO] - [E:100| 1300]: Train Loss:0.00087311, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:51:10,246 - wn_one_to_x - [INFO] - [Epoch:100]: Training Loss:0.0008733

2023-05-03 19:51:10,488 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 19:51:15,959 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 19:51:21,232 - wn_one_to_x - [INFO] - [Evaluating Epoch 100 valid]:
MRR: Tail : 0.43309, Head : 0.40904, Avg : 0.42106

2023-05-03 19:51:21,232 - wn_one_to_x - [INFO] - [Epoch 100]: Training Loss: 0.00087333, Valid MRR: 0.42218,

2023-05-03 19:51:22,192 - wn_one_to_x - [INFO] - [E:101| 0]: Train Loss:0.00084992, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:52:49,139 - wn_one_to_x - [INFO] - [E:101| 100]: Train Loss:0.00086919, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:54:15,745 - wn_one_to_x - [INFO] - [E:101| 200]: Train Loss:0.00086883, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:55:42,034 - wn_one_to_x - [INFO] - [E:101| 300]: Train Loss:0.00086773, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:57:08,015 - wn_one_to_x - [INFO] - [E:101| 400]: Train Loss:0.00086752, Val MRR:0.42218, wn_one_to_x
2023-05-03 19:58:33,793 - wn_one_to_x - [INFO] - [E:101| 500]: Train Loss:0.00086791, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:00:00,333 - wn_one_to_x - [INFO] - [E:101| 600]: Train Loss:0.00086866, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:01:26,928 - wn_one_to_x - [INFO] - [E:101| 700]: Train Loss:0.00086872, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:02:52,862 - wn_one_to_x - [INFO] - [E:101| 800]: Train Loss:0.00086888, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:04:19,656 - wn_one_to_x - [INFO] - [E:101| 900]: Train Loss:0.00086884, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:05:45,589 - wn_one_to_x - [INFO] - [E:101| 1000]: Train Loss:0.00086955, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:07:11,710 - wn_one_to_x - [INFO] - [E:101| 1100]: Train Loss:0.00086987, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:08:37,503 - wn_one_to_x - [INFO] - [E:101| 1200]: Train Loss:0.00087018, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:10:03,214 - wn_one_to_x - [INFO] - [E:101| 1300]: Train Loss:0.00087067, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:10:51,132 - wn_one_to_x - [INFO] - [Epoch:101]: Training Loss:0.000871

2023-05-03 20:10:51,374 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 20:10:56,848 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 20:11:02,093 - wn_one_to_x - [INFO] - [Evaluating Epoch 101 valid]:
MRR: Tail : 0.42858, Head : 0.41075, Avg : 0.41966

2023-05-03 20:11:02,093 - wn_one_to_x - [INFO] - [Epoch 101]: Training Loss: 0.00087095, Valid MRR: 0.42218,

2023-05-03 20:11:02,959 - wn_one_to_x - [INFO] - [E:102| 0]: Train Loss:0.00087495, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:12:29,163 - wn_one_to_x - [INFO] - [E:102| 100]: Train Loss:0.00085766, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:13:55,530 - wn_one_to_x - [INFO] - [E:102| 200]: Train Loss:0.00086017, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:15:21,859 - wn_one_to_x - [INFO] - [E:102| 300]: Train Loss:0.00086213, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:16:47,987 - wn_one_to_x - [INFO] - [E:102| 400]: Train Loss:0.0008634, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:18:13,497 - wn_one_to_x - [INFO] - [E:102| 500]: Train Loss:0.00086305, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:19:39,635 - wn_one_to_x - [INFO] - [E:102| 600]: Train Loss:0.00086369, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:21:06,102 - wn_one_to_x - [INFO] - [E:102| 700]: Train Loss:0.00086445, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:22:32,697 - wn_one_to_x - [INFO] - [E:102| 800]: Train Loss:0.00086502, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:23:59,280 - wn_one_to_x - [INFO] - [E:102| 900]: Train Loss:0.00086571, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:25:25,948 - wn_one_to_x - [INFO] - [E:102| 1000]: Train Loss:0.00086608, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:26:51,828 - wn_one_to_x - [INFO] - [E:102| 1100]: Train Loss:0.00086642, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:28:17,319 - wn_one_to_x - [INFO] - [E:102| 1200]: Train Loss:0.00086667, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:29:43,550 - wn_one_to_x - [INFO] - [E:102| 1300]: Train Loss:0.00086733, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:30:31,723 - wn_one_to_x - [INFO] - [Epoch:102]: Training Loss:0.0008677

2023-05-03 20:30:31,965 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 20:30:37,432 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 20:30:42,679 - wn_one_to_x - [INFO] - [Evaluating Epoch 102 valid]:
MRR: Tail : 0.43136, Head : 0.41209, Avg : 0.42172

2023-05-03 20:30:42,680 - wn_one_to_x - [INFO] - [Epoch 102]: Training Loss: 0.00086766, Valid MRR: 0.42218,

2023-05-03 20:30:43,552 - wn_one_to_x - [INFO] - [E:103| 0]: Train Loss:0.00084376, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:32:09,396 - wn_one_to_x - [INFO] - [E:103| 100]: Train Loss:0.00085448, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:33:34,977 - wn_one_to_x - [INFO] - [E:103| 200]: Train Loss:0.00085628, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:35:00,815 - wn_one_to_x - [INFO] - [E:103| 300]: Train Loss:0.00085756, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:36:26,606 - wn_one_to_x - [INFO] - [E:103| 400]: Train Loss:0.00085822, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:37:52,629 - wn_one_to_x - [INFO] - [E:103| 500]: Train Loss:0.00086041, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:39:18,456 - wn_one_to_x - [INFO] - [E:103| 600]: Train Loss:0.00086137, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:40:45,029 - wn_one_to_x - [INFO] - [E:103| 700]: Train Loss:0.00086237, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:42:11,174 - wn_one_to_x - [INFO] - [E:103| 800]: Train Loss:0.00086258, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:43:37,853 - wn_one_to_x - [INFO] - [E:103| 900]: Train Loss:0.00086343, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:45:03,749 - wn_one_to_x - [INFO] - [E:103| 1000]: Train Loss:0.00086425, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:46:29,447 - wn_one_to_x - [INFO] - [E:103| 1100]: Train Loss:0.00086487, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:47:55,497 - wn_one_to_x - [INFO] - [E:103| 1200]: Train Loss:0.00086514, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:49:21,619 - wn_one_to_x - [INFO] - [E:103| 1300]: Train Loss:0.00086568, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:50:09,722 - wn_one_to_x - [INFO] - [Epoch:103]: Training Loss:0.0008659

2023-05-03 20:50:09,968 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-03 20:50:15,460 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-03 20:50:20,698 - wn_one_to_x - [INFO] - [Evaluating Epoch 103 valid]:
MRR: Tail : 0.42857, Head : 0.41206, Avg : 0.42031

2023-05-03 20:50:20,698 - wn_one_to_x - [INFO] - [Epoch 103]: Training Loss: 0.0008659, Valid MRR: 0.42218,

2023-05-03 20:50:21,570 - wn_one_to_x - [INFO] - [E:104| 0]: Train Loss:0.00088686, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:51:47,792 - wn_one_to_x - [INFO] - [E:104| 100]: Train Loss:0.00085677, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:53:14,107 - wn_one_to_x - [INFO] - [E:104| 200]: Train Loss:0.00085857, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:54:40,321 - wn_one_to_x - [INFO] - [E:104| 300]: Train Loss:0.00085855, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:56:06,259 - wn_one_to_x - [INFO] - [E:104| 400]: Train Loss:0.00085987, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:57:32,370 - wn_one_to_x - [INFO] - [E:104| 500]: Train Loss:0.00086063, Val MRR:0.42218, wn_one_to_x
2023-05-03 20:58:58,650 - wn_one_to_x - [INFO] - [E:104| 600]: Train Loss:0.00086121, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:00:24,467 - wn_one_to_x - [INFO] - [E:104| 700]: Train Loss:0.00086167, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:01:50,809 - wn_one_to_x - [INFO] - [E:104| 800]: Train Loss:0.00086252, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:03:16,929 - wn_one_to_x - [INFO] - [E:104| 900]: Train Loss:0.00086269, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:04:42,860 - wn_one_to_x - [INFO] - [E:104| 1000]: Train Loss:0.00086343, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:06:09,275 - wn_one_to_x - [INFO] - [E:104| 1100]: Train Loss:0.0008633, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:07:35,337 - wn_one_to_x - [INFO] - [E:104| 1200]: Train Loss:0.0008634, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:09:01,371 - wn_one_to_x - [INFO] - [E:104| 1300]: Train Loss:0.00086378, Val MRR:0.42218, wn_one_to_x
2023-05-03 21:09:49,436 - wn_one_to_x - [INFO] - [Epoch:104]: Training Loss:0.000864
|
|
|
|
2023-05-03 21:09:49,681 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 21:09:55,171 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 21:10:00,434 - wn_one_to_x - [INFO] - [Evaluating Epoch 104 valid]:
|
|
MRR: Tail : 0.42983, Head : 0.41024, Avg : 0.42004
|
|
|
|
2023-05-03 21:10:00,435 - wn_one_to_x - [INFO] - [Epoch 104]: Training Loss: 0.00086396, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 21:10:01,300 - wn_one_to_x - [INFO] - [E:105| 0]: Train Loss:0.00086423, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:11:28,326 - wn_one_to_x - [INFO] - [E:105| 100]: Train Loss:0.00085556, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:12:54,230 - wn_one_to_x - [INFO] - [E:105| 200]: Train Loss:0.00085489, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:14:20,335 - wn_one_to_x - [INFO] - [E:105| 300]: Train Loss:0.00085621, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:15:46,579 - wn_one_to_x - [INFO] - [E:105| 400]: Train Loss:0.00085582, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:17:12,427 - wn_one_to_x - [INFO] - [E:105| 500]: Train Loss:0.00085593, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:18:38,578 - wn_one_to_x - [INFO] - [E:105| 600]: Train Loss:0.000857, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:20:04,900 - wn_one_to_x - [INFO] - [E:105| 700]: Train Loss:0.00085718, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:21:31,092 - wn_one_to_x - [INFO] - [E:105| 800]: Train Loss:0.00085777, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:22:57,027 - wn_one_to_x - [INFO] - [E:105| 900]: Train Loss:0.00085854, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:24:23,013 - wn_one_to_x - [INFO] - [E:105| 1000]: Train Loss:0.00085905, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:25:48,955 - wn_one_to_x - [INFO] - [E:105| 1100]: Train Loss:0.00085954, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:27:15,405 - wn_one_to_x - [INFO] - [E:105| 1200]: Train Loss:0.0008599, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:28:41,186 - wn_one_to_x - [INFO] - [E:105| 1300]: Train Loss:0.00086039, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:29:28,966 - wn_one_to_x - [INFO] - [Epoch:105]: Training Loss:0.0008604
|
|
|
|
2023-05-03 21:29:29,207 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 21:29:34,693 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 21:29:40,208 - wn_one_to_x - [INFO] - [Evaluating Epoch 105 valid]:
|
|
MRR: Tail : 0.43164, Head : 0.40986, Avg : 0.42075
|
|
|
|
2023-05-03 21:29:40,208 - wn_one_to_x - [INFO] - [Epoch 105]: Training Loss: 0.00086044, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 21:29:41,072 - wn_one_to_x - [INFO] - [E:106| 0]: Train Loss:0.0008493, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:31:06,735 - wn_one_to_x - [INFO] - [E:106| 100]: Train Loss:0.00085184, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:32:33,058 - wn_one_to_x - [INFO] - [E:106| 200]: Train Loss:0.0008534, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:33:58,764 - wn_one_to_x - [INFO] - [E:106| 300]: Train Loss:0.00085235, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:35:24,603 - wn_one_to_x - [INFO] - [E:106| 400]: Train Loss:0.00085249, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:36:51,192 - wn_one_to_x - [INFO] - [E:106| 500]: Train Loss:0.00085359, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:38:17,123 - wn_one_to_x - [INFO] - [E:106| 600]: Train Loss:0.00085381, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:39:43,077 - wn_one_to_x - [INFO] - [E:106| 700]: Train Loss:0.00085457, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:41:09,222 - wn_one_to_x - [INFO] - [E:106| 800]: Train Loss:0.0008556, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:42:34,842 - wn_one_to_x - [INFO] - [E:106| 900]: Train Loss:0.00085604, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:44:00,507 - wn_one_to_x - [INFO] - [E:106| 1000]: Train Loss:0.0008561, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:45:25,955 - wn_one_to_x - [INFO] - [E:106| 1100]: Train Loss:0.00085636, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:46:51,100 - wn_one_to_x - [INFO] - [E:106| 1200]: Train Loss:0.00085659, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:48:16,829 - wn_one_to_x - [INFO] - [E:106| 1300]: Train Loss:0.0008571, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:49:04,622 - wn_one_to_x - [INFO] - [Epoch:106]: Training Loss:0.0008572
|
|
|
|
2023-05-03 21:49:04,866 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 21:49:10,359 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 21:49:15,617 - wn_one_to_x - [INFO] - [Evaluating Epoch 106 valid]:
|
|
MRR: Tail : 0.42894, Head : 0.41023, Avg : 0.41959
|
|
|
|
2023-05-03 21:49:15,617 - wn_one_to_x - [INFO] - [Epoch 106]: Training Loss: 0.00085721, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 21:49:16,482 - wn_one_to_x - [INFO] - [E:107| 0]: Train Loss:0.00089201, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:50:41,739 - wn_one_to_x - [INFO] - [E:107| 100]: Train Loss:0.00084861, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:52:07,046 - wn_one_to_x - [INFO] - [E:107| 200]: Train Loss:0.00084911, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:53:32,389 - wn_one_to_x - [INFO] - [E:107| 300]: Train Loss:0.00085086, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:54:58,126 - wn_one_to_x - [INFO] - [E:107| 400]: Train Loss:0.00085108, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:56:23,472 - wn_one_to_x - [INFO] - [E:107| 500]: Train Loss:0.00085189, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:57:49,020 - wn_one_to_x - [INFO] - [E:107| 600]: Train Loss:0.00085229, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 21:59:14,436 - wn_one_to_x - [INFO] - [E:107| 700]: Train Loss:0.00085258, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:00:39,747 - wn_one_to_x - [INFO] - [E:107| 800]: Train Loss:0.00085274, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:02:05,369 - wn_one_to_x - [INFO] - [E:107| 900]: Train Loss:0.00085339, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:03:30,839 - wn_one_to_x - [INFO] - [E:107| 1000]: Train Loss:0.00085365, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:04:56,451 - wn_one_to_x - [INFO] - [E:107| 1100]: Train Loss:0.00085448, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:06:21,910 - wn_one_to_x - [INFO] - [E:107| 1200]: Train Loss:0.00085468, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:07:47,335 - wn_one_to_x - [INFO] - [E:107| 1300]: Train Loss:0.00085507, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:08:34,909 - wn_one_to_x - [INFO] - [Epoch:107]: Training Loss:0.0008553
|
|
|
|
2023-05-03 22:08:35,163 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 22:08:40,666 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 22:08:45,974 - wn_one_to_x - [INFO] - [Evaluating Epoch 107 valid]:
|
|
MRR: Tail : 0.42821, Head : 0.41262, Avg : 0.42042
|
|
|
|
2023-05-03 22:08:45,974 - wn_one_to_x - [INFO] - [Epoch 107]: Training Loss: 0.00085534, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 22:08:46,840 - wn_one_to_x - [INFO] - [E:108| 0]: Train Loss:0.00082592, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:10:13,141 - wn_one_to_x - [INFO] - [E:108| 100]: Train Loss:0.00084805, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:11:38,519 - wn_one_to_x - [INFO] - [E:108| 200]: Train Loss:0.00084657, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:13:03,824 - wn_one_to_x - [INFO] - [E:108| 300]: Train Loss:0.00084733, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:14:28,999 - wn_one_to_x - [INFO] - [E:108| 400]: Train Loss:0.00084701, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:15:54,304 - wn_one_to_x - [INFO] - [E:108| 500]: Train Loss:0.00084692, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:17:19,686 - wn_one_to_x - [INFO] - [E:108| 600]: Train Loss:0.00084792, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:18:45,053 - wn_one_to_x - [INFO] - [E:108| 700]: Train Loss:0.00084849, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:20:10,601 - wn_one_to_x - [INFO] - [E:108| 800]: Train Loss:0.000849, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:21:36,226 - wn_one_to_x - [INFO] - [E:108| 900]: Train Loss:0.00084981, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:23:01,387 - wn_one_to_x - [INFO] - [E:108| 1000]: Train Loss:0.00085029, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:24:27,207 - wn_one_to_x - [INFO] - [E:108| 1100]: Train Loss:0.00085095, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:25:52,488 - wn_one_to_x - [INFO] - [E:108| 1200]: Train Loss:0.00085122, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:27:17,522 - wn_one_to_x - [INFO] - [E:108| 1300]: Train Loss:0.00085179, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:28:05,083 - wn_one_to_x - [INFO] - [Epoch:108]: Training Loss:0.0008521
|
|
|
|
2023-05-03 22:28:05,326 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 22:28:10,779 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 22:28:16,031 - wn_one_to_x - [INFO] - [Evaluating Epoch 108 valid]:
|
|
MRR: Tail : 0.42773, Head : 0.41214, Avg : 0.41993
|
|
|
|
2023-05-03 22:28:16,031 - wn_one_to_x - [INFO] - [Epoch 108]: Training Loss: 0.00085207, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 22:28:16,892 - wn_one_to_x - [INFO] - [E:109| 0]: Train Loss:0.00083251, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:29:42,444 - wn_one_to_x - [INFO] - [E:109| 100]: Train Loss:0.00084263, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:31:08,252 - wn_one_to_x - [INFO] - [E:109| 200]: Train Loss:0.00084409, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:32:33,531 - wn_one_to_x - [INFO] - [E:109| 300]: Train Loss:0.00084527, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:33:59,061 - wn_one_to_x - [INFO] - [E:109| 400]: Train Loss:0.00084555, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:35:24,363 - wn_one_to_x - [INFO] - [E:109| 500]: Train Loss:0.00084534, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:36:49,722 - wn_one_to_x - [INFO] - [E:109| 600]: Train Loss:0.00084603, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:38:15,001 - wn_one_to_x - [INFO] - [E:109| 700]: Train Loss:0.0008465, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:39:40,558 - wn_one_to_x - [INFO] - [E:109| 800]: Train Loss:0.00084672, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:41:06,103 - wn_one_to_x - [INFO] - [E:109| 900]: Train Loss:0.00084715, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:42:32,053 - wn_one_to_x - [INFO] - [E:109| 1000]: Train Loss:0.00084809, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:43:57,296 - wn_one_to_x - [INFO] - [E:109| 1100]: Train Loss:0.00084897, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:45:22,755 - wn_one_to_x - [INFO] - [E:109| 1200]: Train Loss:0.00084907, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:46:47,855 - wn_one_to_x - [INFO] - [E:109| 1300]: Train Loss:0.00084923, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:47:35,490 - wn_one_to_x - [INFO] - [Epoch:109]: Training Loss:0.0008493
|
|
|
|
2023-05-03 22:47:35,732 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 22:47:41,176 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 22:47:46,411 - wn_one_to_x - [INFO] - [Evaluating Epoch 109 valid]:
|
|
MRR: Tail : 0.42936, Head : 0.41086, Avg : 0.42011
|
|
MR: Tail : 4317.5, Head : 4119.3, Avg : 4218.4
|
|
Hit-1: Tail : 0.40046, Head : 0.38497, Avg : 0.39272
|
|
Hit-3: Tail : 0.44265, Head : 0.41958, Avg : 0.43111
|
|
Hit-10: Tail : 0.47759, Head : 0.45715, Avg : 0.46737
|
|
2023-05-03 22:47:46,411 - wn_one_to_x - [INFO] - [Epoch 109]: Training Loss: 0.00084928, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 22:47:47,281 - wn_one_to_x - [INFO] - [E:110| 0]: Train Loss:0.00080466, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:49:12,621 - wn_one_to_x - [INFO] - [E:110| 100]: Train Loss:0.00083803, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:50:37,838 - wn_one_to_x - [INFO] - [E:110| 200]: Train Loss:0.00084204, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:52:03,564 - wn_one_to_x - [INFO] - [E:110| 300]: Train Loss:0.00084211, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:53:29,298 - wn_one_to_x - [INFO] - [E:110| 400]: Train Loss:0.00084443, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:54:55,177 - wn_one_to_x - [INFO] - [E:110| 500]: Train Loss:0.00084496, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:56:20,685 - wn_one_to_x - [INFO] - [E:110| 600]: Train Loss:0.000845, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:57:46,154 - wn_one_to_x - [INFO] - [E:110| 700]: Train Loss:0.0008455, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 22:59:11,100 - wn_one_to_x - [INFO] - [E:110| 800]: Train Loss:0.00084616, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:00:36,366 - wn_one_to_x - [INFO] - [E:110| 900]: Train Loss:0.00084666, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:02:01,762 - wn_one_to_x - [INFO] - [E:110| 1000]: Train Loss:0.00084701, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:03:27,089 - wn_one_to_x - [INFO] - [E:110| 1100]: Train Loss:0.00084767, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:04:52,362 - wn_one_to_x - [INFO] - [E:110| 1200]: Train Loss:0.00084818, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:06:17,595 - wn_one_to_x - [INFO] - [E:110| 1300]: Train Loss:0.00084858, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:07:05,105 - wn_one_to_x - [INFO] - [Epoch:110]: Training Loss:0.0008489
|
|
|
|
2023-05-03 23:07:05,348 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 23:07:10,838 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 23:07:16,078 - wn_one_to_x - [INFO] - [Evaluating Epoch 110 valid]:
|
|
MRR: Tail : 0.42947, Head : 0.41333, Avg : 0.4214
|
|
|
|
2023-05-03 23:07:16,078 - wn_one_to_x - [INFO] - [Epoch 110]: Training Loss: 0.00084892, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 23:07:16,943 - wn_one_to_x - [INFO] - [E:111| 0]: Train Loss:0.000848, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:08:42,283 - wn_one_to_x - [INFO] - [E:111| 100]: Train Loss:0.00084027, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:10:07,777 - wn_one_to_x - [INFO] - [E:111| 200]: Train Loss:0.00084164, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:11:33,458 - wn_one_to_x - [INFO] - [E:111| 300]: Train Loss:0.00084241, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:12:58,708 - wn_one_to_x - [INFO] - [E:111| 400]: Train Loss:0.00084223, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:14:24,439 - wn_one_to_x - [INFO] - [E:111| 500]: Train Loss:0.00084302, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:15:49,289 - wn_one_to_x - [INFO] - [E:111| 600]: Train Loss:0.00084351, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:17:14,600 - wn_one_to_x - [INFO] - [E:111| 700]: Train Loss:0.00084428, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:18:40,027 - wn_one_to_x - [INFO] - [E:111| 800]: Train Loss:0.00084435, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:20:05,264 - wn_one_to_x - [INFO] - [E:111| 900]: Train Loss:0.0008445, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:21:30,982 - wn_one_to_x - [INFO] - [E:111| 1000]: Train Loss:0.00084498, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:22:56,094 - wn_one_to_x - [INFO] - [E:111| 1100]: Train Loss:0.00084548, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:24:21,717 - wn_one_to_x - [INFO] - [E:111| 1200]: Train Loss:0.00084582, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:25:46,528 - wn_one_to_x - [INFO] - [E:111| 1300]: Train Loss:0.00084638, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:26:34,058 - wn_one_to_x - [INFO] - [Epoch:111]: Training Loss:0.0008464
|
|
|
|
2023-05-03 23:26:34,304 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 23:26:39,757 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 23:26:45,034 - wn_one_to_x - [INFO] - [Evaluating Epoch 111 valid]:
|
|
MRR: Tail : 0.42658, Head : 0.41159, Avg : 0.41908
|
|
|
|
2023-05-03 23:26:45,034 - wn_one_to_x - [INFO] - [Epoch 111]: Training Loss: 0.00084639, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 23:26:45,900 - wn_one_to_x - [INFO] - [E:112| 0]: Train Loss:0.00087139, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:28:11,664 - wn_one_to_x - [INFO] - [E:112| 100]: Train Loss:0.0008401, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:29:37,055 - wn_one_to_x - [INFO] - [E:112| 200]: Train Loss:0.00083928, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:31:02,179 - wn_one_to_x - [INFO] - [E:112| 300]: Train Loss:0.00084057, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:32:27,514 - wn_one_to_x - [INFO] - [E:112| 400]: Train Loss:0.00084061, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:33:52,638 - wn_one_to_x - [INFO] - [E:112| 500]: Train Loss:0.00083987, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:35:18,156 - wn_one_to_x - [INFO] - [E:112| 600]: Train Loss:0.00083971, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:36:44,071 - wn_one_to_x - [INFO] - [E:112| 700]: Train Loss:0.00084016, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:38:09,088 - wn_one_to_x - [INFO] - [E:112| 800]: Train Loss:0.00084073, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:39:33,981 - wn_one_to_x - [INFO] - [E:112| 900]: Train Loss:0.00084098, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:40:59,114 - wn_one_to_x - [INFO] - [E:112| 1000]: Train Loss:0.0008413, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:42:24,338 - wn_one_to_x - [INFO] - [E:112| 1100]: Train Loss:0.00084224, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:43:49,794 - wn_one_to_x - [INFO] - [E:112| 1200]: Train Loss:0.00084261, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:45:14,997 - wn_one_to_x - [INFO] - [E:112| 1300]: Train Loss:0.00084304, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:46:02,490 - wn_one_to_x - [INFO] - [Epoch:112]: Training Loss:0.0008432
|
|
|
|
2023-05-03 23:46:02,730 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-03 23:46:08,305 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-03 23:46:13,543 - wn_one_to_x - [INFO] - [Evaluating Epoch 112 valid]:
|
|
MRR: Tail : 0.42872, Head : 0.41071, Avg : 0.41972
|
|
|
|
2023-05-03 23:46:13,544 - wn_one_to_x - [INFO] - [Epoch 112]: Training Loss: 0.00084324, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-03 23:46:14,411 - wn_one_to_x - [INFO] - [E:113| 0]: Train Loss:0.0008542, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:47:39,709 - wn_one_to_x - [INFO] - [E:113| 100]: Train Loss:0.00083597, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:49:04,911 - wn_one_to_x - [INFO] - [E:113| 200]: Train Loss:0.00083664, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:50:30,249 - wn_one_to_x - [INFO] - [E:113| 300]: Train Loss:0.00083736, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:51:55,972 - wn_one_to_x - [INFO] - [E:113| 400]: Train Loss:0.00083796, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:53:21,826 - wn_one_to_x - [INFO] - [E:113| 500]: Train Loss:0.00083748, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:54:47,446 - wn_one_to_x - [INFO] - [E:113| 600]: Train Loss:0.00083768, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:56:12,784 - wn_one_to_x - [INFO] - [E:113| 700]: Train Loss:0.00083862, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:57:38,510 - wn_one_to_x - [INFO] - [E:113| 800]: Train Loss:0.00083896, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-03 23:59:03,679 - wn_one_to_x - [INFO] - [E:113| 900]: Train Loss:0.00083957, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:00:28,594 - wn_one_to_x - [INFO] - [E:113| 1000]: Train Loss:0.00084011, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:01:53,977 - wn_one_to_x - [INFO] - [E:113| 1100]: Train Loss:0.00084052, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:03:19,517 - wn_one_to_x - [INFO] - [E:113| 1200]: Train Loss:0.00084092, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:04:45,146 - wn_one_to_x - [INFO] - [E:113| 1300]: Train Loss:0.00084131, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:05:32,826 - wn_one_to_x - [INFO] - [Epoch:113]: Training Loss:0.0008416
|
|
|
|
2023-05-04 00:05:33,069 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 00:05:38,617 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 00:05:43,859 - wn_one_to_x - [INFO] - [Evaluating Epoch 113 valid]:
|
|
MRR: Tail : 0.42897, Head : 0.41101, Avg : 0.41999
|
|
|
|
2023-05-04 00:05:43,859 - wn_one_to_x - [INFO] - [Epoch 113]: Training Loss: 0.00084157, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 00:05:44,733 - wn_one_to_x - [INFO] - [E:114| 0]: Train Loss:0.00084565, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:07:09,858 - wn_one_to_x - [INFO] - [E:114| 100]: Train Loss:0.0008339, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:08:34,951 - wn_one_to_x - [INFO] - [E:114| 200]: Train Loss:0.00083408, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:10:00,881 - wn_one_to_x - [INFO] - [E:114| 300]: Train Loss:0.00083483, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:11:25,958 - wn_one_to_x - [INFO] - [E:114| 400]: Train Loss:0.00083618, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:12:51,246 - wn_one_to_x - [INFO] - [E:114| 500]: Train Loss:0.00083719, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:14:16,364 - wn_one_to_x - [INFO] - [E:114| 600]: Train Loss:0.00083765, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:15:41,687 - wn_one_to_x - [INFO] - [E:114| 700]: Train Loss:0.00083759, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:17:06,988 - wn_one_to_x - [INFO] - [E:114| 800]: Train Loss:0.00083782, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:18:32,794 - wn_one_to_x - [INFO] - [E:114| 900]: Train Loss:0.00083863, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:19:58,032 - wn_one_to_x - [INFO] - [E:114| 1000]: Train Loss:0.0008392, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:21:23,291 - wn_one_to_x - [INFO] - [E:114| 1100]: Train Loss:0.00083969, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:22:48,835 - wn_one_to_x - [INFO] - [E:114| 1200]: Train Loss:0.00084023, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:24:14,452 - wn_one_to_x - [INFO] - [E:114| 1300]: Train Loss:0.00084084, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:25:01,737 - wn_one_to_x - [INFO] - [Epoch:114]: Training Loss:0.0008409
|
|
|
|
2023-05-04 00:25:01,979 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 00:25:07,441 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 00:25:12,680 - wn_one_to_x - [INFO] - [Evaluating Epoch 114 valid]:
|
|
MRR: Tail : 0.42606, Head : 0.4083, Avg : 0.41718
|
|
|
|
2023-05-04 00:25:12,680 - wn_one_to_x - [INFO] - [Epoch 114]: Training Loss: 0.00084095, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 00:25:13,546 - wn_one_to_x - [INFO] - [E:115| 0]: Train Loss:0.0008656, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:26:53,109 - wn_one_to_x - [INFO] - [E:115| 100]: Train Loss:0.0008329, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:29:13,661 - wn_one_to_x - [INFO] - [E:115| 200]: Train Loss:0.00083419, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:30:39,307 - wn_one_to_x - [INFO] - [E:115| 300]: Train Loss:0.00083457, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:32:04,450 - wn_one_to_x - [INFO] - [E:115| 400]: Train Loss:0.00083493, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:33:29,511 - wn_one_to_x - [INFO] - [E:115| 500]: Train Loss:0.0008356, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:34:54,704 - wn_one_to_x - [INFO] - [E:115| 600]: Train Loss:0.00083648, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:36:19,971 - wn_one_to_x - [INFO] - [E:115| 700]: Train Loss:0.00083675, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:37:45,711 - wn_one_to_x - [INFO] - [E:115| 800]: Train Loss:0.00083706, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:39:11,114 - wn_one_to_x - [INFO] - [E:115| 900]: Train Loss:0.00083748, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:40:36,847 - wn_one_to_x - [INFO] - [E:115| 1000]: Train Loss:0.00083731, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:42:02,071 - wn_one_to_x - [INFO] - [E:115| 1100]: Train Loss:0.00083769, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:43:27,249 - wn_one_to_x - [INFO] - [E:115| 1200]: Train Loss:0.00083791, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:44:52,894 - wn_one_to_x - [INFO] - [E:115| 1300]: Train Loss:0.00083808, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:45:40,423 - wn_one_to_x - [INFO] - [Epoch:115]: Training Loss:0.0008383
|
|
|
|
2023-05-04 00:45:40,661 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 00:45:46,138 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 00:45:51,441 - wn_one_to_x - [INFO] - [Evaluating Epoch 115 valid]:
|
|
MRR: Tail : 0.42945, Head : 0.41258, Avg : 0.42102
|
|
|
|
2023-05-04 00:45:51,441 - wn_one_to_x - [INFO] - [Epoch 115]: Training Loss: 0.00083831, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 00:45:52,301 - wn_one_to_x - [INFO] - [E:116| 0]: Train Loss:0.00085877, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:47:17,511 - wn_one_to_x - [INFO] - [E:116| 100]: Train Loss:0.00083036, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:48:42,863 - wn_one_to_x - [INFO] - [E:116| 200]: Train Loss:0.00083021, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:50:08,469 - wn_one_to_x - [INFO] - [E:116| 300]: Train Loss:0.00083079, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:51:33,572 - wn_one_to_x - [INFO] - [E:116| 400]: Train Loss:0.00083215, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:52:58,936 - wn_one_to_x - [INFO] - [E:116| 500]: Train Loss:0.00083242, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:54:24,497 - wn_one_to_x - [INFO] - [E:116| 600]: Train Loss:0.0008327, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:55:50,185 - wn_one_to_x - [INFO] - [E:116| 700]: Train Loss:0.00083302, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:57:15,787 - wn_one_to_x - [INFO] - [E:116| 800]: Train Loss:0.00083347, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 00:58:41,003 - wn_one_to_x - [INFO] - [E:116| 900]: Train Loss:0.00083368, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:00:05,834 - wn_one_to_x - [INFO] - [E:116| 1000]: Train Loss:0.00083387, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:01:31,628 - wn_one_to_x - [INFO] - [E:116| 1100]: Train Loss:0.00083443, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:02:57,283 - wn_one_to_x - [INFO] - [E:116| 1200]: Train Loss:0.00083485, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:04:23,080 - wn_one_to_x - [INFO] - [E:116| 1300]: Train Loss:0.00083515, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:05:10,699 - wn_one_to_x - [INFO] - [Epoch:116]: Training Loss:0.0008353
|
|
|
|
2023-05-04 01:05:10,939 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 01:05:16,482 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 01:05:21,723 - wn_one_to_x - [INFO] - [Evaluating Epoch 116 valid]:
|
|
MRR: Tail : 0.42939, Head : 0.41109, Avg : 0.42024
|
|
|
|
2023-05-04 01:05:21,723 - wn_one_to_x - [INFO] - [Epoch 116]: Training Loss: 0.00083527, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 01:05:22,584 - wn_one_to_x - [INFO] - [E:117| 0]: Train Loss:0.00080305, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:06:48,008 - wn_one_to_x - [INFO] - [E:117| 100]: Train Loss:0.00083009, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:08:13,396 - wn_one_to_x - [INFO] - [E:117| 200]: Train Loss:0.00082943, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:09:38,642 - wn_one_to_x - [INFO] - [E:117| 300]: Train Loss:0.00083026, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:11:03,853 - wn_one_to_x - [INFO] - [E:117| 400]: Train Loss:0.00083082, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:12:29,123 - wn_one_to_x - [INFO] - [E:117| 500]: Train Loss:0.00083094, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:13:54,420 - wn_one_to_x - [INFO] - [E:117| 600]: Train Loss:0.00083123, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:15:19,795 - wn_one_to_x - [INFO] - [E:117| 700]: Train Loss:0.0008319, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:16:45,221 - wn_one_to_x - [INFO] - [E:117| 800]: Train Loss:0.00083222, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:18:10,761 - wn_one_to_x - [INFO] - [E:117| 900]: Train Loss:0.00083266, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:19:36,038 - wn_one_to_x - [INFO] - [E:117| 1000]: Train Loss:0.00083318, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:21:01,462 - wn_one_to_x - [INFO] - [E:117| 1100]: Train Loss:0.00083348, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:22:26,923 - wn_one_to_x - [INFO] - [E:117| 1200]: Train Loss:0.00083386, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:23:53,238 - wn_one_to_x - [INFO] - [E:117| 1300]: Train Loss:0.00083425, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:24:40,967 - wn_one_to_x - [INFO] - [Epoch:117]: Training Loss:0.0008345
|
|
|
|
2023-05-04 01:24:41,210 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 01:24:46,713 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 01:24:52,037 - wn_one_to_x - [INFO] - [Evaluating Epoch 117 valid]:
|
|
MRR: Tail : 0.43153, Head : 0.41106, Avg : 0.42129
|
|
|
|
2023-05-04 01:24:52,038 - wn_one_to_x - [INFO] - [Epoch 117]: Training Loss: 0.00083449, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 01:24:52,905 - wn_one_to_x - [INFO] - [E:118| 0]: Train Loss:0.00082981, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:26:18,572 - wn_one_to_x - [INFO] - [E:118| 100]: Train Loss:0.00083302, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:27:44,019 - wn_one_to_x - [INFO] - [E:118| 200]: Train Loss:0.00083143, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:29:10,090 - wn_one_to_x - [INFO] - [E:118| 300]: Train Loss:0.00082999, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:30:35,426 - wn_one_to_x - [INFO] - [E:118| 400]: Train Loss:0.00082929, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:32:01,004 - wn_one_to_x - [INFO] - [E:118| 500]: Train Loss:0.00082926, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:33:26,539 - wn_one_to_x - [INFO] - [E:118| 600]: Train Loss:0.00083002, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:34:51,676 - wn_one_to_x - [INFO] - [E:118| 700]: Train Loss:0.00083057, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:36:17,238 - wn_one_to_x - [INFO] - [E:118| 800]: Train Loss:0.00083093, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:37:42,418 - wn_one_to_x - [INFO] - [E:118| 900]: Train Loss:0.00083179, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:39:07,912 - wn_one_to_x - [INFO] - [E:118| 1000]: Train Loss:0.00083209, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:40:33,347 - wn_one_to_x - [INFO] - [E:118| 1100]: Train Loss:0.00083236, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:41:58,678 - wn_one_to_x - [INFO] - [E:118| 1200]: Train Loss:0.00083261, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:43:24,186 - wn_one_to_x - [INFO] - [E:118| 1300]: Train Loss:0.00083318, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:44:12,026 - wn_one_to_x - [INFO] - [Epoch:118]: Training Loss:0.0008334
|
|
|
|
2023-05-04 01:44:12,268 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 01:44:18,958 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 01:44:24,459 - wn_one_to_x - [INFO] - [Evaluating Epoch 118 valid]:
|
|
MRR: Tail : 0.43067, Head : 0.41043, Avg : 0.42055
|
|
|
|
2023-05-04 01:44:24,459 - wn_one_to_x - [INFO] - [Epoch 118]: Training Loss: 0.0008334, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 01:44:25,373 - wn_one_to_x - [INFO] - [E:119| 0]: Train Loss:0.00081665, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:45:50,975 - wn_one_to_x - [INFO] - [E:119| 100]: Train Loss:0.00083026, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:47:16,489 - wn_one_to_x - [INFO] - [E:119| 200]: Train Loss:0.00082917, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:48:42,149 - wn_one_to_x - [INFO] - [E:119| 300]: Train Loss:0.00082855, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:50:07,709 - wn_one_to_x - [INFO] - [E:119| 400]: Train Loss:0.00082739, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:51:32,909 - wn_one_to_x - [INFO] - [E:119| 500]: Train Loss:0.0008272, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:52:58,435 - wn_one_to_x - [INFO] - [E:119| 600]: Train Loss:0.00082753, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:54:24,083 - wn_one_to_x - [INFO] - [E:119| 700]: Train Loss:0.00082814, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:55:48,957 - wn_one_to_x - [INFO] - [E:119| 800]: Train Loss:0.00082924, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:57:14,284 - wn_one_to_x - [INFO] - [E:119| 900]: Train Loss:0.00082971, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 01:58:39,580 - wn_one_to_x - [INFO] - [E:119| 1000]: Train Loss:0.0008302, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:00:04,621 - wn_one_to_x - [INFO] - [E:119| 1100]: Train Loss:0.00083078, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:01:30,158 - wn_one_to_x - [INFO] - [E:119| 1200]: Train Loss:0.00083124, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:02:55,768 - wn_one_to_x - [INFO] - [E:119| 1300]: Train Loss:0.00083167, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:03:43,613 - wn_one_to_x - [INFO] - [Epoch:119]: Training Loss:0.0008319
|
|
|
|
2023-05-04 02:03:43,853 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 02:03:49,297 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 02:03:54,532 - wn_one_to_x - [INFO] - [Evaluating Epoch 119 valid]:
|
|
MRR: Tail : 0.42952, Head : 0.41288, Avg : 0.4212
|
|
MR: Tail : 4639.8, Head : 4164.6, Avg : 4402.2
|
|
Hit-1: Tail : 0.40211, Head : 0.38761, Avg : 0.39486
|
|
Hit-3: Tail : 0.43869, Head : 0.42057, Avg : 0.42963
|
|
Hit-10: Tail : 0.47627, Head : 0.46078, Avg : 0.46852
|
|
2023-05-04 02:03:54,532 - wn_one_to_x - [INFO] - [Epoch 119]: Training Loss: 0.00083193, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 02:03:55,397 - wn_one_to_x - [INFO] - [E:120| 0]: Train Loss:0.00081399, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:05:20,780 - wn_one_to_x - [INFO] - [E:120| 100]: Train Loss:0.00083021, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:06:46,501 - wn_one_to_x - [INFO] - [E:120| 200]: Train Loss:0.00083042, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:08:12,094 - wn_one_to_x - [INFO] - [E:120| 300]: Train Loss:0.00082801, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:09:37,232 - wn_one_to_x - [INFO] - [E:120| 400]: Train Loss:0.00082757, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:11:02,684 - wn_one_to_x - [INFO] - [E:120| 500]: Train Loss:0.00082719, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:12:27,865 - wn_one_to_x - [INFO] - [E:120| 600]: Train Loss:0.00082773, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:13:53,001 - wn_one_to_x - [INFO] - [E:120| 700]: Train Loss:0.0008276, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:15:18,330 - wn_one_to_x - [INFO] - [E:120| 800]: Train Loss:0.0008278, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:16:43,425 - wn_one_to_x - [INFO] - [E:120| 900]: Train Loss:0.00082769, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:18:08,643 - wn_one_to_x - [INFO] - [E:120| 1000]: Train Loss:0.0008285, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:19:33,679 - wn_one_to_x - [INFO] - [E:120| 1100]: Train Loss:0.000829, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:20:59,043 - wn_one_to_x - [INFO] - [E:120| 1200]: Train Loss:0.00082912, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:22:24,821 - wn_one_to_x - [INFO] - [E:120| 1300]: Train Loss:0.0008294, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:23:12,441 - wn_one_to_x - [INFO] - [Epoch:120]: Training Loss:0.0008295
|
|
|
|
2023-05-04 02:23:12,682 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 02:23:18,223 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 02:23:26,940 - wn_one_to_x - [INFO] - [Evaluating Epoch 120 valid]:
|
|
MRR: Tail : 0.43145, Head : 0.41157, Avg : 0.42151
|
|
|
|
2023-05-04 02:23:28,615 - wn_one_to_x - [INFO] - [Epoch 120]: Training Loss: 0.00082951, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 02:23:29,547 - wn_one_to_x - [INFO] - [E:121| 0]: Train Loss:0.00083978, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:24:56,082 - wn_one_to_x - [INFO] - [E:121| 100]: Train Loss:0.0008245, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:26:21,264 - wn_one_to_x - [INFO] - [E:121| 200]: Train Loss:0.00082545, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:27:47,035 - wn_one_to_x - [INFO] - [E:121| 300]: Train Loss:0.00082345, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:29:12,595 - wn_one_to_x - [INFO] - [E:121| 400]: Train Loss:0.00082425, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:30:38,314 - wn_one_to_x - [INFO] - [E:121| 500]: Train Loss:0.00082481, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:32:03,632 - wn_one_to_x - [INFO] - [E:121| 600]: Train Loss:0.00082471, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:33:29,097 - wn_one_to_x - [INFO] - [E:121| 700]: Train Loss:0.0008253, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:34:54,645 - wn_one_to_x - [INFO] - [E:121| 800]: Train Loss:0.00082628, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:36:20,442 - wn_one_to_x - [INFO] - [E:121| 900]: Train Loss:0.00082681, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:37:46,304 - wn_one_to_x - [INFO] - [E:121| 1000]: Train Loss:0.00082743, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:39:12,008 - wn_one_to_x - [INFO] - [E:121| 1100]: Train Loss:0.00082779, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:40:37,495 - wn_one_to_x - [INFO] - [E:121| 1200]: Train Loss:0.00082794, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:42:02,891 - wn_one_to_x - [INFO] - [E:121| 1300]: Train Loss:0.00082823, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:42:50,438 - wn_one_to_x - [INFO] - [Epoch:121]: Training Loss:0.0008283
|
|
|
|
2023-05-04 02:42:50,681 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 02:42:56,219 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 02:43:01,605 - wn_one_to_x - [INFO] - [Evaluating Epoch 121 valid]:
|
|
MRR: Tail : 0.4306, Head : 0.41047, Avg : 0.42054
|
|
|
|
2023-05-04 02:43:01,605 - wn_one_to_x - [INFO] - [Epoch 121]: Training Loss: 0.00082832, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 02:43:02,504 - wn_one_to_x - [INFO] - [E:122| 0]: Train Loss:0.00081347, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:44:27,666 - wn_one_to_x - [INFO] - [E:122| 100]: Train Loss:0.00082297, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:45:52,950 - wn_one_to_x - [INFO] - [E:122| 200]: Train Loss:0.00082234, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:47:18,100 - wn_one_to_x - [INFO] - [E:122| 300]: Train Loss:0.00082326, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:48:43,353 - wn_one_to_x - [INFO] - [E:122| 400]: Train Loss:0.00082265, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:50:09,222 - wn_one_to_x - [INFO] - [E:122| 500]: Train Loss:0.00082276, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:51:34,498 - wn_one_to_x - [INFO] - [E:122| 600]: Train Loss:0.00082305, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:52:59,734 - wn_one_to_x - [INFO] - [E:122| 700]: Train Loss:0.00082312, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:54:25,687 - wn_one_to_x - [INFO] - [E:122| 800]: Train Loss:0.00082362, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:55:51,015 - wn_one_to_x - [INFO] - [E:122| 900]: Train Loss:0.00082432, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:57:16,454 - wn_one_to_x - [INFO] - [E:122| 1000]: Train Loss:0.00082488, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 02:58:41,903 - wn_one_to_x - [INFO] - [E:122| 1100]: Train Loss:0.00082537, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:00:07,238 - wn_one_to_x - [INFO] - [E:122| 1200]: Train Loss:0.00082581, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:01:32,439 - wn_one_to_x - [INFO] - [E:122| 1300]: Train Loss:0.00082591, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:02:20,009 - wn_one_to_x - [INFO] - [Epoch:122]: Training Loss:0.0008261
|
|
|
|
2023-05-04 03:02:20,255 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 03:02:25,705 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 03:02:31,114 - wn_one_to_x - [INFO] - [Evaluating Epoch 122 valid]:
|
|
MRR: Tail : 0.42783, Head : 0.4119, Avg : 0.41987
|
|
|
|
2023-05-04 03:02:31,114 - wn_one_to_x - [INFO] - [Epoch 122]: Training Loss: 0.00082606, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 03:02:31,977 - wn_one_to_x - [INFO] - [E:123| 0]: Train Loss:0.00082398, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:03:57,232 - wn_one_to_x - [INFO] - [E:123| 100]: Train Loss:0.000823, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:05:22,606 - wn_one_to_x - [INFO] - [E:123| 200]: Train Loss:0.00082195, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:06:47,746 - wn_one_to_x - [INFO] - [E:123| 300]: Train Loss:0.00082106, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:08:12,999 - wn_one_to_x - [INFO] - [E:123| 400]: Train Loss:0.00082156, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:09:38,103 - wn_one_to_x - [INFO] - [E:123| 500]: Train Loss:0.00082111, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:11:04,077 - wn_one_to_x - [INFO] - [E:123| 600]: Train Loss:0.00082166, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:12:29,272 - wn_one_to_x - [INFO] - [E:123| 700]: Train Loss:0.00082201, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:13:54,434 - wn_one_to_x - [INFO] - [E:123| 800]: Train Loss:0.0008227, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:15:19,947 - wn_one_to_x - [INFO] - [E:123| 900]: Train Loss:0.00082306, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:16:45,339 - wn_one_to_x - [INFO] - [E:123| 1000]: Train Loss:0.0008234, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:18:10,707 - wn_one_to_x - [INFO] - [E:123| 1100]: Train Loss:0.00082376, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:19:36,319 - wn_one_to_x - [INFO] - [E:123| 1200]: Train Loss:0.00082412, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:21:01,596 - wn_one_to_x - [INFO] - [E:123| 1300]: Train Loss:0.00082461, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:21:49,378 - wn_one_to_x - [INFO] - [Epoch:123]: Training Loss:0.0008249
|
|
|
|
2023-05-04 03:21:49,622 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 03:21:55,075 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 03:22:00,315 - wn_one_to_x - [INFO] - [Evaluating Epoch 123 valid]:
|
|
MRR: Tail : 0.42901, Head : 0.40926, Avg : 0.41913
|
|
|
|
2023-05-04 03:22:00,315 - wn_one_to_x - [INFO] - [Epoch 123]: Training Loss: 0.00082489, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 03:22:01,182 - wn_one_to_x - [INFO] - [E:124| 0]: Train Loss:0.00077819, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:23:26,839 - wn_one_to_x - [INFO] - [E:124| 100]: Train Loss:0.00082045, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:24:52,321 - wn_one_to_x - [INFO] - [E:124| 200]: Train Loss:0.0008189, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:26:17,786 - wn_one_to_x - [INFO] - [E:124| 300]: Train Loss:0.00081971, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:27:43,268 - wn_one_to_x - [INFO] - [E:124| 400]: Train Loss:0.00082009, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:29:08,759 - wn_one_to_x - [INFO] - [E:124| 500]: Train Loss:0.00082021, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:30:34,511 - wn_one_to_x - [INFO] - [E:124| 600]: Train Loss:0.00082056, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:32:00,193 - wn_one_to_x - [INFO] - [E:124| 700]: Train Loss:0.00082046, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:33:26,051 - wn_one_to_x - [INFO] - [E:124| 800]: Train Loss:0.00082141, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:34:51,656 - wn_one_to_x - [INFO] - [E:124| 900]: Train Loss:0.00082168, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:36:16,975 - wn_one_to_x - [INFO] - [E:124| 1000]: Train Loss:0.00082166, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:37:42,524 - wn_one_to_x - [INFO] - [E:124| 1100]: Train Loss:0.00082197, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:39:07,683 - wn_one_to_x - [INFO] - [E:124| 1200]: Train Loss:0.00082234, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:40:32,920 - wn_one_to_x - [INFO] - [E:124| 1300]: Train Loss:0.00082257, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:41:20,782 - wn_one_to_x - [INFO] - [Epoch:124]: Training Loss:0.0008228
|
|
|
|
2023-05-04 03:41:21,026 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 03:41:26,524 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 03:41:31,835 - wn_one_to_x - [INFO] - [Evaluating Epoch 124 valid]:
|
|
MRR: Tail : 0.42761, Head : 0.41234, Avg : 0.41998
|
|
|
|
2023-05-04 03:41:31,835 - wn_one_to_x - [INFO] - [Epoch 124]: Training Loss: 0.0008228, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 03:41:32,696 - wn_one_to_x - [INFO] - [E:125| 0]: Train Loss:0.00085485, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:42:57,688 - wn_one_to_x - [INFO] - [E:125| 100]: Train Loss:0.00082008, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:44:23,250 - wn_one_to_x - [INFO] - [E:125| 200]: Train Loss:0.00081818, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:45:48,452 - wn_one_to_x - [INFO] - [E:125| 300]: Train Loss:0.00081851, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:47:14,145 - wn_one_to_x - [INFO] - [E:125| 400]: Train Loss:0.00081811, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:48:38,940 - wn_one_to_x - [INFO] - [E:125| 500]: Train Loss:0.00081776, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:50:04,337 - wn_one_to_x - [INFO] - [E:125| 600]: Train Loss:0.00081875, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:51:29,854 - wn_one_to_x - [INFO] - [E:125| 700]: Train Loss:0.00081891, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:52:55,052 - wn_one_to_x - [INFO] - [E:125| 800]: Train Loss:0.00081919, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:54:21,450 - wn_one_to_x - [INFO] - [E:125| 900]: Train Loss:0.00081989, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:55:46,845 - wn_one_to_x - [INFO] - [E:125| 1000]: Train Loss:0.00082008, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:57:12,270 - wn_one_to_x - [INFO] - [E:125| 1100]: Train Loss:0.00082024, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 03:58:37,752 - wn_one_to_x - [INFO] - [E:125| 1200]: Train Loss:0.00082051, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:00:03,351 - wn_one_to_x - [INFO] - [E:125| 1300]: Train Loss:0.00082108, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:00:51,136 - wn_one_to_x - [INFO] - [Epoch:125]: Training Loss:0.0008213
|
|
|
|
2023-05-04 04:00:51,376 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 04:00:56,869 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 04:01:02,113 - wn_one_to_x - [INFO] - [Evaluating Epoch 125 valid]:
|
|
MRR: Tail : 0.42814, Head : 0.41267, Avg : 0.42041
|
|
|
|
2023-05-04 04:01:02,113 - wn_one_to_x - [INFO] - [Epoch 125]: Training Loss: 0.00082126, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 04:01:03,014 - wn_one_to_x - [INFO] - [E:126| 0]: Train Loss:0.00082138, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:02:28,232 - wn_one_to_x - [INFO] - [E:126| 100]: Train Loss:0.00081825, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:03:53,567 - wn_one_to_x - [INFO] - [E:126| 200]: Train Loss:0.00081907, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:05:19,080 - wn_one_to_x - [INFO] - [E:126| 300]: Train Loss:0.00081853, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:06:44,091 - wn_one_to_x - [INFO] - [E:126| 400]: Train Loss:0.00081803, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:08:09,559 - wn_one_to_x - [INFO] - [E:126| 500]: Train Loss:0.0008179, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:09:34,841 - wn_one_to_x - [INFO] - [E:126| 600]: Train Loss:0.0008185, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:11:00,093 - wn_one_to_x - [INFO] - [E:126| 700]: Train Loss:0.00081835, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:12:25,268 - wn_one_to_x - [INFO] - [E:126| 800]: Train Loss:0.00081838, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:13:50,955 - wn_one_to_x - [INFO] - [E:126| 900]: Train Loss:0.00081856, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:15:16,698 - wn_one_to_x - [INFO] - [E:126| 1000]: Train Loss:0.00081911, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:16:42,699 - wn_one_to_x - [INFO] - [E:126| 1100]: Train Loss:0.00081935, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:18:07,836 - wn_one_to_x - [INFO] - [E:126| 1200]: Train Loss:0.00081947, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:19:33,036 - wn_one_to_x - [INFO] - [E:126| 1300]: Train Loss:0.00082, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:20:20,530 - wn_one_to_x - [INFO] - [Epoch:126]: Training Loss:0.0008202
|
|
|
|
2023-05-04 04:20:20,779 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 04:20:26,235 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 04:20:31,467 - wn_one_to_x - [INFO] - [Evaluating Epoch 126 valid]:
|
|
MRR: Tail : 0.42866, Head : 0.41111, Avg : 0.41989
|
|
|
|
2023-05-04 04:20:31,467 - wn_one_to_x - [INFO] - [Epoch 126]: Training Loss: 0.00082024, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 04:20:32,330 - wn_one_to_x - [INFO] - [E:127| 0]: Train Loss:0.00079011, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:21:57,442 - wn_one_to_x - [INFO] - [E:127| 100]: Train Loss:0.00081408, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:23:22,493 - wn_one_to_x - [INFO] - [E:127| 200]: Train Loss:0.00081466, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:24:47,322 - wn_one_to_x - [INFO] - [E:127| 300]: Train Loss:0.00081471, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:26:12,154 - wn_one_to_x - [INFO] - [E:127| 400]: Train Loss:0.00081489, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:27:37,030 - wn_one_to_x - [INFO] - [E:127| 500]: Train Loss:0.00081516, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:29:02,252 - wn_one_to_x - [INFO] - [E:127| 600]: Train Loss:0.00081585, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:30:27,514 - wn_one_to_x - [INFO] - [E:127| 700]: Train Loss:0.00081618, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:31:52,710 - wn_one_to_x - [INFO] - [E:127| 800]: Train Loss:0.00081668, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:33:17,946 - wn_one_to_x - [INFO] - [E:127| 900]: Train Loss:0.00081679, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:34:43,325 - wn_one_to_x - [INFO] - [E:127| 1000]: Train Loss:0.00081716, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:36:08,229 - wn_one_to_x - [INFO] - [E:127| 1100]: Train Loss:0.00081738, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:37:33,126 - wn_one_to_x - [INFO] - [E:127| 1200]: Train Loss:0.00081781, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:38:58,451 - wn_one_to_x - [INFO] - [E:127| 1300]: Train Loss:0.00081813, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:39:46,008 - wn_one_to_x - [INFO] - [Epoch:127]: Training Loss:0.0008184
|
|
|
|
2023-05-04 04:39:46,240 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
|
|
2023-05-04 04:39:51,687 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
|
|
2023-05-04 04:39:56,944 - wn_one_to_x - [INFO] - [Evaluating Epoch 127 valid]:
|
|
MRR: Tail : 0.42576, Head : 0.41273, Avg : 0.41924
|
|
|
|
2023-05-04 04:39:56,945 - wn_one_to_x - [INFO] - [Epoch 127]: Training Loss: 0.0008184, Valid MRR: 0.42218,
|
|
|
|
|
|
|
|
2023-05-04 04:39:57,808 - wn_one_to_x - [INFO] - [E:128| 0]: Train Loss:0.00080716, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:41:23,070 - wn_one_to_x - [INFO] - [E:128| 100]: Train Loss:0.00081318, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:42:48,107 - wn_one_to_x - [INFO] - [E:128| 200]: Train Loss:0.00081256, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:44:13,215 - wn_one_to_x - [INFO] - [E:128| 300]: Train Loss:0.0008124, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:45:38,277 - wn_one_to_x - [INFO] - [E:128| 400]: Train Loss:0.00081305, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:47:03,535 - wn_one_to_x - [INFO] - [E:128| 500]: Train Loss:0.00081364, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:48:29,046 - wn_one_to_x - [INFO] - [E:128| 600]: Train Loss:0.00081439, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:49:53,708 - wn_one_to_x - [INFO] - [E:128| 700]: Train Loss:0.00081517, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:51:18,285 - wn_one_to_x - [INFO] - [E:128| 800]: Train Loss:0.00081553, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:52:43,751 - wn_one_to_x - [INFO] - [E:128| 900]: Train Loss:0.00081563, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:54:08,820 - wn_one_to_x - [INFO] - [E:128| 1000]: Train Loss:0.00081587, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:55:33,513 - wn_one_to_x - [INFO] - [E:128| 1100]: Train Loss:0.00081611, Val MRR:0.42218, wn_one_to_x
|
|
2023-05-04 04:56:58,355 - wn_one_to_x - [INFO] - [E:128| 1200]: Train Loss:0.00081604, Val MRR:0.42218, wn_one_to_x
2023-05-04 04:58:22,804 - wn_one_to_x - [INFO] - [E:128| 1300]: Train Loss:0.0008163, Val MRR:0.42218, wn_one_to_x
2023-05-04 04:59:09,932 - wn_one_to_x - [INFO] - [Epoch:128]: Training Loss:0.0008164
2023-05-04 04:59:10,178 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 04:59:15,619 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 04:59:20,866 - wn_one_to_x - [INFO] - [Evaluating Epoch 128 valid]:
MRR: Tail : 0.4314, Head : 0.41211, Avg : 0.42176
2023-05-04 04:59:20,867 - wn_one_to_x - [INFO] - [Epoch 128]: Training Loss: 0.00081642, Valid MRR: 0.42218,
2023-05-04 04:59:21,726 - wn_one_to_x - [INFO] - [E:129| 0]: Train Loss:0.00085258, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:00:46,790 - wn_one_to_x - [INFO] - [E:129| 100]: Train Loss:0.00080988, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:02:11,560 - wn_one_to_x - [INFO] - [E:129| 200]: Train Loss:0.00081325, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:03:36,122 - wn_one_to_x - [INFO] - [E:129| 300]: Train Loss:0.00081444, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:05:00,953 - wn_one_to_x - [INFO] - [E:129| 400]: Train Loss:0.00081469, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:06:26,124 - wn_one_to_x - [INFO] - [E:129| 500]: Train Loss:0.00081483, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:07:51,386 - wn_one_to_x - [INFO] - [E:129| 600]: Train Loss:0.00081454, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:09:16,857 - wn_one_to_x - [INFO] - [E:129| 700]: Train Loss:0.00081479, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:10:41,874 - wn_one_to_x - [INFO] - [E:129| 800]: Train Loss:0.00081503, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:12:06,709 - wn_one_to_x - [INFO] - [E:129| 900]: Train Loss:0.00081494, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:13:31,751 - wn_one_to_x - [INFO] - [E:129| 1000]: Train Loss:0.00081496, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:14:56,330 - wn_one_to_x - [INFO] - [E:129| 1100]: Train Loss:0.0008151, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:16:21,236 - wn_one_to_x - [INFO] - [E:129| 1200]: Train Loss:0.00081529, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:17:46,087 - wn_one_to_x - [INFO] - [E:129| 1300]: Train Loss:0.00081556, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:18:33,538 - wn_one_to_x - [INFO] - [Epoch:129]: Training Loss:0.0008159
2023-05-04 05:18:33,782 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 05:18:39,230 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 05:18:44,457 - wn_one_to_x - [INFO] - [Evaluating Epoch 129 valid]:
MRR: Tail : 0.42828, Head : 0.41122, Avg : 0.41975
MR: Tail : 4763.2, Head : 4183.2, Avg : 4473.2
Hit-1: Tail : 0.40277, Head : 0.38464, Avg : 0.3937
Hit-3: Tail : 0.43902, Head : 0.4209, Avg : 0.42996
Hit-10: Tail : 0.47264, Head : 0.45715, Avg : 0.4649
2023-05-04 05:18:44,458 - wn_one_to_x - [INFO] - [Epoch 129]: Training Loss: 0.00081591, Valid MRR: 0.42218,
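For reference, the Avg column in blocks like the one above is the plain arithmetic mean of the Tail and Head values (for epoch 129: (0.42828 + 0.41122) / 2 = 0.41975), and MRR, MR, and Hit-k follow their standard rank-based definitions. A self-contained sketch under those standard definitions; the rank lists are illustrative placeholders, not ranks from this run:

    import statistics

    def mrr(ranks):        # mean reciprocal rank over the ranks of true entities
        return statistics.mean(1.0 / r for r in ranks)

    def mean_rank(ranks):  # MR: plain average rank
        return statistics.mean(ranks)

    def hits_at(ranks, k): # Hit-k: fraction of ranks <= k
        return sum(r <= k for r in ranks) / len(ranks)

    tail_ranks = [1, 2, 1, 50, 3]    # placeholder tail-prediction ranks
    head_ranks = [1, 4, 2, 120, 1]   # placeholder head-prediction ranks

    for name, fn in (("MRR", mrr), ("MR", mean_rank)):
        t, h = fn(tail_ranks), fn(head_ranks)
        print(f"{name}: Tail : {t:.5g}, Head : {h:.5g}, Avg : {(t + h) / 2:.5g}")
    for k in (1, 3, 10):
        t, h = hits_at(tail_ranks, k), hits_at(head_ranks, k)
        print(f"Hit-{k}: Tail : {t:.5g}, Head : {h:.5g}, Avg : {(t + h) / 2:.5g}")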
2023-05-04 05:18:45,324 - wn_one_to_x - [INFO] - [E:130| 0]: Train Loss:0.00077041, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:20:10,200 - wn_one_to_x - [INFO] - [E:130| 100]: Train Loss:0.00081225, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:21:35,162 - wn_one_to_x - [INFO] - [E:130| 200]: Train Loss:0.00081291, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:23:00,187 - wn_one_to_x - [INFO] - [E:130| 300]: Train Loss:0.0008129, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:24:25,193 - wn_one_to_x - [INFO] - [E:130| 400]: Train Loss:0.0008131, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:25:50,426 - wn_one_to_x - [INFO] - [E:130| 500]: Train Loss:0.0008127, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:27:15,892 - wn_one_to_x - [INFO] - [E:130| 600]: Train Loss:0.00081299, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:28:41,041 - wn_one_to_x - [INFO] - [E:130| 700]: Train Loss:0.0008129, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:30:05,897 - wn_one_to_x - [INFO] - [E:130| 800]: Train Loss:0.00081322, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:31:30,610 - wn_one_to_x - [INFO] - [E:130| 900]: Train Loss:0.00081321, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:32:55,509 - wn_one_to_x - [INFO] - [E:130| 1000]: Train Loss:0.00081339, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:34:20,544 - wn_one_to_x - [INFO] - [E:130| 1100]: Train Loss:0.00081335, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:35:45,342 - wn_one_to_x - [INFO] - [E:130| 1200]: Train Loss:0.00081367, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:37:10,478 - wn_one_to_x - [INFO] - [E:130| 1300]: Train Loss:0.00081414, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:37:57,728 - wn_one_to_x - [INFO] - [Epoch:130]: Training Loss:0.0008143
2023-05-04 05:37:57,969 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 05:38:03,408 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 05:38:08,649 - wn_one_to_x - [INFO] - [Evaluating Epoch 130 valid]:
MRR: Tail : 0.42889, Head : 0.41202, Avg : 0.42046
2023-05-04 05:38:08,649 - wn_one_to_x - [INFO] - [Epoch 130]: Training Loss: 0.00081427, Valid MRR: 0.42218,
2023-05-04 05:38:09,536 - wn_one_to_x - [INFO] - [E:131| 0]: Train Loss:0.00084235, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:39:34,829 - wn_one_to_x - [INFO] - [E:131| 100]: Train Loss:0.00080742, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:42:09,227 - wn_one_to_x - [INFO] - [E:131| 200]: Train Loss:0.00081054, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:43:34,338 - wn_one_to_x - [INFO] - [E:131| 300]: Train Loss:0.00081118, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:44:59,590 - wn_one_to_x - [INFO] - [E:131| 400]: Train Loss:0.00081141, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:46:24,783 - wn_one_to_x - [INFO] - [E:131| 500]: Train Loss:0.00081177, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:47:49,851 - wn_one_to_x - [INFO] - [E:131| 600]: Train Loss:0.0008119, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:49:14,853 - wn_one_to_x - [INFO] - [E:131| 700]: Train Loss:0.00081192, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:50:39,907 - wn_one_to_x - [INFO] - [E:131| 800]: Train Loss:0.00081208, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:52:04,954 - wn_one_to_x - [INFO] - [E:131| 900]: Train Loss:0.00081216, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:53:29,878 - wn_one_to_x - [INFO] - [E:131| 1000]: Train Loss:0.00081266, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:54:54,679 - wn_one_to_x - [INFO] - [E:131| 1100]: Train Loss:0.00081305, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:56:19,497 - wn_one_to_x - [INFO] - [E:131| 1200]: Train Loss:0.00081359, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:57:44,366 - wn_one_to_x - [INFO] - [E:131| 1300]: Train Loss:0.0008137, Val MRR:0.42218, wn_one_to_x
2023-05-04 05:58:31,803 - wn_one_to_x - [INFO] - [Epoch:131]: Training Loss:0.0008139
2023-05-04 05:58:32,043 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 05:58:37,516 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 05:58:42,753 - wn_one_to_x - [INFO] - [Evaluating Epoch 131 valid]:
MRR: Tail : 0.42879, Head : 0.41347, Avg : 0.42113
2023-05-04 05:58:42,753 - wn_one_to_x - [INFO] - [Epoch 131]: Training Loss: 0.00081391, Valid MRR: 0.42218,
2023-05-04 05:58:43,623 - wn_one_to_x - [INFO] - [E:132| 0]: Train Loss:0.00081517, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:00:08,767 - wn_one_to_x - [INFO] - [E:132| 100]: Train Loss:0.00081214, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:01:33,881 - wn_one_to_x - [INFO] - [E:132| 200]: Train Loss:0.00080956, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:02:59,269 - wn_one_to_x - [INFO] - [E:132| 300]: Train Loss:0.00081038, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:04:24,033 - wn_one_to_x - [INFO] - [E:132| 400]: Train Loss:0.00081027, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:05:49,102 - wn_one_to_x - [INFO] - [E:132| 500]: Train Loss:0.00081006, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:07:14,196 - wn_one_to_x - [INFO] - [E:132| 600]: Train Loss:0.00081001, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:08:39,324 - wn_one_to_x - [INFO] - [E:132| 700]: Train Loss:0.00081, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:10:04,296 - wn_one_to_x - [INFO] - [E:132| 800]: Train Loss:0.00081071, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:11:29,398 - wn_one_to_x - [INFO] - [E:132| 900]: Train Loss:0.0008109, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:12:54,298 - wn_one_to_x - [INFO] - [E:132| 1000]: Train Loss:0.00081139, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:14:19,212 - wn_one_to_x - [INFO] - [E:132| 1100]: Train Loss:0.00081157, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:15:43,901 - wn_one_to_x - [INFO] - [E:132| 1200]: Train Loss:0.00081205, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:17:08,463 - wn_one_to_x - [INFO] - [E:132| 1300]: Train Loss:0.0008125, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:17:55,685 - wn_one_to_x - [INFO] - [Epoch:132]: Training Loss:0.0008127
2023-05-04 06:17:55,954 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 06:18:01,403 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 06:18:06,633 - wn_one_to_x - [INFO] - [Evaluating Epoch 132 valid]:
MRR: Tail : 0.42653, Head : 0.41326, Avg : 0.4199
2023-05-04 06:18:06,634 - wn_one_to_x - [INFO] - [Epoch 132]: Training Loss: 0.00081265, Valid MRR: 0.42218,
2023-05-04 06:18:07,494 - wn_one_to_x - [INFO] - [E:133| 0]: Train Loss:0.00081507, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:19:32,526 - wn_one_to_x - [INFO] - [E:133| 100]: Train Loss:0.00081017, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:20:58,055 - wn_one_to_x - [INFO] - [E:133| 200]: Train Loss:0.00080905, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:22:23,193 - wn_one_to_x - [INFO] - [E:133| 300]: Train Loss:0.00080843, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:23:48,443 - wn_one_to_x - [INFO] - [E:133| 400]: Train Loss:0.00080759, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:25:13,076 - wn_one_to_x - [INFO] - [E:133| 500]: Train Loss:0.0008082, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:26:38,262 - wn_one_to_x - [INFO] - [E:133| 600]: Train Loss:0.0008088, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:28:03,469 - wn_one_to_x - [INFO] - [E:133| 700]: Train Loss:0.00080862, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:29:28,460 - wn_one_to_x - [INFO] - [E:133| 800]: Train Loss:0.00080892, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:30:53,302 - wn_one_to_x - [INFO] - [E:133| 900]: Train Loss:0.00080912, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:32:18,121 - wn_one_to_x - [INFO] - [E:133| 1000]: Train Loss:0.00080949, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:33:43,047 - wn_one_to_x - [INFO] - [E:133| 1100]: Train Loss:0.00080973, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:35:07,637 - wn_one_to_x - [INFO] - [E:133| 1200]: Train Loss:0.00080998, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:36:32,269 - wn_one_to_x - [INFO] - [E:133| 1300]: Train Loss:0.00081003, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:37:19,571 - wn_one_to_x - [INFO] - [Epoch:133]: Training Loss:0.0008102
2023-05-04 06:37:19,814 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 06:37:25,264 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 06:37:30,514 - wn_one_to_x - [INFO] - [Evaluating Epoch 133 valid]:
MRR: Tail : 0.42704, Head : 0.41389, Avg : 0.42046
2023-05-04 06:37:30,514 - wn_one_to_x - [INFO] - [Epoch 133]: Training Loss: 0.00081017, Valid MRR: 0.42218,
2023-05-04 06:37:31,380 - wn_one_to_x - [INFO] - [E:134| 0]: Train Loss:0.00079509, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:38:56,708 - wn_one_to_x - [INFO] - [E:134| 100]: Train Loss:0.00080325, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:40:21,765 - wn_one_to_x - [INFO] - [E:134| 200]: Train Loss:0.00080518, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:41:46,946 - wn_one_to_x - [INFO] - [E:134| 300]: Train Loss:0.00080686, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:43:11,825 - wn_one_to_x - [INFO] - [E:134| 400]: Train Loss:0.0008074, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:44:36,812 - wn_one_to_x - [INFO] - [E:134| 500]: Train Loss:0.00080737, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:46:01,856 - wn_one_to_x - [INFO] - [E:134| 600]: Train Loss:0.00080768, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:47:26,759 - wn_one_to_x - [INFO] - [E:134| 700]: Train Loss:0.00080792, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:48:51,431 - wn_one_to_x - [INFO] - [E:134| 800]: Train Loss:0.00080832, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:50:16,400 - wn_one_to_x - [INFO] - [E:134| 900]: Train Loss:0.00080841, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:51:41,571 - wn_one_to_x - [INFO] - [E:134| 1000]: Train Loss:0.00080863, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:53:06,479 - wn_one_to_x - [INFO] - [E:134| 1100]: Train Loss:0.0008092, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:54:31,175 - wn_one_to_x - [INFO] - [E:134| 1200]: Train Loss:0.00080938, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:55:56,397 - wn_one_to_x - [INFO] - [E:134| 1300]: Train Loss:0.00080981, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:56:43,829 - wn_one_to_x - [INFO] - [Epoch:134]: Training Loss:0.0008099
2023-05-04 06:56:44,072 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 06:56:49,535 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 06:56:54,778 - wn_one_to_x - [INFO] - [Evaluating Epoch 134 valid]:
MRR: Tail : 0.42462, Head : 0.41462, Avg : 0.41962
2023-05-04 06:56:54,778 - wn_one_to_x - [INFO] - [Epoch 134]: Training Loss: 0.00080992, Valid MRR: 0.42218,
2023-05-04 06:56:55,646 - wn_one_to_x - [INFO] - [E:135| 0]: Train Loss:0.00078243, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:58:20,133 - wn_one_to_x - [INFO] - [E:135| 100]: Train Loss:0.00080471, Val MRR:0.42218, wn_one_to_x
2023-05-04 06:59:44,688 - wn_one_to_x - [INFO] - [E:135| 200]: Train Loss:0.0008063, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:01:09,393 - wn_one_to_x - [INFO] - [E:135| 300]: Train Loss:0.00080645, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:02:34,649 - wn_one_to_x - [INFO] - [E:135| 400]: Train Loss:0.00080615, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:03:59,569 - wn_one_to_x - [INFO] - [E:135| 500]: Train Loss:0.00080613, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:05:24,481 - wn_one_to_x - [INFO] - [E:135| 600]: Train Loss:0.00080674, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:06:49,456 - wn_one_to_x - [INFO] - [E:135| 700]: Train Loss:0.00080691, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:08:14,565 - wn_one_to_x - [INFO] - [E:135| 800]: Train Loss:0.00080701, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:09:39,643 - wn_one_to_x - [INFO] - [E:135| 900]: Train Loss:0.00080699, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:11:04,916 - wn_one_to_x - [INFO] - [E:135| 1000]: Train Loss:0.00080742, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:12:30,192 - wn_one_to_x - [INFO] - [E:135| 1100]: Train Loss:0.00080756, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:13:55,712 - wn_one_to_x - [INFO] - [E:135| 1200]: Train Loss:0.00080794, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:15:20,663 - wn_one_to_x - [INFO] - [E:135| 1300]: Train Loss:0.00080814, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:16:08,335 - wn_one_to_x - [INFO] - [Epoch:135]: Training Loss:0.0008084
2023-05-04 07:16:08,576 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 07:16:14,020 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 07:16:19,258 - wn_one_to_x - [INFO] - [Evaluating Epoch 135 valid]:
MRR: Tail : 0.42899, Head : 0.41298, Avg : 0.42099
2023-05-04 07:16:19,258 - wn_one_to_x - [INFO] - [Epoch 135]: Training Loss: 0.00080843, Valid MRR: 0.42218,
2023-05-04 07:16:20,123 - wn_one_to_x - [INFO] - [E:136| 0]: Train Loss:0.00078798, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:17:45,243 - wn_one_to_x - [INFO] - [E:136| 100]: Train Loss:0.00080132, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:19:10,114 - wn_one_to_x - [INFO] - [E:136| 200]: Train Loss:0.00080277, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:20:35,005 - wn_one_to_x - [INFO] - [E:136| 300]: Train Loss:0.00080315, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:21:59,951 - wn_one_to_x - [INFO] - [E:136| 400]: Train Loss:0.00080395, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:23:24,695 - wn_one_to_x - [INFO] - [E:136| 500]: Train Loss:0.00080454, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:24:49,424 - wn_one_to_x - [INFO] - [E:136| 600]: Train Loss:0.00080495, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:26:14,205 - wn_one_to_x - [INFO] - [E:136| 700]: Train Loss:0.00080496, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:27:39,342 - wn_one_to_x - [INFO] - [E:136| 800]: Train Loss:0.00080536, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:29:04,204 - wn_one_to_x - [INFO] - [E:136| 900]: Train Loss:0.00080602, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:30:29,014 - wn_one_to_x - [INFO] - [E:136| 1000]: Train Loss:0.00080607, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:31:54,576 - wn_one_to_x - [INFO] - [E:136| 1100]: Train Loss:0.00080583, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:33:20,039 - wn_one_to_x - [INFO] - [E:136| 1200]: Train Loss:0.00080612, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:34:44,989 - wn_one_to_x - [INFO] - [E:136| 1300]: Train Loss:0.00080651, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:35:32,487 - wn_one_to_x - [INFO] - [Epoch:136]: Training Loss:0.0008069
2023-05-04 07:35:32,735 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 07:35:38,188 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 07:35:43,421 - wn_one_to_x - [INFO] - [Evaluating Epoch 136 valid]:
MRR: Tail : 0.42601, Head : 0.41281, Avg : 0.41941
2023-05-04 07:35:43,421 - wn_one_to_x - [INFO] - [Epoch 136]: Training Loss: 0.0008069, Valid MRR: 0.42218,
2023-05-04 07:35:44,280 - wn_one_to_x - [INFO] - [E:137| 0]: Train Loss:0.00080932, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:37:09,461 - wn_one_to_x - [INFO] - [E:137| 100]: Train Loss:0.00080054, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:38:34,665 - wn_one_to_x - [INFO] - [E:137| 200]: Train Loss:0.00080105, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:39:59,657 - wn_one_to_x - [INFO] - [E:137| 300]: Train Loss:0.0008023, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:41:25,192 - wn_one_to_x - [INFO] - [E:137| 400]: Train Loss:0.0008039, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:42:50,434 - wn_one_to_x - [INFO] - [E:137| 500]: Train Loss:0.00080438, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:44:15,700 - wn_one_to_x - [INFO] - [E:137| 600]: Train Loss:0.00080492, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:45:40,986 - wn_one_to_x - [INFO] - [E:137| 700]: Train Loss:0.00080486, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:47:05,883 - wn_one_to_x - [INFO] - [E:137| 800]: Train Loss:0.00080539, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:48:30,841 - wn_one_to_x - [INFO] - [E:137| 900]: Train Loss:0.00080567, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:49:56,005 - wn_one_to_x - [INFO] - [E:137| 1000]: Train Loss:0.0008057, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:51:21,139 - wn_one_to_x - [INFO] - [E:137| 1100]: Train Loss:0.00080632, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:52:45,744 - wn_one_to_x - [INFO] - [E:137| 1200]: Train Loss:0.00080657, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:54:10,710 - wn_one_to_x - [INFO] - [E:137| 1300]: Train Loss:0.00080671, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:54:58,252 - wn_one_to_x - [INFO] - [Epoch:137]: Training Loss:0.0008069
2023-05-04 07:54:58,494 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 07:55:03,938 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 07:55:09,196 - wn_one_to_x - [INFO] - [Evaluating Epoch 137 valid]:
MRR: Tail : 0.42813, Head : 0.41187, Avg : 0.42
2023-05-04 07:55:09,196 - wn_one_to_x - [INFO] - [Epoch 137]: Training Loss: 0.0008069, Valid MRR: 0.42218,
2023-05-04 07:55:10,059 - wn_one_to_x - [INFO] - [E:138| 0]: Train Loss:0.00081907, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:56:35,304 - wn_one_to_x - [INFO] - [E:138| 100]: Train Loss:0.00080205, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:58:00,443 - wn_one_to_x - [INFO] - [E:138| 200]: Train Loss:0.0008024, Val MRR:0.42218, wn_one_to_x
2023-05-04 07:59:25,420 - wn_one_to_x - [INFO] - [E:138| 300]: Train Loss:0.00080297, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:00:50,419 - wn_one_to_x - [INFO] - [E:138| 400]: Train Loss:0.00080285, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:02:15,395 - wn_one_to_x - [INFO] - [E:138| 500]: Train Loss:0.00080297, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:03:40,241 - wn_one_to_x - [INFO] - [E:138| 600]: Train Loss:0.0008038, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:05:05,428 - wn_one_to_x - [INFO] - [E:138| 700]: Train Loss:0.0008036, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:06:30,915 - wn_one_to_x - [INFO] - [E:138| 800]: Train Loss:0.00080384, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:07:55,855 - wn_one_to_x - [INFO] - [E:138| 900]: Train Loss:0.00080458, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:09:20,876 - wn_one_to_x - [INFO] - [E:138| 1000]: Train Loss:0.00080468, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:10:45,711 - wn_one_to_x - [INFO] - [E:138| 1100]: Train Loss:0.00080507, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:12:10,452 - wn_one_to_x - [INFO] - [E:138| 1200]: Train Loss:0.00080517, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:13:35,339 - wn_one_to_x - [INFO] - [E:138| 1300]: Train Loss:0.00080555, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:14:22,874 - wn_one_to_x - [INFO] - [Epoch:138]: Training Loss:0.0008057
2023-05-04 08:14:23,115 - wn_one_to_x - [INFO] - [Valid, Tail_Batch Step 0] wn_one_to_x
2023-05-04 08:14:28,558 - wn_one_to_x - [INFO] - [Valid, Head_Batch Step 0] wn_one_to_x
2023-05-04 08:14:33,821 - wn_one_to_x - [INFO] - [Evaluating Epoch 138 valid]:
MRR: Tail : 0.4258, Head : 0.41551, Avg : 0.42066
2023-05-04 08:14:33,821 - wn_one_to_x - [INFO] - [Epoch 138]: Training Loss: 0.0008057, Valid MRR: 0.42218,
2023-05-04 08:14:34,706 - wn_one_to_x - [INFO] - [E:139| 0]: Train Loss:0.00082367, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:15:59,363 - wn_one_to_x - [INFO] - [E:139| 100]: Train Loss:0.00080375, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:17:24,051 - wn_one_to_x - [INFO] - [E:139| 200]: Train Loss:0.00080428, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:18:48,696 - wn_one_to_x - [INFO] - [E:139| 300]: Train Loss:0.00080343, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:20:13,710 - wn_one_to_x - [INFO] - [E:139| 400]: Train Loss:0.0008023, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:21:38,184 - wn_one_to_x - [INFO] - [E:139| 500]: Train Loss:0.00080275, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:23:03,240 - wn_one_to_x - [INFO] - [E:139| 600]: Train Loss:0.00080254, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:24:28,333 - wn_one_to_x - [INFO] - [E:139| 700]: Train Loss:0.00080291, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:25:53,362 - wn_one_to_x - [INFO] - [E:139| 800]: Train Loss:0.00080316, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:27:18,424 - wn_one_to_x - [INFO] - [E:139| 900]: Train Loss:0.00080325, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:28:43,621 - wn_one_to_x - [INFO] - [E:139| 1000]: Train Loss:0.0008035, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:30:08,477 - wn_one_to_x - [INFO] - [E:139| 1100]: Train Loss:0.0008035, Val MRR:0.42218, wn_one_to_x
2023-05-04 08:31:33,509 - wn_one_to_x - [INFO] - [E:139| 1200]: Train Loss:0.0008036, Val MRR:0.42218, wn_one_to_x
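Logs in this format reduce easily to loss/MRR curves, since every per-step line follows one fixed pattern. A hedged sketch; the field layout is inferred from the lines above, and the file path is hypothetical:

    import re

    # Matches per-step lines such as:
    #   ... [E:139| 1200]: Train Loss:0.0008036, Val MRR:0.42218, wn_one_to_x
    STEP_RE = re.compile(
        r"\[E:(\d+)\|\s*(\d+)\]: Train Loss:([\d.eE+-]+), Val MRR:([\d.]+)"
    )

    def parse_steps(lines):
        """Yield (epoch, step, train_loss, best_val_mrr) for each step line."""
        for line in lines:
            m = STEP_RE.search(line)
            if m:
                yield (int(m.group(1)), int(m.group(2)),
                       float(m.group(3)), float(m.group(4)))

    # usage (path is hypothetical):
    # with open("log/wn_one_to_x.log") as f:
    #     losses = [loss for _, _, loss, _ in parse_steps(f)]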