Thesis/wikidata12k_at.out

nohup: ignoring input
2023-05-27 08:51:48,116 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_at', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
{'batch_size': 128,
'bias': False,
'config_dir': './config/',
'dataset': 'wikidata12k',
'drop': 0.0,
'drop_path': 0.0,
'embed_dim': 400,
'ent_vec_dim': 400,
'expansion_factor': 4,
'expansion_factor_token': 0.5,
'feat_drop': 0.2,
'filt_h': 1,
'filt_w': 9,
'form': 'plain',
'gpu': '3',
'grid_search': False,
'hid_drop': 0.5,
'image_h': 128,
'image_w': 128,
'in_channels': 1,
'inp_drop': 0.2,
'k_h': 20,
'k_w': 10,
'ker_sz': 9,
'l2': 0.0,
'lbl_smooth': 0.1,
'log_dir': './log/',
'lr': 0.0001,
'max_epochs': 500,
'mixer_depth': 16,
'mixer_dim': 256,
'mixer_dropout': 0.2,
'name': 'wikidata12k_at',
'neg_num': 1000,
'num_filt': 96,
'num_workers': 0,
'opt': 'adam',
'out_channels': 32,
'patch_size': 8,
'perm': 1,
'rel_vec_dim': 400,
'restore': False,
'seed': 42,
'test_only': False,
'train_strategy': 'one_to_n'}
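
For context, a quick back-of-envelope on these settings (a sketch that assumes fp32 and a standard patch/token layout; the actual models.py may divide memory differently):

# Rough activation arithmetic from the config above (assumed layout, fp32).
batch_size = 128
image_h = image_w = 128
patch_size = 8
mixer_dim = 256
expansion_factor = 4

num_patches  = (image_h // patch_size) * (image_w // patch_size)  # 16 * 16 = 256 tokens
token_bytes  = batch_size * num_patches * mixer_dim * 4           # fp32 token tensor
hidden_bytes = token_bytes * expansion_factor                     # expanded MLP hidden

print(num_patches)                       # 256
print(token_bytes  / 2**20, "MiB")       # 32.0 MiB
print(hidden_bytes / 2**20, "MiB")       # 128.0 MiB per block, before autograd buffers

With mixer_depth = 16 and autograd retaining intermediates for the backward pass, activations on this order per block add up quickly, which is consistent with the failed 800 MiB allocation below.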
Traceback (most recent call last):
  File "main.py", line 693, in <module>
    model.fit()
  File "main.py", line 492, in fit
    train_loss = self.run_epoch(epoch)
  File "main.py", line 458, in run_epoch
    pred = self.model.forward(sub, rel, neg_ent, self.p.train_strategy)
  File "/root/kg_374/Thesis_split/models.py", line 558, in forward
    z = self.forward_tokens(z)
  File "/root/kg_374/Thesis_split/models.py", line 547, in forward_tokens
    x = block(x)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
    return forward_call(*input, **kwargs)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/container.py", line 139, in forward
    input = module(input)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
    return forward_call(*input, **kwargs)
  File "/root/kg_374/Thesis_split/models.py", line 757, in forward
    * self.mlp(self.norm2(x)))
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
    return forward_call(*input, **kwargs)
  File "/root/kg_374/Thesis_split/models.py", line 821, in forward
    x = self.act(x)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
    return forward_call(*input, **kwargs)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/activation.py", line 681, in forward
    return F.gelu(input, approximate=self.approximate)
RuntimeError: CUDA out of memory. Tried to allocate 800.00 MiB (GPU 0; 31.72 GiB total capacity; 10.92 GiB already allocated; 669.94 MiB free; 10.98 GiB reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation. See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF
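
The traceback walks through forward_tokens, an nn.Sequential of blocks, a pre-norm MLP branch (models.py line 757), and finally the nn.GELU where the allocation fails. A minimal sketch of that kind of block, assuming a generic MLP-Mixer channel-mixing layer inferred from the config keys (mixer_dim, expansion_factor, mixer_dropout), not the exact code in models.py; the `*` on line 757 is assumed here to be a learned per-channel scale:

import torch
import torch.nn as nn

class MixerFeedForward(nn.Module):
    def __init__(self, dim=256, expansion_factor=4, dropout=0.2):
        super().__init__()
        self.fc1 = nn.Linear(dim, dim * expansion_factor)
        self.act = nn.GELU()          # the F.gelu call that runs out of memory above
        self.fc2 = nn.Linear(dim * expansion_factor, dim)
        self.drop = nn.Dropout(dropout)

    def forward(self, x):
        x = self.fc1(x)   # expands to dim * expansion_factor: the large activation
        x = self.act(x)   # allocates a second tensor of the same expanded size
        return self.drop(self.fc2(x))

class MixerBlock(nn.Module):
    def __init__(self, dim=256, expansion_factor=4, dropout=0.2):
        super().__init__()
        self.norm2 = nn.LayerNorm(dim)
        self.mlp = MixerFeedForward(dim, expansion_factor, dropout)
        self.scale = nn.Parameter(torch.ones(dim))  # assumed source of the `*`

    def forward(self, x):
        # residual + scaled MLP branch, matching the shape of line 757
        return x + self.scale * self.mlp(self.norm2(x))

The expansion inside the feed-forward is why a single GELU call can ask for hundreds of MiB at once.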
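
The error message's own hint can be applied without touching the model: cap the allocator's split size before the CUDA context is created. A minimal sketch; the value 128 is an arbitrary starting point, not taken from this run:

import os
os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "max_split_size_mb:128"  # must be set before first CUDA use

import torch
x = torch.zeros(1, device="cuda")  # the first allocation now uses the capped split size

This only helps when reserved memory far exceeds allocated memory (fragmentation), which is not obviously the case here (10.98 GiB reserved vs. 10.92 GiB allocated); the blunter fix is to shrink whatever the 800 MiB tensor scales with, and batch_size (128) is usually the first knob to turn.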