Compare commits

28 Commits

tourier_sp ... sep_vit
| Author | SHA1 | Date |
|---|---|---|
|  | 9075b53be6 |  |
|  | ab5c1d0b4b |  |
|  | 3243b1d963 |  |
|  | 37b01708b4 |  |
|  | a246d2bb64 |  |
|  | 4a962a02ad |  |
|  | f8e969cbd1 |  |
|  | ae0f43ab4d |  |
|  | dda7f13dbd |  |
|  | 1dd423edf0 |  |
|  | a1bf2d7389 |  |
|  | c31588cc5f |  |
|  | c03e24f4c2 |  |
|  | a47a60f6a1 |  |
|  | ba388148d4 |  |
|  | 1b816fed50 |  |
|  | 32962bf421 |  |
|  | b9efe68d3c |  |
|  | 465f98bef8 |  |
|  | d4ac470c54 |  |
|  | 28a8352044 |  |
|  | b77c79708e |  |
|  | 22d44d1a99 |  |
|  | 63ccb4ec75 |  |
|  | 6ec566505f |  |
|  | 30805a0af9 |  |
|  | 2e2b12571a |  |
|  | d4b29eec2c |  |

data/icews14_both/about.txt | 15 | Normal file
							| @@ -0,0 +1,15 @@ | ||||
| # triples: 86517  | ||||
| # entities: 7128  | ||||
| # relations: 12409  | ||||
| # timesteps: 208  | ||||
| # test triples: 8218  | ||||
| # valid triples: 8193  | ||||
| # train triples: 70106  | ||||
| Measure method:  N/A   | ||||
| Target Size :  0   | ||||
| Grow Factor:  0   | ||||
| Shrink Factor:  0   | ||||
| Epsilon Factor: 0   | ||||
| Search method: N/A   | ||||
| filter_dupes: both | ||||
| nonames: False | ||||
							
								
								
									
data/icews14_both/entities.dict | 7128 | Normal file
File diff suppressed because it is too large

data/icews14_both/relations.dict | 12409 | Normal file
File diff suppressed because it is too large

data/icews14_both/test.txt | 8218 | Normal file
File diff suppressed because it is too large

data/icews14_both/time_map.dict | 209 | Normal file
							| @@ -0,0 +1,209 @@ | ||||
| 0	0	2 | ||||
| 1	3	5 | ||||
| 2	6	7 | ||||
| 3	8	9 | ||||
| 4	10	12 | ||||
| 5	13	14 | ||||
| 6	15	16 | ||||
| 7	17	19 | ||||
| 8	20	21 | ||||
| 9	22	23 | ||||
| 10	24	26 | ||||
| 11	27	28 | ||||
| 12	29	30 | ||||
| 13	31	33 | ||||
| 14	34	35 | ||||
| 15	36	37 | ||||
| 16	38	40 | ||||
| 17	41	42 | ||||
| 18	43	44 | ||||
| 19	45	46 | ||||
| 20	47	48 | ||||
| 21	49	49 | ||||
| 22	50	50 | ||||
| 23	51	51 | ||||
| 24	52	53 | ||||
| 25	54	54 | ||||
| 26	55	55 | ||||
| 27	56	57 | ||||
| 28	58	59 | ||||
| 29	60	61 | ||||
| 30	62	62 | ||||
| 31	63	63 | ||||
| 32	64	65 | ||||
| 33	66	68 | ||||
| 34	69	70 | ||||
| 35	71	71 | ||||
| 36	72	72 | ||||
| 37	73	74 | ||||
| 38	75	76 | ||||
| 39	77	78 | ||||
| 40	79	80 | ||||
| 41	81	82 | ||||
| 42	83	84 | ||||
| 43	85	85 | ||||
| 44	86	87 | ||||
| 45	88	89 | ||||
| 46	90	91 | ||||
| 47	92	93 | ||||
| 48	94	96 | ||||
| 49	97	97 | ||||
| 50	98	99 | ||||
| 51	100	101 | ||||
| 52	102	103 | ||||
| 53	104	105 | ||||
| 54	106	107 | ||||
| 55	108	110 | ||||
| 56	111	112 | ||||
| 57	113	114 | ||||
| 58	115	116 | ||||
| 59	117	118 | ||||
| 60	119	119 | ||||
| 61	120	121 | ||||
| 62	122	124 | ||||
| 63	125	125 | ||||
| 64	126	127 | ||||
| 65	128	129 | ||||
| 66	130	131 | ||||
| 67	132	133 | ||||
| 68	134	135 | ||||
| 69	136	138 | ||||
| 70	139	139 | ||||
| 71	140	140 | ||||
| 72	141	141 | ||||
| 73	142	143 | ||||
| 74	144	145 | ||||
| 75	146	147 | ||||
| 76	148	148 | ||||
| 77	149	150 | ||||
| 78	151	152 | ||||
| 79	153	154 | ||||
| 80	155	155 | ||||
| 81	156	157 | ||||
| 82	158	159 | ||||
| 83	160	161 | ||||
| 84	162	163 | ||||
| 85	164	166 | ||||
| 86	167	167 | ||||
| 87	168	168 | ||||
| 88	169	169 | ||||
| 89	170	170 | ||||
| 90	171	173 | ||||
| 91	174	175 | ||||
| 92	176	177 | ||||
| 93	178	180 | ||||
| 94	181	182 | ||||
| 95	183	183 | ||||
| 96	184	185 | ||||
| 97	186	187 | ||||
| 98	188	188 | ||||
| 99	189	190 | ||||
| 100	191	192 | ||||
| 101	193	194 | ||||
| 102	195	195 | ||||
| 103	196	197 | ||||
| 104	198	199 | ||||
| 105	200	201 | ||||
| 106	202	203 | ||||
| 107	204	205 | ||||
| 108	206	208 | ||||
| 109	209	210 | ||||
| 110	211	212 | ||||
| 111	213	215 | ||||
| 112	216	217 | ||||
| 113	218	219 | ||||
| 114	220	221 | ||||
| 115	222	222 | ||||
| 116	223	224 | ||||
| 117	225	226 | ||||
| 118	227	229 | ||||
| 119	230	231 | ||||
| 120	232	233 | ||||
| 121	234	236 | ||||
| 122	237	238 | ||||
| 123	239	239 | ||||
| 124	240	241 | ||||
| 125	242	243 | ||||
| 126	244	245 | ||||
| 127	246	246 | ||||
| 128	247	248 | ||||
| 129	249	250 | ||||
| 130	251	251 | ||||
| 131	252	252 | ||||
| 132	253	253 | ||||
| 133	254	254 | ||||
| 134	255	256 | ||||
| 135	257	257 | ||||
| 136	258	259 | ||||
| 137	260	261 | ||||
| 138	262	263 | ||||
| 139	264	264 | ||||
| 140	265	265 | ||||
| 141	266	266 | ||||
| 142	267	267 | ||||
| 143	268	269 | ||||
| 144	270	271 | ||||
| 145	272	272 | ||||
| 146	273	273 | ||||
| 147	274	274 | ||||
| 148	275	276 | ||||
| 149	277	278 | ||||
| 150	279	279 | ||||
| 151	280	281 | ||||
| 152	282	283 | ||||
| 153	284	285 | ||||
| 154	286	286 | ||||
| 155	287	287 | ||||
| 156	288	288 | ||||
| 157	289	289 | ||||
| 158	290	291 | ||||
| 159	292	292 | ||||
| 160	293	293 | ||||
| 161	294	294 | ||||
| 162	295	295 | ||||
| 163	296	297 | ||||
| 164	298	299 | ||||
| 165	300	300 | ||||
| 166	301	301 | ||||
| 167	302	303 | ||||
| 168	304	305 | ||||
| 169	306	307 | ||||
| 170	308	309 | ||||
| 171	310	310 | ||||
| 172	311	312 | ||||
| 173	313	313 | ||||
| 174	314	314 | ||||
| 175	315	315 | ||||
| 176	316	316 | ||||
| 177	317	317 | ||||
| 178	318	319 | ||||
| 179	320	320 | ||||
| 180	321	321 | ||||
| 181	322	322 | ||||
| 182	323	323 | ||||
| 183	324	324 | ||||
| 184	325	326 | ||||
| 185	327	327 | ||||
| 186	328	328 | ||||
| 187	329	329 | ||||
| 188	330	330 | ||||
| 189	331	332 | ||||
| 190	333	334 | ||||
| 191	335	335 | ||||
| 192	336	336 | ||||
| 193	337	338 | ||||
| 194	339	340 | ||||
| 195	341	342 | ||||
| 196	343	343 | ||||
| 197	344	344 | ||||
| 198	345	346 | ||||
| 199	347	348 | ||||
| 200	349	349 | ||||
| 201	350	350 | ||||
| 202	351	352 | ||||
| 203	353	355 | ||||
| 204	356	357 | ||||
| 205	358	359 | ||||
| 206	360	362 | ||||
| 207	363	365 | ||||
| 208	366	366 | ||||
							
								
								
									
data/icews14_both/train.txt | 70106 | Normal file
File diff suppressed because it is too large

data/icews14_both/valid.txt | 8193 | Normal file
File diff suppressed because it is too large

data/wikidata12k_both/about.txt | 15 | Normal file
							| @@ -0,0 +1,15 @@ | ||||
| # triples: 231529  | ||||
| # entities: 12554  | ||||
| # relations: 423  | ||||
| # timesteps: 70  | ||||
| # test triples: 16195  | ||||
| # valid triples: 16707  | ||||
| # train triples: 198627  | ||||
| Measure method:  N/A   | ||||
| Target Size :  423   | ||||
| Grow Factor:  0   | ||||
| Shrink Factor:  4.0   | ||||
| Epsilon Factor: 0   | ||||
| Search method: N/A   | ||||
| filter_dupes: both | ||||
| nonames: False | ||||
							
								
								
									
data/wikidata12k_both/complete_type.txt | 40621 | Normal file
File diff suppressed because it is too large

data/wikidata12k_both/entities.dict | 12554 | Normal file
File diff suppressed because it is too large

data/wikidata12k_both/relations.dict | 423 | Normal file
							| @@ -0,0 +1,423 @@ | ||||
| 0	P131[0-0] | ||||
| 1	P131[1-1] | ||||
| 2	P131[2-2] | ||||
| 3	P131[3-3] | ||||
| 4	P131[4-4] | ||||
| 5	P131[5-5] | ||||
| 6	P131[6-6] | ||||
| 7	P131[7-7] | ||||
| 8	P131[8-8] | ||||
| 9	P131[9-9] | ||||
| 10	P131[10-10] | ||||
| 11	P131[11-11] | ||||
| 12	P131[12-12] | ||||
| 13	P131[13-13] | ||||
| 14	P131[14-14] | ||||
| 15	P131[15-15] | ||||
| 16	P131[16-16] | ||||
| 17	P131[17-17] | ||||
| 18	P131[18-18] | ||||
| 19	P131[19-19] | ||||
| 20	P131[20-20] | ||||
| 21	P131[21-21] | ||||
| 22	P131[22-22] | ||||
| 23	P131[23-23] | ||||
| 24	P131[24-24] | ||||
| 25	P131[25-25] | ||||
| 26	P131[26-26] | ||||
| 27	P131[27-27] | ||||
| 28	P131[28-28] | ||||
| 29	P131[29-29] | ||||
| 30	P131[30-30] | ||||
| 31	P131[31-31] | ||||
| 32	P131[32-32] | ||||
| 33	P131[33-33] | ||||
| 34	P131[34-34] | ||||
| 35	P131[35-35] | ||||
| 36	P131[36-36] | ||||
| 37	P131[37-37] | ||||
| 38	P131[38-38] | ||||
| 39	P131[39-39] | ||||
| 40	P131[40-40] | ||||
| 41	P131[41-41] | ||||
| 42	P131[42-42] | ||||
| 43	P131[43-43] | ||||
| 44	P131[44-44] | ||||
| 45	P131[45-45] | ||||
| 46	P131[46-46] | ||||
| 47	P131[47-47] | ||||
| 48	P131[48-48] | ||||
| 49	P131[49-49] | ||||
| 50	P131[50-50] | ||||
| 51	P131[51-51] | ||||
| 52	P131[52-52] | ||||
| 53	P131[53-53] | ||||
| 54	P131[54-54] | ||||
| 55	P131[55-55] | ||||
| 56	P131[56-56] | ||||
| 57	P131[57-57] | ||||
| 58	P131[58-58] | ||||
| 59	P131[59-59] | ||||
| 60	P131[60-60] | ||||
| 61	P131[61-61] | ||||
| 62	P131[62-62] | ||||
| 63	P131[63-63] | ||||
| 64	P131[64-64] | ||||
| 65	P131[65-65] | ||||
| 66	P131[66-66] | ||||
| 67	P131[67-67] | ||||
| 68	P131[68-68] | ||||
| 69	P131[69-69] | ||||
| 70	P1435[65-65] | ||||
| 71	P39[49-49] | ||||
| 72	P39[50-50] | ||||
| 73	P39[51-51] | ||||
| 74	P39[52-52] | ||||
| 75	P39[53-53] | ||||
| 76	P39[54-54] | ||||
| 77	P39[55-55] | ||||
| 78	P39[56-56] | ||||
| 79	P39[57-57] | ||||
| 80	P39[58-58] | ||||
| 81	P39[59-59] | ||||
| 82	P39[60-60] | ||||
| 83	P39[61-61] | ||||
| 84	P39[62-62] | ||||
| 85	P39[63-63] | ||||
| 86	P39[64-64] | ||||
| 87	P39[65-65] | ||||
| 88	P39[66-66] | ||||
| 89	P39[67-67] | ||||
| 90	P39[68-68] | ||||
| 91	P39[69-69] | ||||
| 92	P54[40-40] | ||||
| 93	P54[41-41] | ||||
| 94	P54[42-42] | ||||
| 95	P54[43-43] | ||||
| 96	P54[44-44] | ||||
| 97	P54[45-45] | ||||
| 98	P54[46-46] | ||||
| 99	P54[47-47] | ||||
| 100	P54[48-48] | ||||
| 101	P54[49-49] | ||||
| 102	P54[50-50] | ||||
| 103	P54[51-51] | ||||
| 104	P54[52-52] | ||||
| 105	P54[53-53] | ||||
| 106	P54[54-54] | ||||
| 107	P54[55-55] | ||||
| 108	P54[56-56] | ||||
| 109	P54[57-57] | ||||
| 110	P54[58-58] | ||||
| 111	P54[59-59] | ||||
| 112	P54[60-60] | ||||
| 113	P54[61-61] | ||||
| 114	P54[62-62] | ||||
| 115	P54[63-63] | ||||
| 116	P54[64-64] | ||||
| 117	P54[65-65] | ||||
| 118	P54[66-66] | ||||
| 119	P54[67-67] | ||||
| 120	P54[68-68] | ||||
| 121	P54[69-69] | ||||
| 122	P31[0-0] | ||||
| 123	P31[1-1] | ||||
| 124	P31[2-2] | ||||
| 125	P31[3-3] | ||||
| 126	P31[4-4] | ||||
| 127	P31[5-5] | ||||
| 128	P31[6-6] | ||||
| 129	P31[7-7] | ||||
| 130	P31[8-8] | ||||
| 131	P31[9-9] | ||||
| 132	P31[10-10] | ||||
| 133	P31[11-11] | ||||
| 134	P31[12-12] | ||||
| 135	P31[13-13] | ||||
| 136	P31[14-14] | ||||
| 137	P31[15-15] | ||||
| 138	P31[16-16] | ||||
| 139	P31[17-17] | ||||
| 140	P31[18-18] | ||||
| 141	P31[19-19] | ||||
| 142	P31[20-20] | ||||
| 143	P31[21-21] | ||||
| 144	P31[22-22] | ||||
| 145	P31[23-23] | ||||
| 146	P31[24-24] | ||||
| 147	P31[25-25] | ||||
| 148	P31[26-26] | ||||
| 149	P31[27-27] | ||||
| 150	P31[28-28] | ||||
| 151	P31[29-29] | ||||
| 152	P31[30-30] | ||||
| 153	P31[31-31] | ||||
| 154	P31[32-32] | ||||
| 155	P31[33-33] | ||||
| 156	P31[34-34] | ||||
| 157	P31[35-35] | ||||
| 158	P31[36-36] | ||||
| 159	P31[37-37] | ||||
| 160	P31[38-38] | ||||
| 161	P31[39-39] | ||||
| 162	P31[40-40] | ||||
| 163	P31[41-41] | ||||
| 164	P31[42-42] | ||||
| 165	P31[43-43] | ||||
| 166	P31[44-44] | ||||
| 167	P31[45-45] | ||||
| 168	P31[46-46] | ||||
| 169	P31[47-47] | ||||
| 170	P31[48-48] | ||||
| 171	P31[49-49] | ||||
| 172	P31[50-50] | ||||
| 173	P31[51-51] | ||||
| 174	P31[52-52] | ||||
| 175	P31[53-53] | ||||
| 176	P31[54-54] | ||||
| 177	P31[55-55] | ||||
| 178	P31[56-56] | ||||
| 179	P31[57-57] | ||||
| 180	P31[58-58] | ||||
| 181	P31[59-59] | ||||
| 182	P31[60-60] | ||||
| 183	P31[61-61] | ||||
| 184	P31[62-62] | ||||
| 185	P31[63-63] | ||||
| 186	P31[64-64] | ||||
| 187	P31[65-65] | ||||
| 188	P31[66-66] | ||||
| 189	P31[67-67] | ||||
| 190	P31[68-68] | ||||
| 191	P31[69-69] | ||||
| 192	P463[26-26] | ||||
| 193	P463[27-27] | ||||
| 194	P463[28-28] | ||||
| 195	P463[29-29] | ||||
| 196	P463[30-30] | ||||
| 197	P463[31-31] | ||||
| 198	P463[32-32] | ||||
| 199	P463[33-33] | ||||
| 200	P463[34-34] | ||||
| 201	P463[35-35] | ||||
| 202	P463[36-36] | ||||
| 203	P463[37-37] | ||||
| 204	P463[38-38] | ||||
| 205	P463[39-39] | ||||
| 206	P463[40-40] | ||||
| 207	P463[41-41] | ||||
| 208	P463[42-42] | ||||
| 209	P463[43-43] | ||||
| 210	P463[44-44] | ||||
| 211	P463[45-45] | ||||
| 212	P463[46-46] | ||||
| 213	P463[47-47] | ||||
| 214	P463[48-48] | ||||
| 215	P463[49-49] | ||||
| 216	P463[50-50] | ||||
| 217	P463[51-51] | ||||
| 218	P463[52-52] | ||||
| 219	P463[53-53] | ||||
| 220	P463[54-54] | ||||
| 221	P463[55-55] | ||||
| 222	P463[56-56] | ||||
| 223	P463[57-57] | ||||
| 224	P463[58-58] | ||||
| 225	P463[59-59] | ||||
| 226	P463[60-60] | ||||
| 227	P463[61-61] | ||||
| 228	P463[62-62] | ||||
| 229	P463[63-63] | ||||
| 230	P463[64-64] | ||||
| 231	P463[65-65] | ||||
| 232	P463[66-66] | ||||
| 233	P463[67-67] | ||||
| 234	P463[68-68] | ||||
| 235	P463[69-69] | ||||
| 236	P512[4-69] | ||||
| 237	P190[0-29] | ||||
| 238	P150[0-3] | ||||
| 239	P1376[39-47] | ||||
| 240	P463[0-7] | ||||
| 241	P166[0-7] | ||||
| 242	P2962[18-30] | ||||
| 243	P108[29-36] | ||||
| 244	P39[0-3] | ||||
| 245	P17[47-48] | ||||
| 246	P166[21-23] | ||||
| 247	P793[46-69] | ||||
| 248	P69[32-41] | ||||
| 249	P17[57-58] | ||||
| 250	P190[42-45] | ||||
| 251	P2962[39-42] | ||||
| 252	P54[0-18] | ||||
| 253	P26[56-61] | ||||
| 254	P150[14-17] | ||||
| 255	P463[16-17] | ||||
| 256	P26[39-46] | ||||
| 257	P579[36-43] | ||||
| 258	P579[16-23] | ||||
| 259	P2962[59-60] | ||||
| 260	P1411[59-61] | ||||
| 261	P26[20-27] | ||||
| 262	P6[4-69] | ||||
| 263	P1435[33-34] | ||||
| 264	P166[52-53] | ||||
| 265	P108[49-57] | ||||
| 266	P150[10-13] | ||||
| 267	P1346[47-68] | ||||
| 268	P150[18-21] | ||||
| 269	P1346[13-46] | ||||
| 270	P69[20-23] | ||||
| 271	P39[31-32] | ||||
| 272	P1411[32-37] | ||||
| 273	P166[62-63] | ||||
| 274	P150[44-47] | ||||
| 275	P2962[61-62] | ||||
| 276	P150[48-51] | ||||
| 277	P150[52-55] | ||||
| 278	P1411[62-67] | ||||
| 279	P1435[35-36] | ||||
| 280	P1411[48-51] | ||||
| 281	P150[22-25] | ||||
| 282	P2962[63-64] | ||||
| 283	P2962[65-66] | ||||
| 284	P166[58-59] | ||||
| 285	P190[46-49] | ||||
| 286	P54[34-35] | ||||
| 287	P1435[4-16] | ||||
| 288	P463[18-19] | ||||
| 289	P150[31-34] | ||||
| 290	P150[35-38] | ||||
| 291	P39[35-36] | ||||
| 292	P26[62-69] | ||||
| 293	P1411[56-58] | ||||
| 294	P1435[37-38] | ||||
| 295	P166[60-61] | ||||
| 296	P39[33-34] | ||||
| 297	P102[24-31] | ||||
| 298	P2962[43-46] | ||||
| 299	P108[37-48] | ||||
| 300	P190[50-53] | ||||
| 301	P39[4-6] | ||||
| 302	P1435[39-40] | ||||
| 303	P793[0-45] | ||||
| 304	P150[64-69] | ||||
| 305	P39[19-22] | ||||
| 306	P27[30-38] | ||||
| 307	P2962[31-38] | ||||
| 308	P1411[24-31] | ||||
| 309	P102[40-45] | ||||
| 310	P39[37-38] | ||||
| 311	P463[8-11] | ||||
| 312	P1435[41-42] | ||||
| 313	P27[52-59] | ||||
| 314	P69[16-19] | ||||
| 315	P17[16-18] | ||||
| 316	P190[54-57] | ||||
| 317	P1435[43-44] | ||||
| 318	P166[8-15] | ||||
| 319	P166[45-47] | ||||
| 320	P2962[47-50] | ||||
| 321	P39[39-40] | ||||
| 322	P1411[52-55] | ||||
| 323	P108[58-69] | ||||
| 324	P463[20-21] | ||||
| 325	P39[41-42] | ||||
| 326	P150[26-30] | ||||
| 327	P150[39-43] | ||||
| 328	P1435[45-46] | ||||
| 329	P26[28-38] | ||||
| 330	P54[27-30] | ||||
| 331	P190[58-61] | ||||
| 332	P17[59-61] | ||||
| 333	P54[36-37] | ||||
| 334	P166[16-20] | ||||
| 335	P166[37-40] | ||||
| 336	P1435[47-48] | ||||
| 337	P17[0-3] | ||||
| 338	P26[47-55] | ||||
| 339	P1435[49-50] | ||||
| 340	P1435[25-28] | ||||
| 341	P150[4-9] | ||||
| 342	P102[63-69] | ||||
| 343	P26[0-19] | ||||
| 344	P1435[17-24] | ||||
| 345	P39[23-26] | ||||
| 346	P1435[51-52] | ||||
| 347	P39[7-11] | ||||
| 348	P69[12-15] | ||||
| 349	P69[24-31] | ||||
| 350	P102[0-23] | ||||
| 351	P39[43-44] | ||||
| 352	P579[24-35] | ||||
| 353	P190[62-65] | ||||
| 354	P1435[53-54] | ||||
| 355	P1376[0-18] | ||||
| 356	P27[0-14] | ||||
| 357	P463[12-15] | ||||
| 358	P166[33-36] | ||||
| 359	P102[32-39] | ||||
| 360	P17[4-7] | ||||
| 361	P190[30-41] | ||||
| 362	P166[24-28] | ||||
| 363	P190[66-69] | ||||
| 364	P69[42-69] | ||||
| 365	P1435[55-56] | ||||
| 366	P54[31-33] | ||||
| 367	P39[45-46] | ||||
| 368	P17[12-15] | ||||
| 369	P1435[57-58] | ||||
| 370	P54[19-26] | ||||
| 371	P2962[51-54] | ||||
| 372	P2962[67-69] | ||||
| 373	P1435[59-60] | ||||
| 374	P579[44-56] | ||||
| 375	P1435[61-62] | ||||
| 376	P166[41-44] | ||||
| 377	P17[19-22] | ||||
| 378	P1376[19-38] | ||||
| 379	P17[23-26] | ||||
| 380	P1376[48-69] | ||||
| 381	P463[22-23] | ||||
| 382	P17[27-30] | ||||
| 383	P1435[63-64] | ||||
| 384	P69[0-3] | ||||
| 385	P1435[66-67] | ||||
| 386	P17[35-38] | ||||
| 387	P69[8-11] | ||||
| 388	P1435[68-69] | ||||
| 389	P17[31-34] | ||||
| 390	P102[46-53] | ||||
| 391	P27[60-69] | ||||
| 392	P579[57-69] | ||||
| 393	P69[4-7] | ||||
| 394	P1411[7-14] | ||||
| 395	P551[0-35] | ||||
| 396	P108[0-28] | ||||
| 397	P17[8-11] | ||||
| 398	P1411[38-47] | ||||
| 399	P17[43-46] | ||||
| 400	P17[49-52] | ||||
| 401	P166[64-69] | ||||
| 402	P1435[29-32] | ||||
| 403	P54[38-39] | ||||
| 404	P39[27-30] | ||||
| 405	P2962[55-58] | ||||
| 406	P463[24-25] | ||||
| 407	P17[39-42] | ||||
| 408	P17[53-56] | ||||
| 409	P17[66-69] | ||||
| 410	P17[62-65] | ||||
| 411	P1411[15-23] | ||||
| 412	P166[48-51] | ||||
| 413	P27[15-29] | ||||
| 414	P150[56-63] | ||||
| 415	P27[39-51] | ||||
| 416	P39[47-48] | ||||
| 417	P166[29-32] | ||||
| 418	P39[12-18] | ||||
| 419	P166[54-57] | ||||
| 420	P551[36-69] | ||||
| 421	P579[0-15] | ||||
| 422	P102[54-62] | ||||
							
								
								
									
data/wikidata12k_both/test.txt | 16195 | Normal file
File diff suppressed because it is too large

data/wikidata12k_both/test_type.txt | 4062 | Normal file
File diff suppressed because it is too large

data/wikidata12k_both/time_map.dict | 71 | Normal file
							| @@ -0,0 +1,71 @@ | ||||
| 0	19	19 | ||||
| 1	20	1643 | ||||
| 2	1644	1790 | ||||
| 3	1791	1816 | ||||
| 4	1817	1855 | ||||
| 5	1856	1871 | ||||
| 6	1872	1893 | ||||
| 7	1894	1905 | ||||
| 8	1906	1913 | ||||
| 9	1914	1918 | ||||
| 10	1919	1920 | ||||
| 11	1921	1924 | ||||
| 12	1925	1929 | ||||
| 13	1930	1933 | ||||
| 14	1934	1937 | ||||
| 15	1938	1941 | ||||
| 16	1942	1945 | ||||
| 17	1946	1948 | ||||
| 18	1949	1950 | ||||
| 19	1951	1953 | ||||
| 20	1954	1956 | ||||
| 21	1957	1959 | ||||
| 22	1960	1961 | ||||
| 23	1962	1963 | ||||
| 24	1964	1965 | ||||
| 25	1966	1967 | ||||
| 26	1968	1968 | ||||
| 27	1969	1970 | ||||
| 28	1971	1972 | ||||
| 29	1973	1974 | ||||
| 30	1975	1976 | ||||
| 31	1977	1978 | ||||
| 32	1979	1980 | ||||
| 33	1981	1982 | ||||
| 34	1983	1983 | ||||
| 35	1984	1984 | ||||
| 36	1985	1985 | ||||
| 37	1986	1986 | ||||
| 38	1987	1987 | ||||
| 39	1988	1988 | ||||
| 40	1989	1989 | ||||
| 41	1990	1990 | ||||
| 42	1991	1991 | ||||
| 43	1992	1992 | ||||
| 44	1993	1993 | ||||
| 45	1994	1994 | ||||
| 46	1995	1995 | ||||
| 47	1996	1996 | ||||
| 48	1997	1997 | ||||
| 49	1998	1998 | ||||
| 50	1999	1999 | ||||
| 51	2000	2000 | ||||
| 52	2001	2001 | ||||
| 53	2002	2002 | ||||
| 54	2003	2003 | ||||
| 55	2004	2004 | ||||
| 56	2005	2005 | ||||
| 57	2006	2006 | ||||
| 58	2007	2007 | ||||
| 59	2008	2008 | ||||
| 60	2009	2009 | ||||
| 61	2010	2010 | ||||
| 62	2011	2011 | ||||
| 63	2012	2012 | ||||
| 64	2013	2013 | ||||
| 65	2014	2014 | ||||
| 66	2015	2015 | ||||
| 67	2016	2016 | ||||
| 68	2017	2017 | ||||
| 69	2018	2020 | ||||
| 70	2021	2021 | ||||
							
								
								
									
data/wikidata12k_both/train.txt | 198627 | Normal file
File diff suppressed because it is too large

data/wikidata12k_both/train_type.txt | 32497 | Normal file
File diff suppressed because it is too large

data/wikidata12k_both/valid.txt | 16707 | Normal file
File diff suppressed because it is too large

data/yago11k_both/about.txt | 15 | Normal file
							| @@ -0,0 +1,15 @@ | ||||
| # triples: 78032  | ||||
| # entities: 10526  | ||||
| # relations: 177  | ||||
| # timesteps: 46  | ||||
| # test triples: 6909  | ||||
| # valid triples: 7198  | ||||
| # train triples: 63925  | ||||
| Measure method:  N/A   | ||||
| Target Size :  0   | ||||
| Grow Factor:  0   | ||||
| Shrink Factor:  0   | ||||
| Epsilon Factor: 5.0   | ||||
| Search method: N/A   | ||||
| filter_dupes: both | ||||
| nonames: False | ||||
							
								
								
									
data/yago11k_both/complete_type.txt | 20509 | Normal file
File diff suppressed because it is too large

data/yago11k_both/entities.dict | 10526 | Normal file
File diff suppressed because it is too large

data/yago11k_both/relations.dict | 177 | Normal file
							| @@ -0,0 +1,177 @@ | ||||
| 0	<wasBornIn>[0-2] | ||||
| 1	<wasBornIn>[2-5] | ||||
| 2	<wasBornIn>[5-7] | ||||
| 3	<wasBornIn>[7-10] | ||||
| 4	<wasBornIn>[10-12] | ||||
| 5	<wasBornIn>[12-15] | ||||
| 6	<wasBornIn>[15-17] | ||||
| 7	<wasBornIn>[17-20] | ||||
| 8	<wasBornIn>[20-22] | ||||
| 9	<wasBornIn>[22-25] | ||||
| 10	<wasBornIn>[25-27] | ||||
| 11	<wasBornIn>[27-30] | ||||
| 12	<wasBornIn>[30-32] | ||||
| 13	<wasBornIn>[32-35] | ||||
| 14	<wasBornIn>[35-45] | ||||
| 15	<wasBornIn>[52-52] | ||||
| 16	<diedIn>[0-3] | ||||
| 17	<diedIn>[3-5] | ||||
| 18	<diedIn>[5-7] | ||||
| 19	<diedIn>[7-10] | ||||
| 20	<diedIn>[10-12] | ||||
| 21	<diedIn>[12-14] | ||||
| 22	<diedIn>[14-17] | ||||
| 23	<diedIn>[17-19] | ||||
| 24	<diedIn>[19-21] | ||||
| 25	<diedIn>[21-23] | ||||
| 26	<diedIn>[23-25] | ||||
| 27	<diedIn>[25-27] | ||||
| 28	<diedIn>[27-29] | ||||
| 29	<diedIn>[29-32] | ||||
| 30	<diedIn>[32-34] | ||||
| 31	<diedIn>[34-36] | ||||
| 32	<diedIn>[36-38] | ||||
| 33	<diedIn>[38-40] | ||||
| 34	<diedIn>[40-42] | ||||
| 35	<diedIn>[42-44] | ||||
| 36	<diedIn>[44-47] | ||||
| 37	<diedIn>[47-49] | ||||
| 38	<diedIn>[49-51] | ||||
| 39	<diedIn>[51-53] | ||||
| 40	<diedIn>[53-55] | ||||
| 41	<diedIn>[55-57] | ||||
| 42	<diedIn>[59-59] | ||||
| 43	<worksAt>[0-3] | ||||
| 44	<worksAt>[3-5] | ||||
| 45	<worksAt>[5-7] | ||||
| 46	<worksAt>[7-10] | ||||
| 47	<worksAt>[10-12] | ||||
| 48	<worksAt>[12-14] | ||||
| 49	<worksAt>[14-17] | ||||
| 50	<worksAt>[17-19] | ||||
| 51	<worksAt>[19-21] | ||||
| 52	<worksAt>[21-23] | ||||
| 53	<worksAt>[23-25] | ||||
| 54	<worksAt>[25-27] | ||||
| 55	<worksAt>[27-29] | ||||
| 56	<worksAt>[29-32] | ||||
| 57	<worksAt>[32-34] | ||||
| 58	<worksAt>[34-36] | ||||
| 59	<worksAt>[36-40] | ||||
| 60	<worksAt>[40-42] | ||||
| 61	<worksAt>[42-47] | ||||
| 62	<worksAt>[47-53] | ||||
| 63	<worksAt>[59-59] | ||||
| 64	<playsFor>[0-3] | ||||
| 65	<playsFor>[3-5] | ||||
| 66	<playsFor>[5-23] | ||||
| 67	<playsFor>[23-25] | ||||
| 68	<playsFor>[25-27] | ||||
| 69	<playsFor>[27-29] | ||||
| 70	<playsFor>[29-32] | ||||
| 71	<playsFor>[32-34] | ||||
| 72	<playsFor>[34-36] | ||||
| 73	<playsFor>[36-38] | ||||
| 74	<playsFor>[38-40] | ||||
| 75	<playsFor>[40-42] | ||||
| 76	<playsFor>[42-44] | ||||
| 77	<playsFor>[44-47] | ||||
| 78	<playsFor>[47-51] | ||||
| 79	<playsFor>[59-59] | ||||
| 80	<hasWonPrize>[1-4] | ||||
| 81	<hasWonPrize>[4-6] | ||||
| 82	<hasWonPrize>[6-8] | ||||
| 83	<hasWonPrize>[8-11] | ||||
| 84	<hasWonPrize>[11-15] | ||||
| 85	<hasWonPrize>[15-18] | ||||
| 86	<hasWonPrize>[18-22] | ||||
| 87	<hasWonPrize>[22-26] | ||||
| 88	<hasWonPrize>[26-30] | ||||
| 89	<hasWonPrize>[30-33] | ||||
| 90	<hasWonPrize>[33-37] | ||||
| 91	<hasWonPrize>[37-47] | ||||
| 92	<hasWonPrize>[47-53] | ||||
| 93	<hasWonPrize>[59-59] | ||||
| 94	<isMarriedTo>[0-3] | ||||
| 95	<isMarriedTo>[3-5] | ||||
| 96	<isMarriedTo>[5-7] | ||||
| 97	<isMarriedTo>[7-10] | ||||
| 98	<isMarriedTo>[10-12] | ||||
| 99	<isMarriedTo>[12-14] | ||||
| 100	<isMarriedTo>[14-17] | ||||
| 101	<isMarriedTo>[17-19] | ||||
| 102	<isMarriedTo>[19-21] | ||||
| 103	<isMarriedTo>[21-23] | ||||
| 104	<isMarriedTo>[23-25] | ||||
| 105	<isMarriedTo>[25-27] | ||||
| 106	<isMarriedTo>[27-29] | ||||
| 107	<isMarriedTo>[29-32] | ||||
| 108	<isMarriedTo>[32-34] | ||||
| 109	<isMarriedTo>[34-38] | ||||
| 110	<isMarriedTo>[38-42] | ||||
| 111	<isMarriedTo>[42-47] | ||||
| 112	<isMarriedTo>[47-51] | ||||
| 113	<isMarriedTo>[51-55] | ||||
| 114	<isMarriedTo>[59-59] | ||||
| 115	<owns>[0-10] | ||||
| 116	<owns>[10-17] | ||||
| 117	<owns>[17-19] | ||||
| 118	<owns>[19-23] | ||||
| 119	<owns>[23-36] | ||||
| 120	<owns>[36-38] | ||||
| 121	<owns>[59-59] | ||||
| 122	<graduatedFrom>[0-3] | ||||
| 123	<graduatedFrom>[3-5] | ||||
| 124	<graduatedFrom>[5-7] | ||||
| 125	<graduatedFrom>[7-10] | ||||
| 126	<graduatedFrom>[10-14] | ||||
| 127	<graduatedFrom>[14-17] | ||||
| 128	<graduatedFrom>[17-19] | ||||
| 129	<graduatedFrom>[19-21] | ||||
| 130	<graduatedFrom>[21-23] | ||||
| 131	<graduatedFrom>[23-27] | ||||
| 132	<graduatedFrom>[27-32] | ||||
| 133	<graduatedFrom>[32-34] | ||||
| 134	<graduatedFrom>[34-38] | ||||
| 135	<graduatedFrom>[38-42] | ||||
| 136	<graduatedFrom>[59-59] | ||||
| 137	<isAffiliatedTo>[1-4] | ||||
| 138	<isAffiliatedTo>[4-6] | ||||
| 139	<isAffiliatedTo>[6-8] | ||||
| 140	<isAffiliatedTo>[8-11] | ||||
| 141	<isAffiliatedTo>[11-13] | ||||
| 142	<isAffiliatedTo>[13-15] | ||||
| 143	<isAffiliatedTo>[15-18] | ||||
| 144	<isAffiliatedTo>[18-20] | ||||
| 145	<isAffiliatedTo>[20-22] | ||||
| 146	<isAffiliatedTo>[22-24] | ||||
| 147	<isAffiliatedTo>[24-26] | ||||
| 148	<isAffiliatedTo>[26-28] | ||||
| 149	<isAffiliatedTo>[28-30] | ||||
| 150	<isAffiliatedTo>[30-33] | ||||
| 151	<isAffiliatedTo>[33-35] | ||||
| 152	<isAffiliatedTo>[35-37] | ||||
| 153	<isAffiliatedTo>[37-40] | ||||
| 154	<isAffiliatedTo>[40-42] | ||||
| 155	<isAffiliatedTo>[42-44] | ||||
| 156	<isAffiliatedTo>[44-47] | ||||
| 157	<isAffiliatedTo>[47-49] | ||||
| 158	<isAffiliatedTo>[49-51] | ||||
| 159	<isAffiliatedTo>[51-53] | ||||
| 160	<isAffiliatedTo>[53-55] | ||||
| 161	<isAffiliatedTo>[55-57] | ||||
| 162	<isAffiliatedTo>[59-59] | ||||
| 163	<created>[0-3] | ||||
| 164	<created>[3-5] | ||||
| 165	<created>[5-10] | ||||
| 166	<created>[10-12] | ||||
| 167	<created>[12-17] | ||||
| 168	<created>[17-19] | ||||
| 169	<created>[19-25] | ||||
| 170	<created>[25-29] | ||||
| 171	<created>[29-32] | ||||
| 172	<created>[32-36] | ||||
| 173	<created>[36-42] | ||||
| 174	<created>[42-47] | ||||
| 175	<created>[47-53] | ||||
| 176	<created>[59-59] | ||||
							
								
								
									
data/yago11k_both/test.txt | 6909 | Normal file
File diff suppressed because it is too large

data/yago11k_both/test_type.txt | 2051 | Normal file
File diff suppressed because it is too large

data/yago11k_both/time_map.dict | 60 | Normal file
							| @@ -0,0 +1,60 @@ | ||||
| 0	-431	1782 | ||||
| 1	1783	1848 | ||||
| 2	1849	1870 | ||||
| 3	1871	1888 | ||||
| 4	1889	1899 | ||||
| 5	1900	1906 | ||||
| 6	1907	1912 | ||||
| 7	1913	1917 | ||||
| 8	1918	1922 | ||||
| 9	1923	1926 | ||||
| 10	1927	1930 | ||||
| 11	1931	1934 | ||||
| 12	1935	1938 | ||||
| 13	1939	1941 | ||||
| 14	1942	1944 | ||||
| 15	1945	1947 | ||||
| 16	1948	1950 | ||||
| 17	1951	1953 | ||||
| 18	1954	1956 | ||||
| 19	1957	1959 | ||||
| 20	1960	1962 | ||||
| 21	1963	1965 | ||||
| 22	1966	1967 | ||||
| 23	1968	1969 | ||||
| 24	1970	1971 | ||||
| 25	1972	1973 | ||||
| 26	1974	1975 | ||||
| 27	1976	1977 | ||||
| 28	1978	1979 | ||||
| 29	1980	1981 | ||||
| 30	1982	1983 | ||||
| 31	1984	1985 | ||||
| 32	1986	1987 | ||||
| 33	1988	1989 | ||||
| 34	1990	1991 | ||||
| 35	1992	1993 | ||||
| 36	1994	1994 | ||||
| 37	1995	1996 | ||||
| 38	1997	1997 | ||||
| 39	1998	1998 | ||||
| 40	1999	1999 | ||||
| 41	2000	2000 | ||||
| 42	2001	2001 | ||||
| 43	2002	2002 | ||||
| 44	2003	2003 | ||||
| 45	2004	2004 | ||||
| 46	2005	2005 | ||||
| 47	2006	2006 | ||||
| 48	2007	2007 | ||||
| 49	2008	2008 | ||||
| 50	2009	2009 | ||||
| 51	2010	2010 | ||||
| 52	2011	2011 | ||||
| 53	2012	2012 | ||||
| 54	2013	2013 | ||||
| 55	2014	2014 | ||||
| 56	2015	2015 | ||||
| 57	2016	2016 | ||||
| 58	2017	2017 | ||||
| 59	2018	2018 | ||||
							
								
								
									
data/yago11k_both/train.txt | 63925 | Normal file
File diff suppressed because it is too large

data/yago11k_both/train_type.txt | 16408 | Normal file
File diff suppressed because it is too large

data/yago11k_both/valid.txt | 7198 | Normal file
File diff suppressed because it is too large

icews14.out | 9483 | Normal file
File diff suppressed because it is too large

icews14_both.log | 3 | Normal file
							| @@ -0,0 +1,3 @@ | ||||
| nohup: ignoring input | ||||
| 2023-06-20 09:22:51,618 - [INFO] - {'dataset': 'icews14_both', 'name': 'icews14_both', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False} | ||||
| 2023-06-20 09:22:57,979 - [INFO] - [E:0| 0]: Train Loss:0.70005,  Val MRR:0.0, 	icews14_both | ||||
							
								
								
									
icews14_l2_1e-5.out | 4331 | Normal file
File diff suppressed because it is too large (last hunk shown below)
							| @@ -1964,3 +1964,25 @@ | ||||
| 2023-05-04 08:27:31,384 - fb_one_to_x - [INFO] - [E:34| 1500]: Train Loss:0.0027362,  Val MRR:0.33574, 	fb_one_to_x | ||||
| 2023-05-04 08:29:20,404 - fb_one_to_x - [INFO] - [E:34| 1600]: Train Loss:0.0027362,  Val MRR:0.33574, 	fb_one_to_x | ||||
| 2023-05-04 08:31:12,139 - fb_one_to_x - [INFO] - [E:34| 1700]: Train Loss:0.0027362,  Val MRR:0.33574, 	fb_one_to_x | ||||
| 2023-05-04 08:55:56,065 - fb_one_to_x - [INFO] - {'dataset': 'FB15k-237', 'name': 'fb_one_to_x', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.1, 'drop': 0.2, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True} | ||||
| 2023-05-04 08:56:07,953 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 0]	fb_one_to_x | ||||
| 2023-05-04 08:56:53,173 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 100]	fb_one_to_x | ||||
| 2023-05-04 08:57:20,187 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 0]	fb_one_to_x | ||||
| 2023-05-04 08:58:08,090 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 100]	fb_one_to_x | ||||
| 2023-05-04 08:58:36,338 - fb_one_to_x - [INFO] - [Evaluating Epoch 0 test]:  | ||||
| 	MRR: Tail : 0.43029, Head : 0.23256, Avg : 0.33142 | ||||
| 	MR: Tail : 149.91, Head : 288.48, Avg : 219.2 | ||||
| 	Hit-1: Tail : 0.33563, Head : 0.14883, Avg : 0.24223 | ||||
| 	Hit-3: Tail : 0.47068, Head : 0.25515, Avg : 0.36292 | ||||
| 	Hit-10: Tail : 0.61952, Head : 0.40096, Avg : 0.51024 | ||||
| 2023-05-04 09:03:55,555 - fb_one_to_x - [INFO] - {'dataset': 'FB15k-237', 'name': 'fb_one_to_x', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.1, 'drop': 0.2, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True} | ||||
| 2023-05-04 09:04:07,491 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 0]	fb_one_to_x | ||||
| 2023-05-04 09:04:52,620 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 100]	fb_one_to_x | ||||
| 2023-05-04 09:05:19,645 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 0]	fb_one_to_x | ||||
| 2023-05-04 09:06:07,591 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 100]	fb_one_to_x | ||||
| 2023-05-04 09:06:35,660 - fb_one_to_x - [INFO] - [Evaluating Epoch 0 test]:  | ||||
| 	MRR: Tail : 0.43029, Head : 0.23256, Avg : 0.33142 | ||||
| 	MR: Tail : 149.91, Head : 288.48, Avg : 219.2 | ||||
| 	Hit-1: Tail : 0.33563, Head : 0.14883, Avg : 0.24223 | ||||
| 	Hit-3: Tail : 0.47068, Head : 0.25515, Avg : 0.36292 | ||||
| 	Hit-10: Tail : 0.61952, Head : 0.40096, Avg : 0.51024 | ||||
|   | ||||
							
								
								
									
log/ice00001 | 14945 | Normal file
File diff suppressed because it is too large

log/ice0003 | 4904 | Normal file
File diff suppressed because it is too large

log/ice0003_2 | 6607 | Normal file
File diff suppressed because it is too large

log/ice001 | 6205 | Normal file
File diff suppressed because it is too large

log/ice14ws_128 | 9541 | Normal file
File diff suppressed because it is too large

log/iceboth | 4154 | Normal file
File diff suppressed because it is too large

log/icews14 | 9482 | Normal file
File diff suppressed because it is too large

log/icews14_128 | 1 | Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-13 03:52:44,141 - icews14_128 - [INFO] - {'dataset': 'icews14', 'name': 'icews14_128', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True, 'filtered': False} | ||||
							
								
								
									
log/icews14_both | 10670 | Normal file
File diff suppressed because it is too large

log/poofnet.log | 2 | Normal file
							| @@ -0,0 +1,2 @@ | ||||
| nohup: ignoring input | ||||
| python: can't open file 'run.py': [Errno 2] No such file or directory | ||||
							
								
								
									
log/testrun_227cb2f9 | 1 | Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:54:57,988 - testrun_227cb2f9 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_227cb2f9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
log/testrun_30d70322 | 1 | Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:23:34,181 - testrun_30d70322 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_30d70322', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
log/testrun_3212b281 | 1 | Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:53:01,668 - testrun_3212b281 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_3212b281', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
log/testrun_3dbc9e89 | 1 | Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-06 08:35:38,753 - testrun_3dbc9e89 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_3dbc9e89', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False} | ||||
							
								
								
									
log/testrun_43389ddf | 1 | Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:38:00,469 - testrun_43389ddf - [INFO] - {'dataset': 'icews14', 'name': 'testrun_43389ddf', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_47ede3b9    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:13:02,952 - testrun_47ede3b9 - [INFO] - {'dataset': 'FB15k-237', 'name': 'testrun_47ede3b9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_49495af8    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-06 08:37:18,939 - testrun_49495af8 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_49495af8', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False} | ||||
							
								
								
									
7877    log/testrun_4a235016    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
1    log/testrun_4f5d8391    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-06 08:35:13,356 - testrun_4f5d8391 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_4f5d8391', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False} | ||||
							
								
								
									
1    log/testrun_540f6a03    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-06 08:34:55,992 - testrun_540f6a03 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_540f6a03', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False} | ||||
							
								
								
									
1    log/testrun_5a901712    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 07:04:56,051 - testrun_5a901712 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5a901712', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
44    log/testrun_5cafe61a    Normal file
							| @@ -0,0 +1,44 @@ | ||||
| 2023-05-17 06:48:57,396 - testrun_5cafe61a - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5cafe61a', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
| 2023-05-17 06:49:44,802 - concurrent.futures - [ERROR] - exception calling callback for <Future at 0x7efb51b74160 state=finished raised BrokenProcessPool> | ||||
| joblib.externals.loky.process_executor._RemoteTraceback:  | ||||
| """ | ||||
| Traceback (most recent call last): | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 391, in _process_worker | ||||
|     call_item = call_queue.get(block=True, timeout=timeout) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/multiprocessing/queues.py", line 116, in get | ||||
|     return _ForkingPickler.loads(res) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/storage.py", line 222, in _load_from_bytes | ||||
|     return torch.load(io.BytesIO(b)) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 713, in load | ||||
|     return _legacy_load(opened_file, map_location, pickle_module, **pickle_load_args) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 930, in _legacy_load | ||||
|     result = unpickler.load() | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 876, in persistent_load | ||||
|     wrap_storage=restore_location(obj, location), | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 175, in default_restore_location | ||||
|     result = fn(storage, location) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 155, in _cuda_deserialize | ||||
|     return torch._UntypedStorage(obj.nbytes(), device=torch.device(location)) | ||||
| RuntimeError: CUDA out of memory. Tried to allocate 678.00 MiB (GPU 0; 31.72 GiB total capacity; 0 bytes already allocated; 593.94 MiB free; 0 bytes reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation.  See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF | ||||
| """ | ||||
|  | ||||
| The above exception was the direct cause of the following exception: | ||||
|  | ||||
| Traceback (most recent call last): | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/_base.py", line 26, in _invoke_callbacks | ||||
|     callback(self) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 385, in __call__ | ||||
|     self.parallel.dispatch_next() | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 834, in dispatch_next | ||||
|     if not self.dispatch_one_batch(self._original_iterator): | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 901, in dispatch_one_batch | ||||
|     self._dispatch(tasks) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 819, in _dispatch | ||||
|     job = self._backend.apply_async(batch, callback=cb) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/_parallel_backends.py", line 556, in apply_async | ||||
|     future = self._workers.submit(SafeFunction(func)) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/reusable_executor.py", line 176, in submit | ||||
|     return super().submit(fn, *args, **kwargs) | ||||
|   File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 1129, in submit | ||||
|     raise self._flags.broken | ||||
| joblib.externals.loky.process_executor.BrokenProcessPool: A task has failed to un-serialize. Please ensure that the arguments of the function are all picklable. | ||||
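The traceback above is what terminated this run: during the scikit-learn grid search, joblib's loky workers receive pickled CUDA tensors and fail while re-allocating them on the GPU (`_cuda_deserialize` raises CUDA out of memory), which then breaks the whole process pool. A minimal, self-contained sketch of the usual workaround — hand the workers plain CPU/numpy data and let each worker move its own batch to the device — is given below; it is illustrative only and not part of this repository:

import torch
from joblib import Parallel, delayed

def score(batch):
    # Each worker receives a plain numpy array and moves it to the GPU itself,
    # so no CUDA storage is ever pickled across the process boundary.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return torch.as_tensor(batch, device=device).sum().item()

data = torch.randn(4, 128)            # stays on the CPU
batches = [b.numpy() for b in data]   # pickled as numpy, not as CUDA tensors

print(Parallel(n_jobs=2)(delayed(score)(b) for b in batches))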
							
								
								
									
1    log/testrun_6fd94d59    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-06 08:34:33,652 - testrun_6fd94d59 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_6fd94d59', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False} | ||||
							
								
								
									
1    log/testrun_7c096a18    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:56:35,124 - testrun_7c096a18 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7c096a18', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_7fb885ee    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 07:13:14,777 - testrun_7fb885ee - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7fb885ee', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_8f32040f    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:59:35,220 - testrun_8f32040f - [INFO] - {'dataset': 'icews14', 'name': 'testrun_8f32040f', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_958ef154    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:16:45,427 - testrun_958ef154 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_958ef154', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
2    log/testrun_9acdfb58    Normal file
							| @@ -0,0 +1,2 @@ | ||||
| 2023-05-06 08:36:46,668 - testrun_9acdfb58 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_9acdfb58', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False} | ||||
| 2023-05-06 08:36:57,409 - testrun_9acdfb58 - [INFO] - [E:0| 0]: Train Loss:0.69813,  Val MRR:0.0, 	testrun_9acdfb58 | ||||
							
								
								
									
1    log/testrun_a051cf32    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:36:14,606 - testrun_a051cf32 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a051cf32', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_a06d39d0    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:13:16,274 - testrun_a06d39d0 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a06d39d0', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_aca2b734    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:41:20,654 - testrun_aca2b734 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_aca2b734', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_ad7a0edb    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:45:54,332 - testrun_ad7a0edb - [INFO] - {'dataset': 'icews14', 'name': 'testrun_ad7a0edb', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
7958    log/testrun_ae6f81ee    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
1    log/testrun_b381870f    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-30 17:54:20,857 - testrun_b381870f - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b381870f', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False} | ||||
							
								
								
									
2    log/testrun_b396dcde    Normal file
							| @@ -0,0 +1,2 @@ | ||||
| 2023-05-30 17:56:25,430 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False} | ||||
| 2023-05-30 17:57:00,673 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False, 'num_ent': 12554, 'num_rel': 423} | ||||
							
								
								
									
1    log/testrun_bbf65ab5    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:21:14,228 - testrun_bbf65ab5 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bbf65ab5', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_bfaa042b    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:50:58,251 - testrun_bfaa042b - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bfaa042b', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_c77a8ec3    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:37:11,288 - testrun_c77a8ec3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_c77a8ec3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_cb3528f3    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 07:08:13,688 - testrun_cb3528f3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cb3528f3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_cd333c33    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:25:12,047 - testrun_cd333c33 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cd333c33', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
2    log/testrun_d0367b19    Normal file
							| @@ -0,0 +1,2 @@ | ||||
| 2023-05-06 08:37:25,129 - testrun_d0367b19 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_d0367b19', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False} | ||||
| 2023-05-06 08:37:36,239 - testrun_d0367b19 - [INFO] - [E:0| 0]: Train Loss:0.69813,  Val MRR:0.0, 	testrun_d0367b19 | ||||
							
								
								
									
9001    log/testrun_d2ab6391    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
11836    log/testrun_e1726b98    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
1    log/testrun_f0394b3c    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:47:48,537 - testrun_f0394b3c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_f0394b3c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1    log/testrun_f42f568c    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-30 17:55:52,461 - testrun_f42f568c - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_f42f568c', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False} | ||||
							
								
								
									
1    log/testrun_fdb0e82c    Normal file
							| @@ -0,0 +1 @@ | ||||
| 2023-05-17 06:39:01,301 - testrun_fdb0e82c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_fdb0e82c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True} | ||||
							
								
								
									
1116    log/wikidata12k    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
2    log/wikidata12k_0.00003    Normal file
							| @@ -0,0 +1,2 @@ | ||||
| 2023-06-04 17:05:45,012 - wikidata12k_0.00003 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_0.00003', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False} | ||||
| 2023-06-04 17:06:06,702 - wikidata12k_0.00003 - [INFO] - [E:0| 0]: Train Loss:0.69813,  Val MRR:0.0, 	wikidata12k_0.00003 | ||||
							
								
								
									
4918    log/wikidata12k_0.001    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
15357    log/wikidata12k_1n    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
11565    log/wikidata12k_both    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
9241    log/yago11k    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
9654    log/yago11k_0.00003    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
9599    log/yago11k_0.0003    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
7233    log/yago11k_0.001    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
18847    log/yago11k_0.001.log    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
9169    log/yago11k_both    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
9162    log/yago11k_both_0.001    Normal file
        File diff suppressed because it is too large
											
										
									
								
							
							
								
								
									
65    main.py
							| @@ -3,9 +3,12 @@ import uuid | ||||
| import argparse | ||||
| import logging | ||||
| import logging.config | ||||
| import pandas as pd | ||||
| import sys | ||||
|  | ||||
| import torch | ||||
| import numpy as np | ||||
| import time | ||||
|  | ||||
| from collections import defaultdict as ddict | ||||
| from pprint import pprint | ||||
| @@ -17,11 +20,12 @@ from data_loader import TrainDataset, TestDataset | ||||
| from utils import get_logger, get_combined_results, set_gpu, prepare_env, set_seed | ||||
|  | ||||
| from models import ComplEx, ConvE, HypER, InteractE, FouriER, TuckER | ||||
| import traceback | ||||
|  | ||||
|  | ||||
| class Main(object): | ||||
|  | ||||
|     def __init__(self, params): | ||||
|     def __init__(self, params, logger): | ||||
|         """ | ||||
|         Constructor of the runner class | ||||
|         Parameters | ||||
| @@ -34,11 +38,9 @@ class Main(object): | ||||
|  | ||||
|         """ | ||||
|         self.p = params | ||||
|         self.logger = get_logger( | ||||
|             self.p.name, self.p.log_dir, self.p.config_dir) | ||||
|         self.logger = logger | ||||
|  | ||||
|         self.logger.info(vars(self.p)) | ||||
|         pprint(vars(self.p)) | ||||
|  | ||||
|         if self.p.gpu != '-1' and torch.cuda.is_available(): | ||||
|             self.device = torch.device('cuda') | ||||
| @@ -76,14 +78,14 @@ class Main(object): | ||||
|         ent_set, rel_set = OrderedSet(), OrderedSet() | ||||
|         for split in ['train', 'test', 'valid']: | ||||
|             for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)): | ||||
|                 sub, rel, obj = map(str.lower, line.strip().split('\t')) | ||||
|                 sub, rel, obj, *_ = map(str.lower, line.strip().split('\t')) | ||||
|                 ent_set.add(sub) | ||||
|                 rel_set.add(rel) | ||||
|                 ent_set.add(obj) | ||||
|          | ||||
|         self.ent2id = {} | ||||
|         for line in open('./data/{}/{}'.format(self.p.dataset, "entities.dict")): | ||||
|             id, ent = map(str.lower, line.strip().split('\t')) | ||||
|             id, ent = map(str.lower, line.replace('\xa0', '').strip().split('\t')) | ||||
|             self.ent2id[ent] = int(id) | ||||
|         self.rel2id = {} | ||||
|         for line in open('./data/{}/{}'.format(self.p.dataset, "relations.dict")): | ||||
| @@ -108,7 +110,7 @@ class Main(object): | ||||
|  | ||||
|         for split in ['train', 'test', 'valid']: | ||||
|             for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)): | ||||
|                 sub, rel, obj = map(str.lower, line.strip().split('\t')) | ||||
|                 sub, rel, obj, *_ = map(str.lower, line.replace('\xa0', '').strip().split('\t')) | ||||
|                 sub, rel, obj = self.ent2id[sub], self.rel2id[rel], self.ent2id[obj] | ||||
|                 self.data[split].append((sub, rel, obj)) | ||||
|  | ||||
| @@ -406,6 +408,13 @@ class Main(object): | ||||
|             train_iter = iter( | ||||
|                 self.data_iter['{}_{}'.format(split, mode.split('_')[0])]) | ||||
|  | ||||
|             sub_all = [] | ||||
|             obj_all = [] | ||||
|             rel_all = [] | ||||
|             target_score = [] | ||||
|             target_rank = [] | ||||
|             obj_pred = [] | ||||
|             obj_pred_score = [] | ||||
|             for step, batch in enumerate(train_iter): | ||||
|                 sub, rel, obj, label = self.read_batch(batch, split) | ||||
|                 pred = self.model.forward(sub, rel, None, 'one_to_n') | ||||
| @@ -413,9 +422,21 @@ class Main(object): | ||||
|                 target_pred = pred[b_range, obj] | ||||
|                 pred = torch.where(label.byte(), torch.zeros_like(pred), pred) | ||||
|                 pred[b_range, obj] = target_pred | ||||
|  | ||||
|                 highest = torch.argsort(pred, dim=1, descending=True)[:,0] | ||||
|                 highest_score = pred[b_range, highest] | ||||
|  | ||||
|                 ranks = 1 + torch.argsort(torch.argsort(pred, dim=1, | ||||
|                                           descending=True), dim=1, descending=False)[b_range, obj] | ||||
|  | ||||
|                 sub_all.extend(sub.cpu().numpy()) | ||||
|                 obj_all.extend(obj.cpu().numpy()) | ||||
|                 rel_all.extend(rel.cpu().numpy()) | ||||
|                 target_score.extend(target_pred.cpu().numpy()) | ||||
|                 target_rank.extend(ranks.cpu().numpy()) | ||||
|                 obj_pred.extend(highest.cpu().numpy()) | ||||
|                 obj_pred_score.extend(highest_score.cpu().numpy()) | ||||
|  | ||||
|                 ranks = ranks.float() | ||||
|                 results['count'] = torch.numel( | ||||
|                     ranks) + results.get('count', 0.0) | ||||
| @@ -430,7 +451,8 @@ class Main(object): | ||||
|                 if step % 100 == 0: | ||||
|                     self.logger.info('[{}, {} Step {}]\t{}'.format( | ||||
|                         split.title(), mode.title(), step, self.p.name)) | ||||
|  | ||||
|         df = pd.DataFrame({"sub":sub_all,"rel":rel_all,"obj":obj_all, "rank": target_rank,"score":target_score, "pred":obj_pred,"pred_score":obj_pred_score}) | ||||
|         df.to_csv(f"{self.p.name}_result.csv",header=True, index=False) | ||||
|         return results | ||||
|  | ||||
|     def run_epoch(self, epoch): | ||||
| @@ -456,7 +478,11 @@ class Main(object): | ||||
|                 batch, 'train') | ||||
|  | ||||
|             pred = self.model.forward(sub, rel, neg_ent, self.p.train_strategy) | ||||
|             loss = self.model.loss(pred, label, sub_samp) | ||||
|             try: | ||||
|                 loss = self.model.loss(pred, label, sub_samp) | ||||
|             except Exception as e: | ||||
|                 print(pred) | ||||
|                 raise e | ||||
|  | ||||
|             loss.backward() | ||||
|             self.optimizer.step() | ||||
| @@ -634,9 +660,10 @@ if __name__ == "__main__": | ||||
|     set_gpu(args.gpu) | ||||
|     set_seed(args.seed) | ||||
|  | ||||
|     model = Main(args) | ||||
|  | ||||
|     if (args.grid_search): | ||||
|          | ||||
|         model = Main(args) | ||||
|         from sklearn.model_selection import GridSearchCV | ||||
|         from skorch import NeuralNet | ||||
|  | ||||
| @@ -685,9 +712,27 @@ if __name__ == "__main__": | ||||
|             search = grid.fit(inputs, label) | ||||
|             print("BEST SCORE: ", search.best_score_) | ||||
|             print("BEST PARAMS: ", search.best_params_) | ||||
|     logger = get_logger( | ||||
|             args.name, args.log_dir, args.config_dir) | ||||
|     if (args.test_only): | ||||
|         model = Main(args, logger) | ||||
|         save_path = os.path.join('./torch_saved', args.name) | ||||
|         model.load_model(save_path) | ||||
|         model.evaluate('test') | ||||
|     else: | ||||
|         model = Main(args, logger) | ||||
|         model.fit() | ||||
|         # while True: | ||||
|         #     try: | ||||
|         #         model = Main(args, logger) | ||||
|         #         model.fit() | ||||
|         #     except Exception as e: | ||||
|         #         print(e) | ||||
|         #         traceback.print_exc() | ||||
|         #         try: | ||||
|         #             del model | ||||
|         #         except Exception: | ||||
|         #             pass | ||||
|         #         time.sleep(30) | ||||
|         #         continue | ||||
|         #     break | ||||
|   | ||||
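The evaluation changes above make main.py collect, for every test triple, the subject, relation, object, the rank and score of the true object, and the top-ranked prediction, and write them to `{name}_result.csv`. Below is a short sketch (not part of the patch; the file name is an example) of how ranking metrics can be recomputed offline from that CSV:

import pandas as pd

# Columns written by the evaluation loop: sub, rel, obj, rank, score, pred, pred_score.
df = pd.read_csv("testrun_example_result.csv")

mrr = (1.0 / df["rank"]).mean()
hits = {k: (df["rank"] <= k).mean() for k in (1, 3, 10)}

print(f"MRR: {mrr:.4f}")
for k, v in hits.items():
    print(f"Hits@{k}: {v:.4f}")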
							
								
								
									
392    models.py
							| @@ -1,15 +1,16 @@ | ||||
| import torch | ||||
| from torch import nn | ||||
| from torch import nn, einsum | ||||
| import torch.nn.functional as F | ||||
| import numpy as np | ||||
| from functools import partial | ||||
| from einops.layers.torch import Rearrange, Reduce | ||||
| from einops import rearrange, repeat | ||||
| from utils import * | ||||
| from layers import * | ||||
| from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD | ||||
| from timm.models.layers import DropPath, trunc_normal_ | ||||
| from timm.models.registry import register_model | ||||
| from timm.models.layers.helpers import to_2tuple | ||||
| from timm.layers.helpers import to_2tuple | ||||
|  | ||||
|  | ||||
| class ConvE(torch.nn.Module): | ||||
| @@ -557,6 +558,8 @@ class FouriER(torch.nn.Module): | ||||
|         z = self.forward_embeddings(y) | ||||
|         z = self.forward_tokens(z) | ||||
|         z = z.mean([-2, -1]) | ||||
|         if torch.isnan(z).any():  # torch-native NaN check also works on CUDA tensors | ||||
|             print("ZZZ") | ||||
|         z = self.norm(z) | ||||
|         x = self.head(z) | ||||
|         x = self.hidden_drop(x) | ||||
| @@ -707,6 +710,363 @@ def basic_blocks(dim, index, layers, | ||||
|  | ||||
|     return blocks | ||||
|  | ||||
| def window_partition(x, window_size): | ||||
|     """ | ||||
|     Args: | ||||
|         x: (B, H, W, C) | ||||
|         window_size (int): window size | ||||
|  | ||||
|     Returns: | ||||
|         windows: (num_windows*B, window_size, window_size, C) | ||||
|     """ | ||||
|     B, H, W, C = x.shape  # channel-last input, as documented above | ||||
|     x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) | ||||
|     windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) | ||||
|     return windows | ||||
|  | ||||
| class WindowAttention(nn.Module): | ||||
|     r""" Window based multi-head self attention (W-MSA) module with relative position bias. | ||||
|     It supports both of shifted and non-shifted window. | ||||
|  | ||||
|     Args: | ||||
|         dim (int): Number of input channels. | ||||
|         window_size (tuple[int]): The height and width of the window. | ||||
|         num_heads (int): Number of attention heads. | ||||
|         qkv_bias (bool, optional):  If True, add a learnable bias to query, key, value. Default: True | ||||
|         attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 | ||||
|         proj_drop (float, optional): Dropout ratio of output. Default: 0.0 | ||||
|         pretrained_window_size (tuple[int]): The height and width of the window in pre-training. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, dim, window_size, num_heads, qkv_bias=True, attn_drop=0., proj_drop=0., | ||||
|                  pretrained_window_size=[0, 0]): | ||||
|  | ||||
|         super().__init__() | ||||
|         self.dim = dim | ||||
|         self.window_size = window_size  # Wh, Ww | ||||
|         self.pretrained_window_size = pretrained_window_size | ||||
|         self.num_heads = num_heads | ||||
|  | ||||
|         self.logit_scale = nn.Parameter(torch.log(10 * torch.ones((num_heads, 1, 1))), requires_grad=True) | ||||
|  | ||||
|         # mlp to generate continuous relative position bias | ||||
|         self.cpb_mlp = nn.Sequential(nn.Linear(2, 512, bias=True), | ||||
|                                      nn.ReLU(inplace=True), | ||||
|                                      nn.Linear(512, num_heads, bias=False)) | ||||
|  | ||||
|         # get relative_coords_table | ||||
|         relative_coords_h = torch.arange(-(self.window_size[0] - 1), self.window_size[0], dtype=torch.float32) | ||||
|         relative_coords_w = torch.arange(-(self.window_size[1] - 1), self.window_size[1], dtype=torch.float32) | ||||
|         relative_coords_table = torch.stack( | ||||
|             torch.meshgrid([relative_coords_h, | ||||
|                             relative_coords_w])).permute(1, 2, 0).contiguous().unsqueeze(0)  # 1, 2*Wh-1, 2*Ww-1, 2 | ||||
|         if pretrained_window_size[0] > 0: | ||||
|             relative_coords_table[:, :, :, 0] /= (pretrained_window_size[0] - 1) | ||||
|             relative_coords_table[:, :, :, 1] /= (pretrained_window_size[1] - 1) | ||||
|         else: | ||||
|             relative_coords_table[:, :, :, 0] /= (self.window_size[0] - 1) | ||||
|             relative_coords_table[:, :, :, 1] /= (self.window_size[1] - 1) | ||||
|         relative_coords_table *= 8  # normalize to -8, 8 | ||||
|         relative_coords_table = torch.sign(relative_coords_table) * torch.log2( | ||||
|             torch.abs(relative_coords_table) + 1.0) / np.log2(8) | ||||
|  | ||||
|         self.register_buffer("relative_coords_table", relative_coords_table) | ||||
|  | ||||
|         # get pair-wise relative position index for each token inside the window | ||||
|         coords_h = torch.arange(self.window_size[0]) | ||||
|         coords_w = torch.arange(self.window_size[1]) | ||||
|         coords = torch.stack(torch.meshgrid([coords_h, coords_w]))  # 2, Wh, Ww | ||||
|         coords_flatten = torch.flatten(coords, 1)  # 2, Wh*Ww | ||||
|         relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :]  # 2, Wh*Ww, Wh*Ww | ||||
|         relative_coords = relative_coords.permute(1, 2, 0).contiguous()  # Wh*Ww, Wh*Ww, 2 | ||||
|         relative_coords[:, :, 0] += self.window_size[0] - 1  # shift to start from 0 | ||||
|         relative_coords[:, :, 1] += self.window_size[1] - 1 | ||||
|         relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 | ||||
|         relative_position_index = relative_coords.sum(-1)  # Wh*Ww, Wh*Ww | ||||
|         self.register_buffer("relative_position_index", relative_position_index) | ||||
|  | ||||
|         self.qkv = nn.Linear(dim, dim * 3, bias=False) | ||||
|         if qkv_bias: | ||||
|             self.q_bias = nn.Parameter(torch.zeros(dim)) | ||||
|             self.v_bias = nn.Parameter(torch.zeros(dim)) | ||||
|         else: | ||||
|             self.q_bias = None | ||||
|             self.v_bias = None | ||||
|         self.attn_drop = nn.Dropout(attn_drop) | ||||
|         self.proj = nn.Linear(dim, dim) | ||||
|         self.proj_drop = nn.Dropout(proj_drop) | ||||
|         self.softmax = nn.Softmax(dim=-1) | ||||
|  | ||||
|     def forward(self, x, mask=None): | ||||
|         """ | ||||
|         Args: | ||||
|             x: input features with shape of (num_windows*B, N, C) | ||||
|             mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None | ||||
|         """ | ||||
|         B_, N, C = x.shape | ||||
|         qkv_bias = None | ||||
|         if self.q_bias is not None: | ||||
|             qkv_bias = torch.cat((self.q_bias, torch.zeros_like(self.v_bias, requires_grad=False), self.v_bias)) | ||||
|         qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias) | ||||
|         qkv = qkv.reshape(B_, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) | ||||
|         q, k, v = qkv[0], qkv[1], qkv[2]  # make torchscript happy (cannot use tensor as tuple) | ||||
|  | ||||
|         # cosine attention | ||||
|         attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1)) | ||||
|         logit_scale = torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01)).item()).exp()  # scalar max avoids the hard-coded .cuda() device | ||||
|         attn = attn * logit_scale | ||||
|  | ||||
|         relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view(-1, self.num_heads) | ||||
|         relative_position_bias = relative_position_bias_table[self.relative_position_index.view(-1)].view( | ||||
|             self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1)  # Wh*Ww,Wh*Ww,nH | ||||
|         relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww | ||||
|         relative_position_bias = 16 * torch.sigmoid(relative_position_bias) | ||||
|         attn = attn + relative_position_bias.unsqueeze(0) | ||||
|  | ||||
|         if mask is not None: | ||||
|             nW = mask.shape[0] | ||||
|             attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0) | ||||
|             attn = attn.view(-1, self.num_heads, N, N) | ||||
|             attn = self.softmax(attn) | ||||
|         else: | ||||
|             attn = self.softmax(attn) | ||||
|  | ||||
|         attn = self.attn_drop(attn) | ||||
|  | ||||
|         x = (attn @ v).transpose(1, 2).reshape(B_, N, C) | ||||
|         x = self.proj(x) | ||||
|         x = self.proj_drop(x) | ||||
|         return x | ||||
|  | ||||
|     def extra_repr(self) -> str: | ||||
|         return f'dim={self.dim}, window_size={self.window_size}, ' \ | ||||
|                f'pretrained_window_size={self.pretrained_window_size}, num_heads={self.num_heads}' | ||||
|  | ||||
|     def flops(self, N): | ||||
|         # calculate flops for 1 window with token length of N | ||||
|         flops = 0 | ||||
|         # qkv = self.qkv(x) | ||||
|         flops += N * self.dim * 3 * self.dim | ||||
|         # attn = (q @ k.transpose(-2, -1)) | ||||
|         flops += self.num_heads * N * (self.dim // self.num_heads) * N | ||||
|         #  x = (attn @ v) | ||||
|         flops += self.num_heads * N * N * (self.dim // self.num_heads) | ||||
|         # x = self.proj(x) | ||||
|         flops += N * self.dim * self.dim | ||||
|         return flops | ||||
|      | ||||
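Even though the W-MSA path is commented out in PoolFormerBlock further down, this module can be exercised on its own to check shapes. A minimal sketch; the sizes are illustrative assumptions and the class definition above is assumed to be in scope:

import torch

attn = WindowAttention(dim=64, window_size=(4, 4), num_heads=4)
x = torch.randn(8, 4 * 4, 64)          # (num_windows*B, N, C): 8 windows of 16 tokens each
out = attn(x)                           # attention output keeps the input shape
assert out.shape == (8, 16, 64)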
| def window_reverse(windows, window_size, H, W): | ||||
|     """ | ||||
|     Args: | ||||
|         windows: (num_windows*B, window_size, window_size, C) | ||||
|         window_size (int): Window size | ||||
|         H (int): Height of image | ||||
|         W (int): Width of image | ||||
|  | ||||
|     Returns: | ||||
|         x: (B, H, W, C) | ||||
|     """ | ||||
|     B = int(windows.shape[0] / (H * W / window_size / window_size)) | ||||
|     x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1) | ||||
|     x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)  # (B, H, W, C), as documented above | ||||
|     return x | ||||
|  | ||||
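With both helpers following the channel-last layout stated in their docstrings, window_partition and window_reverse are exact inverses. A minimal round-trip sketch (shapes are illustrative assumptions):

import torch

x = torch.randn(2, 8, 8, 32)                        # (B, H, W, C) with H, W divisible by the window size
windows = window_partition(x, window_size=4)        # -> (8, 4, 4, 32): 2 batches x 4 windows each
restored = window_reverse(windows, 4, H=8, W=8)     # -> (2, 8, 8, 32)
assert torch.equal(restored, x)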
| def cast_tuple(val, length = 1): | ||||
|     return val if isinstance(val, tuple) else ((val,) * length) | ||||
|  | ||||
| # helper classes | ||||
|  | ||||
| class ChanLayerNorm(nn.Module): | ||||
|     def __init__(self, dim, eps = 1e-5): | ||||
|         super().__init__() | ||||
|         self.eps = eps | ||||
|         self.g = nn.Parameter(torch.ones(1, dim, 1, 1)) | ||||
|         self.b = nn.Parameter(torch.zeros(1, dim, 1, 1)) | ||||
|  | ||||
|     def forward(self, x): | ||||
|         var = torch.var(x, dim = 1, unbiased = False, keepdim = True) | ||||
|         mean = torch.mean(x, dim = 1, keepdim = True) | ||||
|         return (x - mean) / (var + self.eps).sqrt() * self.g + self.b | ||||
|  | ||||
| class OverlappingPatchEmbed(nn.Module): | ||||
|     def __init__(self, dim_in, dim_out, stride = 2): | ||||
|         super().__init__() | ||||
|         kernel_size = stride * 2 - 1 | ||||
|         padding = kernel_size // 2 | ||||
|         self.conv = nn.Conv2d(dim_in, dim_out, kernel_size, stride = stride, padding = padding) | ||||
|  | ||||
|     def forward(self, x): | ||||
|         return self.conv(x) | ||||
|  | ||||
| class PEG(nn.Module): | ||||
|     def __init__(self, dim, kernel_size = 3): | ||||
|         super().__init__() | ||||
|         self.proj = nn.Conv2d(dim, dim, kernel_size = kernel_size, padding = kernel_size // 2, groups = dim, stride = 1) | ||||
|  | ||||
|     def forward(self, x): | ||||
|         return self.proj(x) + x | ||||
|  | ||||
| # feedforward | ||||
|  | ||||
| class FeedForwardDSSA(nn.Module): | ||||
|     def __init__(self, dim, mult = 4, dropout = 0.): | ||||
|         super().__init__() | ||||
|         inner_dim = int(dim * mult) | ||||
|         self.net = nn.Sequential( | ||||
|             ChanLayerNorm(dim), | ||||
|             nn.Conv2d(dim, inner_dim, 1), | ||||
|             nn.GELU(), | ||||
|             nn.Dropout(dropout), | ||||
|             nn.Conv2d(inner_dim, dim, 1), | ||||
|             nn.Dropout(dropout) | ||||
|         ) | ||||
|     def forward(self, x): | ||||
|         return self.net(x) | ||||
|  | ||||
| # attention | ||||
|  | ||||
| class DSSA(nn.Module): | ||||
|     def __init__( | ||||
|         self, | ||||
|         dim, | ||||
|         heads = 8, | ||||
|         dim_head = 32, | ||||
|         dropout = 0., | ||||
|         window_size = 7 | ||||
|     ): | ||||
|         super().__init__() | ||||
|         self.heads = heads | ||||
|         self.scale = dim_head ** -0.5 | ||||
|         self.window_size = window_size | ||||
|         inner_dim = dim_head * heads | ||||
|  | ||||
|         self.norm = ChanLayerNorm(dim) | ||||
|  | ||||
|         self.attend = nn.Sequential( | ||||
|             nn.Softmax(dim = -1), | ||||
|             nn.Dropout(dropout) | ||||
|         ) | ||||
|  | ||||
|         self.to_qkv = nn.Conv1d(dim, inner_dim * 3, 1, bias = False) | ||||
|  | ||||
|         # window tokens | ||||
|  | ||||
|         self.window_tokens = nn.Parameter(torch.randn(dim)) | ||||
|  | ||||
|         # prenorm and non-linearity for window tokens | ||||
|         # then projection to queries and keys for window tokens | ||||
|  | ||||
|         self.window_tokens_to_qk = nn.Sequential( | ||||
|             nn.LayerNorm(dim_head), | ||||
|             nn.GELU(), | ||||
|             Rearrange('b h n c -> b (h c) n'), | ||||
|             nn.Conv1d(inner_dim, inner_dim * 2, 1), | ||||
|             Rearrange('b (h c) n -> b h n c', h = heads), | ||||
|         ) | ||||
|  | ||||
|         # window attention | ||||
|  | ||||
|         self.window_attend = nn.Sequential( | ||||
|             nn.Softmax(dim = -1), | ||||
|             nn.Dropout(dropout) | ||||
|         ) | ||||
|  | ||||
|         self.to_out = nn.Sequential( | ||||
|             nn.Conv2d(inner_dim, dim, 1), | ||||
|             nn.Dropout(dropout) | ||||
|         ) | ||||
|  | ||||
|     def forward(self, x): | ||||
|         """ | ||||
|         einstein notation | ||||
|  | ||||
|         b - batch | ||||
|         c - channels | ||||
|         w1 - window size (height) | ||||
|         w2 - also window size (width) | ||||
|         i - sequence dimension (source) | ||||
|         j - sequence dimension (target dimension to be reduced) | ||||
|         h - heads | ||||
|         x - height of feature map divided by window size | ||||
|         y - width of feature map divided by window size | ||||
|         """ | ||||
|  | ||||
|         batch, height, width, heads, wsz = x.shape[0], *x.shape[-2:], self.heads, self.window_size | ||||
|         assert (height % wsz) == 0 and (width % wsz) == 0, f'height {height} and width {width} must be divisible by window size {wsz}' | ||||
|         num_windows = (height // wsz) * (width // wsz) | ||||
|  | ||||
|         x = self.norm(x) | ||||
|  | ||||
|         # fold in windows for "depthwise" attention - not sure why it is named depthwise when it is just "windowed" attention | ||||
|  | ||||
|         x = rearrange(x, 'b c (h w1) (w w2) -> (b h w) c (w1 w2)', w1 = wsz, w2 = wsz) | ||||
|  | ||||
|         # add windowing tokens | ||||
|  | ||||
|         w = repeat(self.window_tokens, 'c -> b c 1', b = x.shape[0]) | ||||
|         x = torch.cat((w, x), dim = -1) | ||||
|  | ||||
|         # project for queries, keys, value | ||||
|  | ||||
|         q, k, v = self.to_qkv(x).chunk(3, dim = 1) | ||||
|  | ||||
|         # split out heads | ||||
|  | ||||
|         q, k, v = map(lambda t: rearrange(t, 'b (h d) ... -> b h (...) d', h = heads), (q, k, v)) | ||||
|  | ||||
|         # scale | ||||
|  | ||||
|         q = q * self.scale | ||||
|  | ||||
|         # similarity | ||||
|  | ||||
|         dots = einsum('b h i d, b h j d -> b h i j', q, k) | ||||
|  | ||||
|         # attention | ||||
|  | ||||
|         attn = self.attend(dots) | ||||
|  | ||||
|         # aggregate values | ||||
|  | ||||
|         out = torch.matmul(attn, v) | ||||
|  | ||||
|         # split out windowed tokens | ||||
|  | ||||
|         window_tokens, windowed_fmaps = out[:, :, 0], out[:, :, 1:] | ||||
|  | ||||
|         # early return if there is only 1 window | ||||
|  | ||||
|         if num_windows == 1: | ||||
|             fmap = rearrange(windowed_fmaps, '(b x y) h (w1 w2) d -> b (h d) (x w1) (y w2)', x = height // wsz, y = width // wsz, w1 = wsz, w2 = wsz) | ||||
|             return self.to_out(fmap) | ||||
|  | ||||
|         # carry out the pointwise attention, the main novelty in the paper | ||||
|  | ||||
|         window_tokens = rearrange(window_tokens, '(b x y) h d -> b h (x y) d', x = height // wsz, y = width // wsz) | ||||
|         windowed_fmaps = rearrange(windowed_fmaps, '(b x y) h n d -> b h (x y) n d', x = height // wsz, y = width // wsz) | ||||
|  | ||||
|         # windowed queries and keys (preceded by prenorm activation) | ||||
|  | ||||
|         w_q, w_k = self.window_tokens_to_qk(window_tokens).chunk(2, dim = -1) | ||||
|  | ||||
|         # scale | ||||
|  | ||||
|         w_q = w_q * self.scale | ||||
|  | ||||
|         # similarities | ||||
|  | ||||
|         w_dots = einsum('b h i d, b h j d -> b h i j', w_q, w_k) | ||||
|  | ||||
|         w_attn = self.window_attend(w_dots) | ||||
|  | ||||
|         # aggregate the feature maps from the "depthwise" attention step (the most interesting part of the paper, one i haven't seen before) | ||||
|  | ||||
|         aggregated_windowed_fmap = einsum('b h i j, b h j w d -> b h i w d', w_attn, windowed_fmaps) | ||||
|  | ||||
|         # fold back the windows and then combine heads for aggregation | ||||
|  | ||||
|         fmap = rearrange(aggregated_windowed_fmap, 'b h (x y) (w1 w2) d -> b (h d) (x w1) (y w2)', x = height // wsz, y = width // wsz, w1 = wsz, w2 = wsz) | ||||
|         return self.to_out(fmap) | ||||
|  | ||||
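The docstring above spells out the einstein-notation axes; as a quick sanity check, the block preserves the (B, C, H, W) shape whenever H and W are divisible by the window size. A minimal smoke-test sketch, with all dimensions chosen purely for illustration and the definitions above assumed to be in scope:

import torch

dssa = DSSA(dim=64, heads=4, dim_head=16, window_size=4)
ff = FeedForwardDSSA(dim=64)

x = torch.randn(2, 64, 16, 16)          # (B, C, H, W); 16 is divisible by the 4x4 window
y = ff(dssa(x))                          # same ordering the modified PoolFormerBlock chains them in
assert y.shape == x.shape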
| class PoolFormerBlock(nn.Module): | ||||
|     """ | ||||
| @@ -731,7 +1091,15 @@ class PoolFormerBlock(nn.Module): | ||||
|  | ||||
|         self.norm1 = norm_layer(dim) | ||||
|         #self.token_mixer = Pooling(pool_size=pool_size) | ||||
|         self.token_mixer = FNetBlock() | ||||
|         # self.token_mixer = FNetBlock() | ||||
|         self.window_size = 4 | ||||
|         self.attn_heads = 4 | ||||
|         self.attn_mask = None | ||||
|         # self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(self.window_size), num_heads=4) | ||||
|         self.token_mixer = nn.ModuleList([ | ||||
|             DSSA(dim, heads=self.attn_heads, window_size=self.window_size), | ||||
|             FeedForwardDSSA(dim) | ||||
|         ]) | ||||
|         self.norm2 = norm_layer(dim) | ||||
|         mlp_hidden_dim = int(dim * mlp_ratio) | ||||
|         self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim,  | ||||
| @@ -748,16 +1116,26 @@ class PoolFormerBlock(nn.Module): | ||||
|                 layer_scale_init_value * torch.ones((dim)), requires_grad=True) | ||||
|  | ||||
|     def forward(self, x): | ||||
|         B, C, H, W = x.shape | ||||
|         # x_windows = window_partition(x, self.window_size) | ||||
|         # x_windows = x_windows.view(-1, self.window_size * self.window_size, C) | ||||
|         # attn_windows = self.token_mixer(x_windows, mask=self.attn_mask) | ||||
|         # attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C) | ||||
|         # x_attn = window_reverse(attn_windows, self.window_size, H, W) | ||||
|         # token_mixer is now a ModuleList (DSSA followed by its feed-forward), so apply the stages in order | ||||
|         x_attn = x | ||||
|         for mixer in self.token_mixer: | ||||
|             x_attn = mixer(x_attn) | ||||
|         if self.use_layer_scale: | ||||
|             x = x + self.drop_path( | ||||
|                 self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) | ||||
|                 * self.token_mixer(self.norm1(x))) | ||||
|                 * x_attn) | ||||
|             x = x + self.drop_path( | ||||
|                 self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) | ||||
|                 * self.mlp(self.norm2(x))) | ||||
|         else: | ||||
|             x = x + self.drop_path(self.token_mixer(self.norm1(x))) | ||||
|             x = x + self.drop_path(x_attn) | ||||
|             x = x + self.drop_path(self.mlp(self.norm2(x))) | ||||
|  | ||||
|         if torch.isnan(x).any():  # torch-native NaN check also works on CUDA tensors | ||||
|             print("PFBlock") | ||||
|         return x | ||||
| class PatchEmbed(nn.Module): | ||||
|     """ | ||||
| @@ -843,7 +1221,7 @@ class LayerNormChannel(nn.Module): | ||||
|             + self.bias.unsqueeze(-1).unsqueeze(-1) | ||||
|         return x | ||||
|  | ||||
| class FeedForward(nn.Module): | ||||
| class FeedForwardFNet(nn.Module): | ||||
|     def __init__(self, dim, hidden_dim, dropout = 0.): | ||||
|         super().__init__() | ||||
|         self.net = nn.Sequential( | ||||
| @@ -879,7 +1257,7 @@ class FNet(nn.Module): | ||||
|         for _ in range(depth): | ||||
|             self.layers.append(nn.ModuleList([ | ||||
|                 PreNorm(dim, FNetBlock()), | ||||
|                 PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) | ||||
|                 PreNorm(dim, FeedForwardFNet(dim, mlp_dim, dropout = dropout)) | ||||
|             ])) | ||||
|     def forward(self, x): | ||||
|         for attn, ff in self.layers: | ||||
|   | ||||
| @@ -2,3 +2,5 @@ torch==1.12.1+cu116 | ||||
| ordered-set==4.1.0 | ||||
| numpy==1.21.5 | ||||
| einops==0.4.1 | ||||
| pandas | ||||
| timm==0.9.16 | ||||
run.sh (17 changed lines)
							| @@ -24,3 +24,20 @@ PID: 4503 | ||||
| test: testrun_d542676f | ||||
| --- | ||||
| nohup python main.py --gpu 3 --data WN18RR --drop 0.0 --drop_path 0.0 >run_log/fnet-wn.log 2>&1 & | ||||
| --- | ||||
| nohup python main.py --name ice0003 --lr 0.0003 --data icews14 --gpu 1 >run_log/ice0003.log 2>&1 & | ||||
| PID: 3076 | ||||
| tail -f -n 200 run_log/ice0003.log | ||||
| --- | ||||
| nohup python main.py --name ice0003_2 --lr 0.00003 --data icews14 --gpu 3 >run_log/ice0003_2.log 2>&1 & | ||||
| PID: 3390 | ||||
| tail -f -n 200 run_log/ice0003_2.log | ||||
| --- | ||||
| nohup python main.py --name ice00001 --lr 0.00001 --data icews14 --gpu 2 >run_log/ice00001.log 2>&1 & | ||||
| PID: | ||||
|  | ||||
| --- | ||||
| nohup python main.py --name ice001 --lr 0.001 --data icews14 --gpu 3 >run_log/0.001.log 2>&1 & | ||||
| --- | ||||
| nohup python main.py --name iceboth --data icews14_both --gpu 0 >run_log/iceboth.log 2>&1 & | ||||
| PID: 21984 | ||||
visualization_util.py (74 lines, new file)
							| @@ -0,0 +1,74 @@ | ||||
| import argparse | ||||
| import re | ||||
| import os | ||||
| import matplotlib.pyplot as plt | ||||
| import numpy as np | ||||
| from datetime import datetime | ||||
|  | ||||
| def extract_learning_curves(args): | ||||
|     paths = args.log_path.split(',') | ||||
|     if len(paths) == 1 and os.path.isdir(paths[0]): | ||||
|         paths = [os.path.join(paths[0], f) for f in os.listdir(paths[0]) if os.path.isfile(os.path.join(paths[0], f))] | ||||
|     learning_curves = {} | ||||
|     print(paths) | ||||
|     for path in paths: | ||||
|         print(path) | ||||
|         learning_curve = [] | ||||
|         lines = open(path, 'r').readlines() | ||||
|         last_epoch = -1 | ||||
|         stacked_epoch = -1 | ||||
|         max_epoch = -1 | ||||
|         for line in lines: | ||||
|             matched = re.match(r'[0-9\- :,]*\[INFO\] - \[Epoch ([0-9]+)\].*Valid MRR: ([0-9\.]+).*', line) | ||||
|             # matched = re.match(r'\tMRR: Tail : [0-9\.]+, Head : [0-9\.]+, Avg : ([0-9\.]+)', line) | ||||
|             if matched: | ||||
|                 this_epoch = int(matched.group(1)) | ||||
|                 if (this_epoch > max_epoch): | ||||
|                     learning_curve.append(float(matched.group(2))) | ||||
|                     max_epoch = this_epoch | ||||
|                     stacked_epoch = this_epoch | ||||
|                 elif (this_epoch < max_epoch and this_epoch > last_epoch): | ||||
|                     last_epoch = this_epoch | ||||
|                     max_epoch = stacked_epoch + 1 + this_epoch | ||||
|                     learning_curve.append(float(matched.group(2))) | ||||
|                 if max_epoch >= args.num_epochs: | ||||
|                     break | ||||
|             # if matched: | ||||
|             #     max_epoch += 1 | ||||
|             #     learning_curve.append(float(matched.group(1))) | ||||
|             #     if max_epoch >= args.num_epochs: | ||||
|             #         break | ||||
|         # pad shorter runs with their last value so every curve spans num_epochs points | ||||
|         while learning_curve and len(learning_curve) < args.num_epochs: | ||||
|             learning_curve.append(learning_curve[-1]) | ||||
|         learning_curves[os.path.basename(path)] = learning_curve | ||||
|     return learning_curves | ||||
|  | ||||
| def draw_learning_curves(args, learning_curves): | ||||
|     for name in learning_curves.keys(): | ||||
|         epochs = np.arange(len(learning_curves[name])) | ||||
|         matched = re.match(r'(.*)\..*', name) | ||||
|         if matched: | ||||
|             label = matched.group(1) | ||||
|         else: | ||||
|             label = name | ||||
|         plt.plot(epochs, learning_curves[name], label = label) | ||||
|     plt.xlabel("Epochs") | ||||
|     plt.ylabel("Best Valid MRR") | ||||
|     plt.legend(title=args.legend_title) | ||||
|     plt.savefig(os.path.join(args.out_path, str(round(datetime.utcnow().timestamp() * 1000)) + '.' + args.fig_filetype)) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Parser For Arguments", formatter_class=argparse.ArgumentDefaultsHelpFormatter) | ||||
|     parser.add_argument('--task', default = None, type=str) | ||||
|     parser.add_argument('--log_path', type=str, default=None) | ||||
|     parser.add_argument('--out_path', type=str, default=None) | ||||
|     parser.add_argument('--num_epochs', type=int, default=200) | ||||
|     parser.add_argument('--legend_title', type=str, default="Learning rate") | ||||
|     parser.add_argument('--fig_filetype', type=str, default="svg") | ||||
|  | ||||
|     args = parser.parse_args() | ||||
|  | ||||
|     if (args.task == 'learning_curve'): | ||||
|         draw_learning_curves(args, extract_learning_curves(args)) | ||||
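The script is driven entirely by the argparse flags above. An illustrative way to call it programmatically (the log and output paths are placeholders, and importing the module as visualization_util is an assumption):

import argparse
from visualization_util import extract_learning_curves, draw_learning_curves

args = argparse.Namespace(task='learning_curve', log_path='run_log', out_path='figs',
                          num_epochs=200, legend_title='Learning rate', fig_filetype='svg')
draw_learning_curves(args, extract_learning_curves(args))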
wikidata12k.log (1072 lines, new file; diff not shown because the file is too large)
wikidata12k_1n.out (15209 lines, new file; diff not shown because the file is too large)
yago11k.out (9207 lines, new file; diff not shown because the file is too large)