Compare commits: sep_vit...tourier_sp (1 commit)

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 45ce0c995b |  |
@@ -1,15 +1,15 @@
# triples: 291818
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 19271
# valid triples: 20208
# train triples: 252339
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: inter
nonames: False
File diff suppressed because it is too large
@@ -1,423 +1,423 @@
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]
File diff suppressed because it is too large
@@ -1,71 +1,71 @@
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021
504678 data/wikidata12k/train.txt
File diff suppressed because it is too large
15 data/wikidata12k_old/about.txt Normal file
@@ -0,0 +1,15 @@
# triples: 291818
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 19271
# valid triples: 20208
# train triples: 252339
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: inter
nonames: False
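As a quick consistency check on the about.txt stats above: the train, valid, and test counts sum to the reported total (252339 + 20208 + 19271 = 291818 here, and 63925 + 7198 + 6909 = 78032 for the yago stats further down). A minimal sketch of that check in Python, assuming only the `key: value` layout shown in the hunk above (`read_about` is a hypothetical helper, not part of the repo):

```python
# Sanity-check an about.txt: the train/valid/test triple counts
# should sum to the reported "# triples" total.
def read_about(path: str) -> dict:
    stats = {}
    with open(path) as f:
        for line in f:
            if ":" in line:
                key, _, value = line.partition(":")
                # "# triples" -> "triples"; "Target Size " -> "Target Size"
                stats[key.strip().lstrip("#").strip()] = value.strip()
    return stats

stats = read_about("data/wikidata12k_old/about.txt")
total = int(stats["triples"])
parts = sum(int(stats[k]) for k in ("train triples", "valid triples", "test triples"))
assert parts == total, f"splits sum to {parts}, expected {total}"
print(f"OK: {parts} == {total}")  # OK: 291818 == 291818
```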
12554 data/wikidata12k_old/entities.dict Normal file
File diff suppressed because it is too large
1820 data/wikidata12k_old/indices_test.txt Normal file
File diff suppressed because it is too large
13036 data/wikidata12k_old/indices_train.txt Normal file
File diff suppressed because it is too large
1796 data/wikidata12k_old/indices_valid.txt Normal file
File diff suppressed because it is too large
12554 data/wikidata12k_old/raw_entity2id.txt Normal file
File diff suppressed because it is too large
24 data/wikidata12k_old/raw_rel2id.txt Normal file
@@ -0,0 +1,24 @@
P1376 0
P512 4
P579 3
P150 18
P190 5
P551 19
P131 1
P793 21
P1435 13
P39 14
P17 6
P54 22
P31 15
P6 7
P1411 20
P2962 2
P463 9
P1346 16
P108 10
P69 23
P166 17
P102 11
P27 12
P26 8
4062 data/wikidata12k_old/raw_test.txt Normal file
File diff suppressed because it is too large
423 data/wikidata12k_old/relations.dict Normal file
@@ -0,0 +1,423 @@
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]
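Each relations.dict row above pairs an integer id with what reads as a base relation plus the timestep window it was bucketed into, e.g. `236 P512[4-69]` for P512 active over timesteps 4 through 69. A minimal parsing sketch under that reading (the `load_relations` helper and the interval semantics are assumptions read off the listing, not confirmed by the diff):

```python
import re

# Parse "<id> <base>[<start>-<end>]" rows, e.g. "236 P512[4-69]".
# Also matches the yago-style bases like "<wasBornIn>[0-2]" further down.
ENTRY = re.compile(r"^(\d+)\s+(.+)\[(\d+)-(\d+)\]$")

def load_relations(path: str) -> dict:
    relations = {}
    with open(path) as f:
        for line in f:
            m = ENTRY.match(line.strip())
            if m:
                rid, base, start, end = m.groups()
                relations[int(rid)] = (base, int(start), int(end))
    return relations

rels = load_relations("data/wikidata12k_old/relations.dict")
print(len(rels), rels[236])  # per the listing above: 423 ('P512', 4, 69)
```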
19271 data/wikidata12k_old/test.txt Normal file
File diff suppressed because it is too large
71 data/wikidata12k_old/time_map.dict Normal file
@@ -0,0 +1,71 @@
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021
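The time_map.dict rows above read as `timestep first_year last_year` with inclusive year ranges (timestep 1 covers 20-1643, timestep 69 covers 2018-2020). A small sketch, assuming that format, for mapping a calendar year back to its timestep (`year_to_timestep` is an illustrative helper, not repo code):

```python
# Load "timestep first_year last_year" rows and look up a year's timestep.
def load_time_map(path: str) -> list:
    spans = []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) == 3:
                t, first, last = map(int, parts)  # handles negatives like "-431"
                spans.append((t, first, last))
    return spans

def year_to_timestep(year: int, spans: list) -> int:
    for t, first, last in spans:
        if first <= year <= last:  # ranges are inclusive per the listing
            return t
    raise ValueError(f"year {year} outside mapped ranges")

spans = load_time_map("data/wikidata12k_old/time_map.dict")
print(year_to_timestep(1995, spans))  # per the row "46 1995 1995": 46
```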
252339 data/wikidata12k_old/train.txt Normal file
File diff suppressed because it is too large
20208 data/wikidata12k_old/valid.txt Normal file
File diff suppressed because it is too large
15 data/yago/about.txt Normal file
@@ -0,0 +1,15 @@
# triples: 78032
# entities: 10526
# relations: 177
# timesteps: 46
# test triples: 6909
# valid triples: 7198
# train triples: 63925
Measure method: N/A
Target Size : 0
Grow Factor: 0
Shrink Factor: 0
Epsilon Factor: 5.0
Search method: N/A
filter_dupes: inter
nonames: False
10526 data/yago/entities.dict Normal file
File diff suppressed because it is too large
177 data/yago/relations.dict Normal file
@@ -0,0 +1,177 @@
0 <wasBornIn>[0-2]
1 <wasBornIn>[2-5]
2 <wasBornIn>[5-7]
3 <wasBornIn>[7-10]
4 <wasBornIn>[10-12]
5 <wasBornIn>[12-15]
6 <wasBornIn>[15-17]
7 <wasBornIn>[17-20]
8 <wasBornIn>[20-22]
9 <wasBornIn>[22-25]
10 <wasBornIn>[25-27]
11 <wasBornIn>[27-30]
12 <wasBornIn>[30-32]
13 <wasBornIn>[32-35]
14 <wasBornIn>[35-45]
15 <wasBornIn>[52-52]
16 <diedIn>[0-3]
17 <diedIn>[3-5]
18 <diedIn>[5-7]
19 <diedIn>[7-10]
20 <diedIn>[10-12]
21 <diedIn>[12-14]
22 <diedIn>[14-17]
23 <diedIn>[17-19]
24 <diedIn>[19-21]
25 <diedIn>[21-23]
26 <diedIn>[23-25]
27 <diedIn>[25-27]
28 <diedIn>[27-29]
29 <diedIn>[29-32]
30 <diedIn>[32-34]
31 <diedIn>[34-36]
32 <diedIn>[36-38]
33 <diedIn>[38-40]
34 <diedIn>[40-42]
35 <diedIn>[42-44]
36 <diedIn>[44-47]
37 <diedIn>[47-49]
38 <diedIn>[49-51]
39 <diedIn>[51-53]
40 <diedIn>[53-55]
41 <diedIn>[55-57]
42 <diedIn>[59-59]
43 <worksAt>[0-3]
44 <worksAt>[3-5]
45 <worksAt>[5-7]
46 <worksAt>[7-10]
47 <worksAt>[10-12]
48 <worksAt>[12-14]
49 <worksAt>[14-17]
50 <worksAt>[17-19]
51 <worksAt>[19-21]
52 <worksAt>[21-23]
53 <worksAt>[23-25]
54 <worksAt>[25-27]
55 <worksAt>[27-29]
56 <worksAt>[29-32]
57 <worksAt>[32-34]
58 <worksAt>[34-36]
59 <worksAt>[36-40]
60 <worksAt>[40-42]
61 <worksAt>[42-47]
62 <worksAt>[47-53]
63 <worksAt>[59-59]
64 <playsFor>[0-3]
65 <playsFor>[3-5]
66 <playsFor>[5-23]
67 <playsFor>[23-25]
68 <playsFor>[25-27]
69 <playsFor>[27-29]
70 <playsFor>[29-32]
71 <playsFor>[32-34]
72 <playsFor>[34-36]
73 <playsFor>[36-38]
74 <playsFor>[38-40]
75 <playsFor>[40-42]
76 <playsFor>[42-44]
77 <playsFor>[44-47]
78 <playsFor>[47-51]
79 <playsFor>[59-59]
80 <hasWonPrize>[1-4]
81 <hasWonPrize>[4-6]
82 <hasWonPrize>[6-8]
83 <hasWonPrize>[8-11]
84 <hasWonPrize>[11-15]
85 <hasWonPrize>[15-18]
86 <hasWonPrize>[18-22]
87 <hasWonPrize>[22-26]
88 <hasWonPrize>[26-30]
89 <hasWonPrize>[30-33]
90 <hasWonPrize>[33-37]
91 <hasWonPrize>[37-47]
92 <hasWonPrize>[47-53]
93 <hasWonPrize>[59-59]
94 <isMarriedTo>[0-3]
95 <isMarriedTo>[3-5]
96 <isMarriedTo>[5-7]
97 <isMarriedTo>[7-10]
98 <isMarriedTo>[10-12]
99 <isMarriedTo>[12-14]
100 <isMarriedTo>[14-17]
101 <isMarriedTo>[17-19]
102 <isMarriedTo>[19-21]
103 <isMarriedTo>[21-23]
104 <isMarriedTo>[23-25]
105 <isMarriedTo>[25-27]
106 <isMarriedTo>[27-29]
107 <isMarriedTo>[29-32]
108 <isMarriedTo>[32-34]
109 <isMarriedTo>[34-38]
110 <isMarriedTo>[38-42]
111 <isMarriedTo>[42-47]
112 <isMarriedTo>[47-51]
113 <isMarriedTo>[51-55]
114 <isMarriedTo>[59-59]
115 <owns>[0-10]
116 <owns>[10-17]
117 <owns>[17-19]
118 <owns>[19-23]
119 <owns>[23-36]
120 <owns>[36-38]
121 <owns>[59-59]
122 <graduatedFrom>[0-3]
123 <graduatedFrom>[3-5]
124 <graduatedFrom>[5-7]
125 <graduatedFrom>[7-10]
126 <graduatedFrom>[10-14]
127 <graduatedFrom>[14-17]
128 <graduatedFrom>[17-19]
129 <graduatedFrom>[19-21]
130 <graduatedFrom>[21-23]
131 <graduatedFrom>[23-27]
132 <graduatedFrom>[27-32]
133 <graduatedFrom>[32-34]
134 <graduatedFrom>[34-38]
135 <graduatedFrom>[38-42]
136 <graduatedFrom>[59-59]
137 <isAffiliatedTo>[1-4]
138 <isAffiliatedTo>[4-6]
139 <isAffiliatedTo>[6-8]
140 <isAffiliatedTo>[8-11]
141 <isAffiliatedTo>[11-13]
142 <isAffiliatedTo>[13-15]
143 <isAffiliatedTo>[15-18]
144 <isAffiliatedTo>[18-20]
145 <isAffiliatedTo>[20-22]
146 <isAffiliatedTo>[22-24]
147 <isAffiliatedTo>[24-26]
148 <isAffiliatedTo>[26-28]
149 <isAffiliatedTo>[28-30]
150 <isAffiliatedTo>[30-33]
151 <isAffiliatedTo>[33-35]
152 <isAffiliatedTo>[35-37]
153 <isAffiliatedTo>[37-40]
154 <isAffiliatedTo>[40-42]
155 <isAffiliatedTo>[42-44]
156 <isAffiliatedTo>[44-47]
157 <isAffiliatedTo>[47-49]
158 <isAffiliatedTo>[49-51]
159 <isAffiliatedTo>[51-53]
160 <isAffiliatedTo>[53-55]
161 <isAffiliatedTo>[55-57]
162 <isAffiliatedTo>[59-59]
163 <created>[0-3]
164 <created>[3-5]
165 <created>[5-10]
166 <created>[10-12]
167 <created>[12-17]
168 <created>[17-19]
169 <created>[19-25]
170 <created>[25-29]
171 <created>[29-32]
172 <created>[32-36]
173 <created>[36-42]
174 <created>[42-47]
175 <created>[47-53]
176 <created>[59-59]
6909 data/yago/test.txt Normal file
File diff suppressed because it is too large
60 data/yago/time_map.dict Normal file
@@ -0,0 +1,60 @@
0 -431 1782
1 1783 1848
2 1849 1870
3 1871 1888
4 1889 1899
5 1900 1906
6 1907 1912
7 1913 1917
8 1918 1922
9 1923 1926
10 1927 1930
11 1931 1934
12 1935 1938
13 1939 1941
14 1942 1944
15 1945 1947
16 1948 1950
17 1951 1953
18 1954 1956
19 1957 1959
20 1960 1962
21 1963 1965
22 1966 1967
23 1968 1969
24 1970 1971
25 1972 1973
26 1974 1975
27 1976 1977
28 1978 1979
29 1980 1981
30 1982 1983
31 1984 1985
32 1986 1987
33 1988 1989
34 1990 1991
35 1992 1993
36 1994 1994
37 1995 1996
38 1997 1997
39 1998 1998
40 1999 1999
41 2000 2000
42 2001 2001
43 2002 2002
44 2003 2003
45 2004 2004
46 2005 2005
47 2006 2006
48 2007 2007
49 2008 2008
50 2009 2009
51 2010 2010
52 2011 2011
53 2012 2012
54 2013 2013
55 2014 2014
56 2015 2015
57 2016 2016
58 2017 2017
59 2018 2018
63925 data/yago/train.txt Normal file
File diff suppressed because it is too large
7198 data/yago/valid.txt Normal file
File diff suppressed because it is too large
793 data/yago11k/indices_test.txt Normal file
@@ -0,0 +1,793 @@
0
2
4
9
11
12
16
17
19
27
29
34
35
37
38
41
42
45
49
51
52
54
56
57
61
64
65
67
69
70
72
76
78
79
83
86
87
89
101
102
103
108
111
112
119
121
122
126
128
129
132
134
138
141
144
146
153
154
155
156
158
159
160
161
162
164
165
166
168
173
175
176
177
182
184
185
186
187
188
190
192
193
201
202
208
209
211
213
215
216
217
222
227
229
235
239
240
242
243
245
246
247
251
252
254
257
261
263
266
268
271
279
282
292
300
303
305
308
309
311
313
316
319
322
324
325
329
331
332
333
334
337
339
342
343
346
347
348
349
350
352
353
355
357
361
362
363
367
371
373
378
379
383
384
385
389
392
394
395
396
397
399
400
402
403
407
409
415
416
420
421
422
428
429
432
433
440
442
443
450
452
459
463
464
466
471
472
476
480
484
489
490
493
494
495
500
503
507
509
515
519
520
521
525
528
529
533
534
539
541
542
548
550
556
559
563
566
567
569
573
575
576
579
582
585
588
592
593
594
596
597
598
599
603
604
605
606
607
613
614
616
617
618
619
621
623
624
625
628
638
641
642
648
651
659
660
661
663
664
676
677
678
680
682
686
688
689
691
694
698
704
707
708
712
713
716
719
723
724
726
728
732
741
742
743
744
745
746
750
752
755
759
762
764
767
768
770
772
775
777
780
782
785
789
799
800
801
802
804
805
810
811
816
822
823
826
829
832
834
835
838
839
842
847
848
850
851
852
856
861
862
865
867
868
869
874
876
882
883
884
885
891
893
898
899
906
909
910
911
912
920
923
924
926
928
934
938
941
942
943
944
945
951
954
956
957
958
960
961
963
964
968
970
975
976
977
979
981
988
989
992
993
995
997
1005
1008
1009
1012
1013
1014
1015
1023
1029
1032
1038
1044
1045
1052
1053
1055
1057
1060
1061
1065
1066
1074
1077
1079
1080
1082
1083
1085
1086
1089
1090
1091
1095
1104
1107
1111
1114
1121
1124
1126
1127
1128
1131
1132
1139
1140
1142
1143
1145
1148
1150
1157
1163
1164
1168
1170
1171
1172
1173
1179
1182
1186
1189
1190
1191
1194
1196
1198
1201
1204
1206
1208
1217
1220
1223
1228
1231
1232
1235
1236
1237
1238
1240
1246
1247
1249
1252
1258
1260
1265
1266
1273
1274
1278
1279
1280
1284
1286
1287
1288
1289
1290
1293
1294
1295
1297
1298
1301
1303
1304
1305
1307
1308
1309
1314
1318
1319
1323
1325
1327
1328
1333
1337
1340
1341
1343
1345
1346
1347
1349
1350
1351
1358
1364
1365
1367
1368
1369
1370
1373
1375
1376
1378
1380
1381
1382
1385
1387
1390
1391
1394
1396
1397
1399
1400
1406
1409
1412
1416
1417
1418
1420
1423
1425
1428
1430
1431
1432
1437
1438
1439
1444
1447
1450
1454
1456
1457
1460
1464
1465
1469
1473
1474
1475
1477
1479
1488
1490
1493
1494
1497
1500
1502
1503
1504
1505
1507
1508
1510
1514
1515
1520
1522
1523
1526
1547
1549
1553
1556
1557
1558
1562
1563
1564
1565
1570
1571
1574
1575
1579
1591
1592
1594
1601
1604
1605
1606
1608
1609
1613
1618
1619
1620
1621
1632
1634
1635
1636
1642
1643
1648
1650
1652
1653
1660
1661
1662
1666
1669
1670
1676
1677
1682
1683
1690
1692
1693
1697
1698
1702
1703
1706
1709
1711
1713
1715
1717
1721
1724
1725
1729
1730
1733
1734
1735
1736
1741
1745
1746
1748
1749
1751
1755
1761
1763
1766
1767
1768
1769
1773
1775
1777
1778
1783
1789
1790
1792
1793
1795
1800
1803
1805
1809
1812
1815
1816
1819
1820
1822
1823
1824
1825
1828
1831
1833
1834
1835
1836
1837
1842
1848
1849
1852
1853
1854
1856
1857
1858
1859
1861
1864
1865
1869
1873
1874
1876
1877
1882
1883
1884
1885
1888
1889
1890
1892
1894
1896
1899
1902
1903
1905
1908
1910
1913
1914
1915
1920
1928
1931
1936
1938
1941
1942
1944
1946
1947
1948
1954
1956
1958
1961
1966
1968
1969
1971
1972
1977
1979
1985
1986
1987
1988
1989
1990
1999
2001
2005
2009
2010
2012
2013
2014
2015
2017
2018
2022
2023
2028
2032
2036
2037
2038
2041
2042
2043
2044
2045
2046
2048
7395  data/yago11k/indices_train.txt  Normal file
File diff suppressed because it is too large
809  data/yago11k/indices_valid.txt  Normal file
@@ -0,0 +1,809 @@
[diff body, table-rendering markup removed: 809 added lines, one integer index per line, 7 … 2048]
@@ -1,3 +0,0 @@
nohup: ignoring input
2023-06-20 09:22:51,618 - [INFO] - {'dataset': 'icews14_both', 'name': 'icews14_both', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-06-20 09:22:57,979 - [INFO] - [E:0| 0]: Train Loss:0.70005, Val MRR:0.0, icews14_both
14945  log/ice00001
File diff suppressed because it is too large
4904  log/ice0003
File diff suppressed because it is too large
6607  log/ice0003_2
File diff suppressed because it is too large
6205  log/ice001
File diff suppressed because it is too large
9541  log/ice14ws_128
File diff suppressed because it is too large
4154  log/iceboth
File diff suppressed because it is too large
9482  log/icews14
File diff suppressed because it is too large
@@ -1 +0,0 @@
2023-05-13 03:52:44,141 - icews14_128 - [INFO] - {'dataset': 'icews14', 'name': 'icews14_128', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True, 'filtered': False}
10670  log/icews14_both
File diff suppressed because it is too large
@@ -1,2 +0,0 @@
nohup: ignoring input
python: can't open file 'run.py': [Errno 2] No such file or directory
@@ -1 +0,0 @@
2023-05-17 06:54:57,988 - testrun_227cb2f9 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_227cb2f9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:23:34,181 - testrun_30d70322 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_30d70322', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:53:01,668 - testrun_3212b281 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_3212b281', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-06 08:35:38,753 - testrun_3dbc9e89 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_3dbc9e89', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
@@ -1 +0,0 @@
2023-05-17 06:38:00,469 - testrun_43389ddf - [INFO] - {'dataset': 'icews14', 'name': 'testrun_43389ddf', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:13:02,952 - testrun_47ede3b9 - [INFO] - {'dataset': 'FB15k-237', 'name': 'testrun_47ede3b9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-06 08:37:18,939 - testrun_49495af8 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_49495af8', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
7877  log/testrun_4a235016
File diff suppressed because it is too large
@@ -1 +0,0 @@
2023-05-06 08:35:13,356 - testrun_4f5d8391 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_4f5d8391', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
@@ -1 +0,0 @@
2023-05-06 08:34:55,992 - testrun_540f6a03 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_540f6a03', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
@@ -1 +0,0 @@
2023-05-17 07:04:56,051 - testrun_5a901712 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5a901712', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1,44 +0,0 @@
2023-05-17 06:48:57,396 - testrun_5cafe61a - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5cafe61a', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
2023-05-17 06:49:44,802 - concurrent.futures - [ERROR] - exception calling callback for <Future at 0x7efb51b74160 state=finished raised BrokenProcessPool>
joblib.externals.loky.process_executor._RemoteTraceback:
"""
Traceback (most recent call last):
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 391, in _process_worker
    call_item = call_queue.get(block=True, timeout=timeout)
  File "/opt/conda/envs/kgs2s/lib/python3.8/multiprocessing/queues.py", line 116, in get
    return _ForkingPickler.loads(res)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/storage.py", line 222, in _load_from_bytes
    return torch.load(io.BytesIO(b))
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 713, in load
    return _legacy_load(opened_file, map_location, pickle_module, **pickle_load_args)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 930, in _legacy_load
    result = unpickler.load()
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 876, in persistent_load
    wrap_storage=restore_location(obj, location),
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 175, in default_restore_location
    result = fn(storage, location)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 155, in _cuda_deserialize
    return torch._UntypedStorage(obj.nbytes(), device=torch.device(location))
RuntimeError: CUDA out of memory. Tried to allocate 678.00 MiB (GPU 0; 31.72 GiB total capacity; 0 bytes already allocated; 593.94 MiB free; 0 bytes reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation. See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF
"""

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/_base.py", line 26, in _invoke_callbacks
    callback(self)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 385, in __call__
    self.parallel.dispatch_next()
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 834, in dispatch_next
    if not self.dispatch_one_batch(self._original_iterator):
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 901, in dispatch_one_batch
    self._dispatch(tasks)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 819, in _dispatch
    job = self._backend.apply_async(batch, callback=cb)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/_parallel_backends.py", line 556, in apply_async
    future = self._workers.submit(SafeFunction(func))
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/reusable_executor.py", line 176, in submit
    return super().submit(fn, *args, **kwargs)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 1129, in submit
    raise self._flags.broken
joblib.externals.loky.process_executor.BrokenProcessPool: A task has failed to un-serialize. Please ensure that the arguments of the function are all picklable.
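The worker crash above happens because joblib workers unpickle tensors that were saved from CUDA straight back onto an already-exhausted GPU (note `_cuda_deserialize` in the inner traceback). A minimal sketch of the usual workaround, assuming nothing about this repo's code: force deserialization onto the CPU with `map_location`.

import io
import torch

def load_to_cpu(payload: bytes):
    # Map any CUDA storages inside the pickle onto the CPU, so a full GPU
    # cannot raise "CUDA out of memory" during deserialization alone.
    return torch.load(io.BytesIO(payload), map_location="cpu")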
@@ -1 +0,0 @@
2023-05-06 08:34:33,652 - testrun_6fd94d59 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_6fd94d59', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
@@ -1 +0,0 @@
2023-05-17 06:56:35,124 - testrun_7c096a18 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7c096a18', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 07:13:14,777 - testrun_7fb885ee - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7fb885ee', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:59:35,220 - testrun_8f32040f - [INFO] - {'dataset': 'icews14', 'name': 'testrun_8f32040f', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:16:45,427 - testrun_958ef154 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_958ef154', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1,2 +0,0 @@
2023-05-06 08:36:46,668 - testrun_9acdfb58 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_9acdfb58', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
2023-05-06 08:36:57,409 - testrun_9acdfb58 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, testrun_9acdfb58
@@ -1 +0,0 @@
2023-05-17 06:36:14,606 - testrun_a051cf32 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a051cf32', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:13:16,274 - testrun_a06d39d0 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a06d39d0', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:41:20,654 - testrun_aca2b734 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_aca2b734', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:45:54,332 - testrun_ad7a0edb - [INFO] - {'dataset': 'icews14', 'name': 'testrun_ad7a0edb', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
7958  log/testrun_ae6f81ee
File diff suppressed because it is too large
@@ -1 +0,0 @@
2023-05-30 17:54:20,857 - testrun_b381870f - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b381870f', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
@@ -1,2 +0,0 @@
2023-05-30 17:56:25,430 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-05-30 17:57:00,673 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False, 'num_ent': 12554, 'num_rel': 423}
@@ -1 +0,0 @@
2023-05-17 06:21:14,228 - testrun_bbf65ab5 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bbf65ab5', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:50:58,251 - testrun_bfaa042b - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bfaa042b', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:37:11,288 - testrun_c77a8ec3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_c77a8ec3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 07:08:13,688 - testrun_cb3528f3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cb3528f3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-17 06:25:12,047 - testrun_cd333c33 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cd333c33', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1,2 +0,0 @@
2023-05-06 08:37:25,129 - testrun_d0367b19 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_d0367b19', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
2023-05-06 08:37:36,239 - testrun_d0367b19 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, testrun_d0367b19
9001  log/testrun_d2ab6391
File diff suppressed because it is too large
11836  log/testrun_e1726b98
File diff suppressed because it is too large
@@ -1 +0,0 @@
2023-05-17 06:47:48,537 - testrun_f0394b3c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_f0394b3c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
@@ -1 +0,0 @@
2023-05-30 17:55:52,461 - testrun_f42f568c - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_f42f568c', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
@@ -1 +0,0 @@
2023-05-17 06:39:01,301 - testrun_fdb0e82c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_fdb0e82c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
1116  log/wikidata12k
File diff suppressed because it is too large
@@ -1,2 +0,0 @@
2023-06-04 17:05:45,012 - wikidata12k_0.00003 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_0.00003', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-06-04 17:06:06,702 - wikidata12k_0.00003 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, wikidata12k_0.00003
File diff suppressed because it is too large
15357  log/wikidata12k_1n
File diff suppressed because it is too large
11565  log/wikidata12k_both
File diff suppressed because it is too large
9241  log/yago11k
File diff suppressed because it is too large
9654  log/yago11k_0.00003
File diff suppressed because it is too large
9599  log/yago11k_0.0003
File diff suppressed because it is too large
7233  log/yago11k_0.001
File diff suppressed because it is too large
18847  log/yago11k_0.001.log
File diff suppressed because it is too large
9169  log/yago11k_both
File diff suppressed because it is too large
File diff suppressed because it is too large
86  main.py
@@ -3,12 +3,10 @@ import uuid
 import argparse
 import logging
 import logging.config
-import pandas as pd
-import sys
+import time
 
 import torch
 import numpy as np
-import time
 
 from collections import defaultdict as ddict
 from pprint import pprint
@@ -20,12 +18,11 @@ from data_loader import TrainDataset, TestDataset
 from utils import get_logger, get_combined_results, set_gpu, prepare_env, set_seed
 
 from models import ComplEx, ConvE, HypER, InteractE, FouriER, TuckER
-import traceback
 
 
 class Main(object):
 
-    def __init__(self, params, logger):
+    def __init__(self, params):
         """
         Constructor of the runner class
         Parameters
@@ -38,9 +35,11 @@ class Main(object):
 
         """
         self.p = params
-        self.logger = logger
+        self.logger = get_logger(
+            self.p.name, self.p.log_dir, self.p.config_dir)
 
         self.logger.info(vars(self.p))
+        pprint(vars(self.p))
 
         if self.p.gpu != '-1' and torch.cuda.is_available():
             self.device = torch.device('cuda')
@@ -85,7 +84,7 @@ class Main(object):
 
         self.ent2id = {}
         for line in open('./data/{}/{}'.format(self.p.dataset, "entities.dict")):
-            id, ent = map(str.lower, line.replace('\xa0', '').strip().split('\t'))
+            id, ent = map(str.lower, line.strip().split('\t'))
             self.ent2id[ent] = int(id)
         self.rel2id = {}
         for line in open('./data/{}/{}'.format(self.p.dataset, "relations.dict")):
@@ -109,14 +108,20 @@ class Main(object):
         sr2o = ddict(set)
 
         for split in ['train', 'test', 'valid']:
-            for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)):
-                sub, rel, obj, *_ = map(str.lower, line.replace('\xa0', '').strip().split('\t'))
+            samples = 0
+            for i, line in enumerate(open('./data/{}/{}.txt'.format(self.p.dataset, split))):
+                sub, rel, obj, rel_type, *_ = map(str.lower, line.strip().split('\t'))
+                if (split == 'test' and self.p.rel_type is not None):
+                    if rel_type != self.p.rel_type:
+                        continue
                 sub, rel, obj = self.ent2id[sub], self.rel2id[rel], self.ent2id[obj]
                 self.data[split].append((sub, rel, obj))
 
                 if split == 'train':
                     sr2o[(sub, rel)].add(obj)
                     sr2o[(obj, rel+self.p.num_rel)].add(sub)
+                samples += 1
+            print(split.capitalize() + ': ' + str(samples) + ' samples')
         self.data = dict(self.data)
 
         self.sr2o = {k: list(v) for k, v in sr2o.items()}
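For reference, the loading loop above now reads a fourth tab-separated column carrying each triple's relation type and, when --rel_type is set, keeps only the matching test triples. A self-contained sketch of the same filter; the file path and the value 'symmetric' are made up for illustration:

# Standalone sketch of the rel_type filter above; path and value are illustrative.
kept = []
with open('./data/yago11k/test.txt') as f:
    for line in f:
        sub, rel, obj, rel_type, *_ = map(str.lower, line.strip().split('\t'))
        if rel_type != 'symmetric':  # as if run with --rel_type symmetric
            continue
        kept.append((sub, rel, obj))
print('Test: ' + str(len(kept)) + ' samples')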
@@ -154,6 +159,8 @@ class Main(object):
                 {'triple': (obj, rel_inv, sub), 'label': self.sr2o_all[(obj, rel_inv)]})
 
         self.triples = dict(self.triples)
+        print(len(self.triples['test_head']))
+        print(len(self.triples['test_tail']))
 
         def get_data_loader(dataset_class, split, batch_size, shuffle=True):
             return DataLoader(
@@ -408,13 +415,6 @@ class Main(object):
             train_iter = iter(
                 self.data_iter['{}_{}'.format(split, mode.split('_')[0])])
 
-            sub_all = []
-            obj_all = []
-            rel_all = []
-            target_score = []
-            target_rank = []
-            obj_pred = []
-            obj_pred_score = []
             for step, batch in enumerate(train_iter):
                 sub, rel, obj, label = self.read_batch(batch, split)
                 pred = self.model.forward(sub, rel, None, 'one_to_n')
@@ -422,21 +422,9 @@ class Main(object):
                 target_pred = pred[b_range, obj]
                 pred = torch.where(label.byte(), torch.zeros_like(pred), pred)
                 pred[b_range, obj] = target_pred

-                highest = torch.argsort(pred, dim=1, descending=True)[:,0]
-                highest_score = pred[b_range, highest]

                 ranks = 1 + torch.argsort(torch.argsort(pred, dim=1,
                     descending=True), dim=1, descending=False)[b_range, obj]

-                sub_all.extend(sub.cpu().numpy())
-                obj_all.extend(obj.cpu().numpy())
-                rel_all.extend(rel.cpu().numpy())
-                target_score.extend(target_pred.cpu().numpy())
-                target_rank.extend(ranks.cpu().numpy())
-                obj_pred.extend(highest.cpu().numpy())
-                obj_pred_score.extend(highest_score.cpu().numpy())

                 ranks = ranks.float()
                 results['count'] = torch.numel(
                     ranks) + results.get('count', 0.0)
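The double argsort here is a compact rank computation: the inner argsort orders all entities by score, the outer one recovers each entity's position in that ordering, and indexing at obj reads off the 1-based, filtered rank of the gold object; the torch.where masking above already zeroed every other known-true triple so none of them can outrank it. A self-contained toy check:

import torch

pred = torch.tensor([[0.1, 0.9, 0.4],    # scores over 3 entities, batch of 2
                     [0.7, 0.2, 0.5]])
obj = torch.tensor([2, 0])               # gold object per row
b_range = torch.arange(pred.size(0))

ranks = 1 + torch.argsort(torch.argsort(pred, dim=1, descending=True),
                          dim=1, descending=False)[b_range, obj]
print(ranks)                             # tensor([2, 1]): 0.4 ranks 2nd, 0.7 ranks 1st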
@@ -451,8 +439,7 @@ class Main(object):
                 if step % 100 == 0:
                     self.logger.info('[{}, {} Step {}]\t{}'.format(
                         split.title(), mode.title(), step, self.p.name))
-        df = pd.DataFrame({"sub":sub_all,"rel":rel_all,"obj":obj_all, "rank": target_rank,"score":target_score, "pred":obj_pred,"pred_score":obj_pred_score})
-        df.to_csv(f"{self.p.name}_result.csv",header=True, index=False)
         return results

     def run_epoch(self, epoch):
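Downstream of this loop, the metrics reported in the training logs (MRR, MR and Hit-k, averaged over head and tail batches) all derive from the collected ranks. A minimal aggregation sketch, independent of the runner class:

import torch

def rank_metrics(ranks):
    # ranks: 1-based tensor of gold-entity ranks from one evaluation pass
    return {
        'mr':  ranks.mean().item(),            # mean rank
        'mrr': (1.0 / ranks).mean().item(),    # mean reciprocal rank
        **{f'hits@{k}': (ranks <= k).float().mean().item() for k in (1, 3, 10)},
    }

print(rank_metrics(torch.tensor([1., 3., 12.])))
# {'mr': 5.33, 'mrr': 0.47, 'hits@1': 0.33, 'hits@3': 0.67, 'hits@10': 0.67} (rounded)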
@@ -478,11 +465,7 @@ class Main(object):
                 batch, 'train')

             pred = self.model.forward(sub, rel, neg_ent, self.p.train_strategy)
-            try:
-                loss = self.model.loss(pred, label, sub_samp)
-            except Exception as e:
-                print(pred)
-                raise e
+            loss = self.model.loss(pred, label, sub_samp)

             loss.backward()
             self.optimizer.step()
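self.model.loss is defined in the model code, which this diff does not show. For the one_to_n strategy these runs use, the usual convention (going back to ConvE) is binary cross-entropy over all entities with label smoothing; the runs below set lbl_smooth to 0.1. A sketch of that convention, offered as an assumption about what the call resolves to, not as the repository's actual implementation:

import torch.nn.functional as F

def one_to_n_loss(pred, label, num_ent, lbl_smooth=0.1):
    # label: multi-hot (batch, num_ent) targets built from sr2o;
    # smoothing pulls each target slightly toward the uniform distribution.
    label = (1.0 - lbl_smooth) * label + lbl_smooth / num_ent
    return F.binary_cross_entropy_with_logits(pred, label)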
@@ -652,6 +635,7 @@ if __name__ == "__main__":

     parser.add_argument('--test_only', action='store_true', default=False)
     parser.add_argument('--grid_search', action='store_true', default=False)
+    parser.add_argument('--rel_type', default=None, type=str)

     args = parser.parse_args()

@@ -660,10 +644,9 @@ if __name__ == "__main__":
     set_gpu(args.gpu)
     set_seed(args.seed)

+    model = Main(args)

     if (args.grid_search):

-        model = Main(args)
         from sklearn.model_selection import GridSearchCV
         from skorch import NeuralNet

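The grid-search body between these imports and the grid.fit call in the next hunk is elided by the diff view. For orientation, this is how the two libraries usually compose; the module, parameter values and grid below are illustrative, not the repository's actual setup:

import torch
from sklearn.model_selection import GridSearchCV
from skorch import NeuralNet

net = NeuralNet(
    module=SomeKGEModule,                  # hypothetical nn.Module class
    criterion=torch.nn.BCEWithLogitsLoss,
    optimizer=torch.optim.Adam,
    max_epochs=10,
)
grid = GridSearchCV(net, {'lr': [1e-3, 1e-4], 'max_epochs': [10, 20]}, cv=3)
# search = grid.fit(inputs, label)        # as in the code below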
@@ -712,27 +695,18 @@ if __name__ == "__main__":
         search = grid.fit(inputs, label)
         print("BEST SCORE: ", search.best_score_)
         print("BEST PARAMS: ", search.best_params_)
-    logger = get_logger(
-        args.name, args.log_dir, args.config_dir)
     if (args.test_only):
-        model = Main(args, logger)
         save_path = os.path.join('./torch_saved', args.name)
         model.load_model(save_path)
         model.evaluate('test')
     else:
-        model = Main(args, logger)
-        model.fit()
-        # while True:
-        #     try:
-        #         model = Main(args, logger)
-        #         model.fit()
-        #     except Exception as e:
-        #         print(e)
-        #         traceback.print_exc()
-        #         try:
-        #             del model
-        #         except Exception:
-        #             pass
-        #         time.sleep(30)
-        #         continue
-        #     break
+        while True:
+            try:
+                model.fit()
+            except Exception as e:
+                print(e)
+                time.sleep(30)
+                del model
+                model = Main(args)
+                continue
+            break
models.py (392 changed lines)
@@ -1,16 +1,15 @@
 import torch
-from torch import nn, einsum
+from torch import nn
 import torch.nn.functional as F
 import numpy as np
 from functools import partial
 from einops.layers.torch import Rearrange, Reduce
-from einops import rearrange, repeat
 from utils import *
 from layers import *
 from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
 from timm.models.layers import DropPath, trunc_normal_
 from timm.models.registry import register_model
-from timm.layers.helpers import to_2tuple
+from timm.models.layers.helpers import to_2tuple


 class ConvE(torch.nn.Module):
@@ -558,8 +557,6 @@ class FouriER(torch.nn.Module):
         z = self.forward_embeddings(y)
         z = self.forward_tokens(z)
         z = z.mean([-2, -1])
-        if np.count_nonzero(np.isnan(z)) > 0:
-            print("ZZZ")
         z = self.norm(z)
         x = self.head(z)
         x = self.hidden_drop(x)
@@ -710,363 +707,6 @@ def basic_blocks(dim, index, layers,

     return blocks

-def window_partition(x, window_size):
-    """
-    Args:
-        x: (B, H, W, C)
-        window_size (int): window size
-
-    Returns:
-        windows: (num_windows*B, window_size, window_size, C)
-    """
-    B, C, H, W = x.shape
-    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
-    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C)
-    return windows
-
-class WindowAttention(nn.Module):
-    r""" Window based multi-head self attention (W-MSA) module with relative position bias.
-    It supports both of shifted and non-shifted window.
-
-    Args:
-        dim (int): Number of input channels.
-        window_size (tuple[int]): The height and width of the window.
-        num_heads (int): Number of attention heads.
-        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
-        attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
-        proj_drop (float, optional): Dropout ratio of output. Default: 0.0
-        pretrained_window_size (tuple[int]): The height and width of the window in pre-training.
-    """
-
-    def __init__(self, dim, window_size, num_heads, qkv_bias=True, attn_drop=0., proj_drop=0.,
-                 pretrained_window_size=[0, 0]):
-
-        super().__init__()
-        self.dim = dim
-        self.window_size = window_size  # Wh, Ww
-        self.pretrained_window_size = pretrained_window_size
-        self.num_heads = num_heads
-
-        self.logit_scale = nn.Parameter(torch.log(10 * torch.ones((num_heads, 1, 1))), requires_grad=True)
-
-        # mlp to generate continuous relative position bias
-        self.cpb_mlp = nn.Sequential(nn.Linear(2, 512, bias=True),
-                                     nn.ReLU(inplace=True),
-                                     nn.Linear(512, num_heads, bias=False))
-
-        # get relative_coords_table
-        relative_coords_h = torch.arange(-(self.window_size[0] - 1), self.window_size[0], dtype=torch.float32)
-        relative_coords_w = torch.arange(-(self.window_size[1] - 1), self.window_size[1], dtype=torch.float32)
-        relative_coords_table = torch.stack(
-            torch.meshgrid([relative_coords_h,
-                            relative_coords_w])).permute(1, 2, 0).contiguous().unsqueeze(0)  # 1, 2*Wh-1, 2*Ww-1, 2
-        if pretrained_window_size[0] > 0:
-            relative_coords_table[:, :, :, 0] /= (pretrained_window_size[0] - 1)
-            relative_coords_table[:, :, :, 1] /= (pretrained_window_size[1] - 1)
-        else:
-            relative_coords_table[:, :, :, 0] /= (self.window_size[0] - 1)
-            relative_coords_table[:, :, :, 1] /= (self.window_size[1] - 1)
-        relative_coords_table *= 8  # normalize to -8, 8
-        relative_coords_table = torch.sign(relative_coords_table) * torch.log2(
-            torch.abs(relative_coords_table) + 1.0) / np.log2(8)
-
-        self.register_buffer("relative_coords_table", relative_coords_table)
-
-        # get pair-wise relative position index for each token inside the window
-        coords_h = torch.arange(self.window_size[0])
-        coords_w = torch.arange(self.window_size[1])
-        coords = torch.stack(torch.meshgrid([coords_h, coords_w]))  # 2, Wh, Ww
-        coords_flatten = torch.flatten(coords, 1)  # 2, Wh*Ww
-        relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :]  # 2, Wh*Ww, Wh*Ww
-        relative_coords = relative_coords.permute(1, 2, 0).contiguous()  # Wh*Ww, Wh*Ww, 2
-        relative_coords[:, :, 0] += self.window_size[0] - 1  # shift to start from 0
-        relative_coords[:, :, 1] += self.window_size[1] - 1
-        relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1
-        relative_position_index = relative_coords.sum(-1)  # Wh*Ww, Wh*Ww
-        self.register_buffer("relative_position_index", relative_position_index)
-
-        self.qkv = nn.Linear(dim, dim * 3, bias=False)
-        if qkv_bias:
-            self.q_bias = nn.Parameter(torch.zeros(dim))
-            self.v_bias = nn.Parameter(torch.zeros(dim))
-        else:
-            self.q_bias = None
-            self.v_bias = None
-        self.attn_drop = nn.Dropout(attn_drop)
-        self.proj = nn.Linear(dim, dim)
-        self.proj_drop = nn.Dropout(proj_drop)
-        self.softmax = nn.Softmax(dim=-1)
-
-    def forward(self, x, mask=None):
-        """
-        Args:
-            x: input features with shape of (num_windows*B, N, C)
-            mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
-        """
-        B_, N, C = x.shape
-        qkv_bias = None
-        if self.q_bias is not None:
-            qkv_bias = torch.cat((self.q_bias, torch.zeros_like(self.v_bias, requires_grad=False), self.v_bias))
-        qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias)
-        qkv = qkv.reshape(B_, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)
-        q, k, v = qkv[0], qkv[1], qkv[2]  # make torchscript happy (cannot use tensor as tuple)
-
-        # cosine attention
-        attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1))
-        logit_scale = torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01)).cuda()).exp()
-        attn = attn * logit_scale
-
-        relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view(-1, self.num_heads)
-        relative_position_bias = relative_position_bias_table[self.relative_position_index.view(-1)].view(
-            self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1)  # Wh*Ww,Wh*Ww,nH
-        relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww
-        relative_position_bias = 16 * torch.sigmoid(relative_position_bias)
-        attn = attn + relative_position_bias.unsqueeze(0)
-
-        if mask is not None:
-            nW = mask.shape[0]
-            attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0)
-            attn = attn.view(-1, self.num_heads, N, N)
-            attn = self.softmax(attn)
-        else:
-            attn = self.softmax(attn)
-
-        attn = self.attn_drop(attn)
-
-        x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
-        x = self.proj(x)
-        x = self.proj_drop(x)
-        return x
-
-    def extra_repr(self) -> str:
-        return f'dim={self.dim}, window_size={self.window_size}, ' \
-               f'pretrained_window_size={self.pretrained_window_size}, num_heads={self.num_heads}'
-
-    def flops(self, N):
-        # calculate flops for 1 window with token length of N
-        flops = 0
-        # qkv = self.qkv(x)
-        flops += N * self.dim * 3 * self.dim
-        # attn = (q @ k.transpose(-2, -1))
-        flops += self.num_heads * N * (self.dim // self.num_heads) * N
-        # x = (attn @ v)
-        flops += self.num_heads * N * N * (self.dim // self.num_heads)
-        # x = self.proj(x)
-        flops += N * self.dim * self.dim
-        return flops
-
-def window_reverse(windows, window_size, H, W):
-    """
-    Args:
-        windows: (num_windows*B, window_size, window_size, C)
-        window_size (int): Window size
-        H (int): Height of image
-        W (int): Width of image
-
-    Returns:
-        x: (B, H, W, C)
-    """
-    B = int(windows.shape[0] / (H * W / window_size / window_size))
-    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
-    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, -1, H, W)
-    return x
-
-def cast_tuple(val, length = 1):
-    return val if isinstance(val, tuple) else ((val,) * length)
-
-# helper classes
-
-class ChanLayerNorm(nn.Module):
-    def __init__(self, dim, eps = 1e-5):
-        super().__init__()
-        self.eps = eps
-        self.g = nn.Parameter(torch.ones(1, dim, 1, 1))
-        self.b = nn.Parameter(torch.zeros(1, dim, 1, 1))
-
-    def forward(self, x):
-        var = torch.var(x, dim = 1, unbiased = False, keepdim = True)
-        mean = torch.mean(x, dim = 1, keepdim = True)
-        return (x - mean) / (var + self.eps).sqrt() * self.g + self.b
-
-class OverlappingPatchEmbed(nn.Module):
-    def __init__(self, dim_in, dim_out, stride = 2):
-        super().__init__()
-        kernel_size = stride * 2 - 1
-        padding = kernel_size // 2
-        self.conv = nn.Conv2d(dim_in, dim_out, kernel_size, stride = stride, padding = padding)
-
-    def forward(self, x):
-        return self.conv(x)
-
-class PEG(nn.Module):
-    def __init__(self, dim, kernel_size = 3):
-        super().__init__()
-        self.proj = nn.Conv2d(dim, dim, kernel_size = kernel_size, padding = kernel_size // 2, groups = dim, stride = 1)
-
-    def forward(self, x):
-        return self.proj(x) + x
-
-# feedforward
-
-class FeedForwardDSSA(nn.Module):
-    def __init__(self, dim, mult = 4, dropout = 0.):
-        super().__init__()
-        inner_dim = int(dim * mult)
-        self.net = nn.Sequential(
-            ChanLayerNorm(dim),
-            nn.Conv2d(dim, inner_dim, 1),
-            nn.GELU(),
-            nn.Dropout(dropout),
-            nn.Conv2d(inner_dim, dim, 1),
-            nn.Dropout(dropout)
-        )
-    def forward(self, x):
-        return self.net(x)
-
-# attention
-
-class DSSA(nn.Module):
-    def __init__(
-        self,
-        dim,
-        heads = 8,
-        dim_head = 32,
-        dropout = 0.,
-        window_size = 7
-    ):
-        super().__init__()
-        self.heads = heads
-        self.scale = dim_head ** -0.5
-        self.window_size = window_size
-        inner_dim = dim_head * heads
-
-        self.norm = ChanLayerNorm(dim)
-
-        self.attend = nn.Sequential(
-            nn.Softmax(dim = -1),
-            nn.Dropout(dropout)
-        )
-
-        self.to_qkv = nn.Conv1d(dim, inner_dim * 3, 1, bias = False)
-
-        # window tokens
-
-        self.window_tokens = nn.Parameter(torch.randn(dim))
-
-        # prenorm and non-linearity for window tokens
-        # then projection to queries and keys for window tokens
-
-        self.window_tokens_to_qk = nn.Sequential(
-            nn.LayerNorm(dim_head),
-            nn.GELU(),
-            Rearrange('b h n c -> b (h c) n'),
-            nn.Conv1d(inner_dim, inner_dim * 2, 1),
-            Rearrange('b (h c) n -> b h n c', h = heads),
-        )
-
-        # window attention
-
-        self.window_attend = nn.Sequential(
-            nn.Softmax(dim = -1),
-            nn.Dropout(dropout)
-        )
-
-        self.to_out = nn.Sequential(
-            nn.Conv2d(inner_dim, dim, 1),
-            nn.Dropout(dropout)
-        )
-
-    def forward(self, x):
-        """
-        einstein notation
-
-        b - batch
-        c - channels
-        w1 - window size (height)
-        w2 - also window size (width)
-        i - sequence dimension (source)
-        j - sequence dimension (target dimension to be reduced)
-        h - heads
-        x - height of feature map divided by window size
-        y - width of feature map divided by window size
-        """
-
-        batch, height, width, heads, wsz = x.shape[0], *x.shape[-2:], self.heads, self.window_size
-        assert (height % wsz) == 0 and (width % wsz) == 0, f'height {height} and width {width} must be divisible by window size {wsz}'
-        num_windows = (height // wsz) * (width // wsz)
-
-        x = self.norm(x)
-
-        # fold in windows for "depthwise" attention - not sure why it is named depthwise when it is just "windowed" attention
-
-        x = rearrange(x, 'b c (h w1) (w w2) -> (b h w) c (w1 w2)', w1 = wsz, w2 = wsz)
-
-        # add windowing tokens
-
-        w = repeat(self.window_tokens, 'c -> b c 1', b = x.shape[0])
-        x = torch.cat((w, x), dim = -1)
-
-        # project for queries, keys, value
-
-        q, k, v = self.to_qkv(x).chunk(3, dim = 1)
-
-        # split out heads
-
-        q, k, v = map(lambda t: rearrange(t, 'b (h d) ... -> b h (...) d', h = heads), (q, k, v))
-
-        # scale
-
-        q = q * self.scale
-
-        # similarity
-
-        dots = einsum('b h i d, b h j d -> b h i j', q, k)
-
-        # attention
-
-        attn = self.attend(dots)
-
-        # aggregate values
-
-        out = torch.matmul(attn, v)
-
-        # split out windowed tokens
-
-        window_tokens, windowed_fmaps = out[:, :, 0], out[:, :, 1:]
-
-        # early return if there is only 1 window
-
-        if num_windows == 1:
-            fmap = rearrange(windowed_fmaps, '(b x y) h (w1 w2) d -> b (h d) (x w1) (y w2)', x = height // wsz, y = width // wsz, w1 = wsz, w2 = wsz)
-            return self.to_out(fmap)
-
-        # carry out the pointwise attention, the main novelty in the paper
-
-        window_tokens = rearrange(window_tokens, '(b x y) h d -> b h (x y) d', x = height // wsz, y = width // wsz)
-        windowed_fmaps = rearrange(windowed_fmaps, '(b x y) h n d -> b h (x y) n d', x = height // wsz, y = width // wsz)
-
-        # windowed queries and keys (preceded by prenorm activation)
-
-        w_q, w_k = self.window_tokens_to_qk(window_tokens).chunk(2, dim = -1)
-
-        # scale
-
-        w_q = w_q * self.scale
-
-        # similarities
-
-        w_dots = einsum('b h i d, b h j d -> b h i j', w_q, w_k)
-
-        w_attn = self.window_attend(w_dots)
-
-        # aggregate the feature maps from the "depthwise" attention step (the most interesting part of the paper, one i haven't seen before)
-
-        aggregated_windowed_fmap = einsum('b h i j, b h j w d -> b h i w d', w_attn, windowed_fmaps)
-
-        # fold back the windows and then combine heads for aggregation
-
-        fmap = rearrange(aggregated_windowed_fmap, 'b h (x y) (w1 w2) d -> b (h d) (x w1) (y w2)', x = height // wsz, y = width // wsz, w1 = wsz, w2 = wsz)
-        return self.to_out(fmap)

 class PoolFormerBlock(nn.Module):
     """
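For reference, the window_partition/window_reverse pair removed above follows the Swin Transformer scheme, though the removed partition unpacks (B, C, H, W) while its view assumes channels-last, a latent inconsistency that the switch away from windowed attention sidesteps. A channels-last round trip of the intended idea:

import torch

def window_partition(x, ws):
    # (B, H, W, C) -> (num_windows*B, ws, ws, C)
    B, H, W, C = x.shape
    x = x.view(B, H // ws, ws, W // ws, ws, C)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, ws, ws, C)

def window_reverse(windows, ws, H, W):
    # exact inverse: back to (B, H, W, C)
    B = windows.shape[0] // ((H // ws) * (W // ws))
    x = windows.view(B, H // ws, W // ws, ws, ws, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

x = torch.randn(2, 8, 8, 3)
assert torch.equal(window_reverse(window_partition(x, 4), 4, 8, 8), x)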
@@ -1091,15 +731,7 @@ class PoolFormerBlock(nn.Module):

         self.norm1 = norm_layer(dim)
         #self.token_mixer = Pooling(pool_size=pool_size)
-        # self.token_mixer = FNetBlock()
-        self.window_size = 4
-        self.attn_heads = 4
-        self.attn_mask = None
-        # self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(self.window_size), num_heads=4)
-        self.token_mixer = nn.ModuleList([
-            DSSA(dim, heads=self.attn_heads, window_size=self.window_size),
-            FeedForwardDSSA(dim)
-        ])
+        self.token_mixer = FNetBlock()
         self.norm2 = norm_layer(dim)
         mlp_hidden_dim = int(dim * mlp_ratio)
         self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim,
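FNetBlock is imported from layers.py, which is outside this diff. A token mixer consistent with the FNet paper (Lee-Thorp et al., 2021) is parameter-free: keep the real part of a 2D discrete Fourier transform taken over the hidden and sequence dimensions. Whether the repository's version matches this exactly is an assumption:

import torch
from torch import nn

class FNetBlock(nn.Module):
    # Parameter-free token mixing via FFT, per the FNet paper.
    def forward(self, x):
        return torch.fft.fft(torch.fft.fft(x, dim=-1), dim=-2).real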
@@ -1116,26 +748,16 @@ class PoolFormerBlock(nn.Module):
             layer_scale_init_value * torch.ones((dim)), requires_grad=True)

     def forward(self, x):
-        B, C, H, W = x.shape
-        # x_windows = window_partition(x, self.window_size)
-        # x_windows = x_windows.view(-1, self.window_size * self.window_size, C)
-        # attn_windows = self.token_mixer(x_windows, mask=self.attn_mask)
-        # attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C)
-        # x_attn = window_reverse(attn_windows, self.window_size, H, W)
-        x_attn = self.token_mixer(x)
         if self.use_layer_scale:
             x = x + self.drop_path(
                 self.layer_scale_1.unsqueeze(-1).unsqueeze(-1)
-                * x_attn)
+                * self.token_mixer(self.norm1(x)))
             x = x + self.drop_path(
                 self.layer_scale_2.unsqueeze(-1).unsqueeze(-1)
                 * self.mlp(self.norm2(x)))
         else:
-            x = x + self.drop_path(x_attn)
+            x = x + self.drop_path(self.token_mixer(self.norm1(x)))
             x = x + self.drop_path(self.mlp(self.norm2(x)))

-        if np.count_nonzero(np.isnan(x)) > 0:
-            print("PFBlock")
         return x
 class PatchEmbed(nn.Module):
     """
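The right-hand side is more than a cleanup: the token mixer now sees norm1(x) rather than raw x, restoring the standard pre-norm residual layout that the MLP branch already had. Schematically, each block on the new side computes the following (a functional restatement, not code from the file):

def poolformer_block(x, norm1, norm2, token_mixer, mlp, drop_path, s1, s2):
    # s1, s2: learned per-channel LayerScale gains, broadcast over H and W
    x = x + drop_path(s1.unsqueeze(-1).unsqueeze(-1) * token_mixer(norm1(x)))
    x = x + drop_path(s2.unsqueeze(-1).unsqueeze(-1) * mlp(norm2(x)))
    return x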
@@ -1221,7 +843,7 @@ class LayerNormChannel(nn.Module):
             + self.bias.unsqueeze(-1).unsqueeze(-1)
         return x

-class FeedForwardFNet(nn.Module):
+class FeedForward(nn.Module):
     def __init__(self, dim, hidden_dim, dropout = 0.):
         super().__init__()
         self.net = nn.Sequential(
@@ -1257,7 +879,7 @@ class FNet(nn.Module):
         for _ in range(depth):
             self.layers.append(nn.ModuleList([
                 PreNorm(dim, FNetBlock()),
-                PreNorm(dim, FeedForwardFNet(dim, mlp_dim, dropout = dropout))
+                PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout))
             ]))
     def forward(self, x):
         for attn, ff in self.layers:
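The forward body is truncated by the hunk boundary. In reference FNet implementations the loop applies each pre-normed mixer and feed-forward pair with residual connections; a sketch of the presumed continuation:

def forward(self, x):
    for attn, ff in self.layers:
        x = attn(x) + x    # Fourier token mixing, residual
        x = ff(x) + x      # position-wise feed-forward, residual
    return x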
requirements.txt
@@ -1,6 +1,4 @@
 torch==1.12.1+cu116
 ordered-set==4.1.0
 numpy==1.21.5
 einops==0.4.1
-pandas
-timm==0.9.16
run.sh (5 changed lines)
@@ -37,7 +37,4 @@ nohup python main.py --name ice00001 --lr 0.00001 --data icews14 --gpu 2 >run_lo
 PID:

 ___
 nohup python main.py --name ice001 --lr 0.001 --data icews14 --gpu 3 >run_log/0.001.log 2>&1 &
-___
-nohup python main.py --name iceboth --data icews14_both --gpu 0 >run_log/iceboth.log 2>&1 &
-PID: 21984
run_log/icews14/0.00001.log (new file, 10708 lines; diff suppressed because it is too large)
run_log/icews14/0.00003.log (new file, 6653 lines; diff suppressed because it is too large)
run_log/icews14/0.0001.out (new file, 9511 lines; diff suppressed because it is too large)
run_log/icews14/0.0003.log (new file, 4950 lines; diff suppressed because it is too large)
run_log/icews14/0.001.log (new file, 6249 lines; diff suppressed because it is too large)

run_log/wikidata12k/0.001.log (new file, 425 lines)
@@ -0,0 +1,425 @@
|
|||||||
|
nohup: ignoring input
|
||||||
|
2023-05-27 04:41:18,497 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_0.001', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
|
||||||
|
{'batch_size': 128,
|
||||||
|
'bias': False,
|
||||||
|
'config_dir': './config/',
|
||||||
|
'dataset': 'wikidata12k',
|
||||||
|
'drop': 0.0,
|
||||||
|
'drop_path': 0.0,
|
||||||
|
'embed_dim': 400,
|
||||||
|
'ent_vec_dim': 400,
|
||||||
|
'expansion_factor': 4,
|
||||||
|
'expansion_factor_token': 0.5,
|
||||||
|
'feat_drop': 0.2,
|
||||||
|
'filt_h': 1,
|
||||||
|
'filt_w': 9,
|
||||||
|
'form': 'plain',
|
||||||
|
'gpu': '3',
|
||||||
|
'grid_search': False,
|
||||||
|
'hid_drop': 0.5,
|
||||||
|
'image_h': 128,
|
||||||
|
'image_w': 128,
|
||||||
|
'in_channels': 1,
|
||||||
|
'inp_drop': 0.2,
|
||||||
|
'k_h': 20,
|
||||||
|
'k_w': 10,
|
||||||
|
'ker_sz': 9,
|
||||||
|
'l2': 0.0,
|
||||||
|
'lbl_smooth': 0.1,
|
||||||
|
'log_dir': './log/',
|
||||||
|
'lr': 0.001,
|
||||||
|
'max_epochs': 500,
|
||||||
|
'mixer_depth': 16,
|
||||||
|
'mixer_dim': 256,
|
||||||
|
'mixer_dropout': 0.2,
|
||||||
|
'name': 'wikidata12k_0.001',
|
||||||
|
'neg_num': 1000,
|
||||||
|
'num_filt': 96,
|
||||||
|
'num_workers': 0,
|
||||||
|
'opt': 'adam',
|
||||||
|
'out_channels': 32,
|
||||||
|
'patch_size': 8,
|
||||||
|
'perm': 1,
|
||||||
|
'rel_vec_dim': 400,
|
||||||
|
'restore': False,
|
||||||
|
'seed': 42,
|
||||||
|
'test_only': False,
|
||||||
|
'train_strategy': 'one_to_n'}
|
||||||
|
2023-05-27 04:41:28,635 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:42:32,570 - [INFO] - [E:0| 100]: Train Loss:0.053587, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:43:36,618 - [INFO] - [E:0| 200]: Train Loss:0.028724, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:44:40,687 - [INFO] - [E:0| 300]: Train Loss:0.020033, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:45:44,799 - [INFO] - [E:0| 400]: Train Loss:0.015589, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:46:48,901 - [INFO] - [E:0| 500]: Train Loss:0.012878, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:47:53,124 - [INFO] - [E:0| 600]: Train Loss:0.011054, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:48:57,224 - [INFO] - [E:0| 700]: Train Loss:0.0097532, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:50:01,352 - [INFO] - [E:0| 800]: Train Loss:0.008763, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:51:05,445 - [INFO] - [E:0| 900]: Train Loss:0.0079929, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:52:09,559 - [INFO] - [E:0| 1000]: Train Loss:0.0073745, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:53:13,624 - [INFO] - [E:0| 1100]: Train Loss:0.0068693, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:54:17,823 - [INFO] - [E:0| 1200]: Train Loss:0.0064497, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:55:21,967 - [INFO] - [E:0| 1300]: Train Loss:0.0060945, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:56:26,129 - [INFO] - [E:0| 1400]: Train Loss:0.0057879, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:57:30,256 - [INFO] - [E:0| 1500]: Train Loss:0.0055195, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:58:34,350 - [INFO] - [E:0| 1600]: Train Loss:0.0052845, Val MRR:0.0, wikidata12k_0.001
|
||||||
|
2023-05-27 04:59:16,259 - [INFO] - [Epoch:0]: Training Loss:0.005147
|
||||||
|
|
||||||
|
2023-05-27 04:59:16,481 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 04:59:38,187 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 04:59:50,745 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 05:00:12,609 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 05:00:25,062 - [INFO] - [Evaluating Epoch 0 valid]:
|
||||||
|
MRR: Tail : 0.08049, Head : 0.01947, Avg : 0.04998
|
||||||
|
|
||||||
|
2023-05-27 05:00:26,469 - [INFO] - [Epoch 0]: Training Loss: 0.0051469, Valid MRR: 0.04998,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 05:00:27,127 - [INFO] - [E:1| 0]: Train Loss:0.0016275, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:01:31,277 - [INFO] - [E:1| 100]: Train Loss:0.0017991, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:02:35,390 - [INFO] - [E:1| 200]: Train Loss:0.0017846, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:03:39,590 - [INFO] - [E:1| 300]: Train Loss:0.0017789, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:04:43,748 - [INFO] - [E:1| 400]: Train Loss:0.001772, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:05:47,967 - [INFO] - [E:1| 500]: Train Loss:0.0017692, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:06:52,036 - [INFO] - [E:1| 600]: Train Loss:0.0017597, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:07:56,215 - [INFO] - [E:1| 700]: Train Loss:0.0017589, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:09:00,363 - [INFO] - [E:1| 800]: Train Loss:0.0017555, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:10:04,516 - [INFO] - [E:1| 900]: Train Loss:0.0017507, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:11:08,719 - [INFO] - [E:1| 1000]: Train Loss:0.0017476, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:12:12,940 - [INFO] - [E:1| 1100]: Train Loss:0.0017427, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:13:17,076 - [INFO] - [E:1| 1200]: Train Loss:0.0017384, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:14:21,295 - [INFO] - [E:1| 1300]: Train Loss:0.0017345, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:15:25,462 - [INFO] - [E:1| 1400]: Train Loss:0.0017307, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:16:29,614 - [INFO] - [E:1| 1500]: Train Loss:0.0017243, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:17:33,705 - [INFO] - [E:1| 1600]: Train Loss:0.001719, Val MRR:0.04998, wikidata12k_0.001
|
||||||
|
2023-05-27 05:18:15,618 - [INFO] - [Epoch:1]: Training Loss:0.001714
|
||||||
|
|
||||||
|
2023-05-27 05:18:15,839 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 05:18:37,583 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 05:18:50,191 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 05:19:12,067 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 05:19:24,503 - [INFO] - [Evaluating Epoch 1 valid]:
|
||||||
|
MRR: Tail : 0.1748, Head : 0.04108, Avg : 0.10794
|
||||||
|
|
||||||
|
2023-05-27 05:19:25,566 - [INFO] - [Epoch 1]: Training Loss: 0.0017143, Valid MRR: 0.10794,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 05:19:26,219 - [INFO] - [E:2| 0]: Train Loss:0.0016961, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:20:30,344 - [INFO] - [E:2| 100]: Train Loss:0.0016227, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:21:34,535 - [INFO] - [E:2| 200]: Train Loss:0.0016161, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:22:38,770 - [INFO] - [E:2| 300]: Train Loss:0.0016161, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:23:43,004 - [INFO] - [E:2| 400]: Train Loss:0.0016106, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:24:47,137 - [INFO] - [E:2| 500]: Train Loss:0.0016058, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:25:51,362 - [INFO] - [E:2| 600]: Train Loss:0.0016067, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:26:55,499 - [INFO] - [E:2| 700]: Train Loss:0.0016013, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:27:59,761 - [INFO] - [E:2| 800]: Train Loss:0.0015978, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:29:03,935 - [INFO] - [E:2| 900]: Train Loss:0.0015935, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:30:08,210 - [INFO] - [E:2| 1000]: Train Loss:0.0015896, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:31:12,398 - [INFO] - [E:2| 1100]: Train Loss:0.0015856, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:32:16,608 - [INFO] - [E:2| 1200]: Train Loss:0.0015814, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:33:20,836 - [INFO] - [E:2| 1300]: Train Loss:0.0015758, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:34:25,014 - [INFO] - [E:2| 1400]: Train Loss:0.001571, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:35:29,265 - [INFO] - [E:2| 1500]: Train Loss:0.001565, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:36:33,450 - [INFO] - [E:2| 1600]: Train Loss:0.0015589, Val MRR:0.10794, wikidata12k_0.001
|
||||||
|
2023-05-27 05:37:15,383 - [INFO] - [Epoch:2]: Training Loss:0.001556
|
||||||
|
|
||||||
|
2023-05-27 05:37:15,603 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 05:37:37,308 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 05:37:49,874 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 05:38:11,738 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 05:38:24,173 - [INFO] - [Evaluating Epoch 2 valid]:
|
||||||
|
MRR: Tail : 0.28305, Head : 0.07818, Avg : 0.18062
|
||||||
|
|
||||||
|
2023-05-27 05:38:25,157 - [INFO] - [Epoch 2]: Training Loss: 0.001556, Valid MRR: 0.18062,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 05:38:25,813 - [INFO] - [E:3| 0]: Train Loss:0.0013897, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:39:30,024 - [INFO] - [E:3| 100]: Train Loss:0.0014599, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:40:34,122 - [INFO] - [E:3| 200]: Train Loss:0.0014516, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:41:38,261 - [INFO] - [E:3| 300]: Train Loss:0.0014552, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:42:42,459 - [INFO] - [E:3| 400]: Train Loss:0.0014541, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:43:46,707 - [INFO] - [E:3| 500]: Train Loss:0.0014521, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:44:50,829 - [INFO] - [E:3| 600]: Train Loss:0.0014476, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:45:54,979 - [INFO] - [E:3| 700]: Train Loss:0.0014439, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:46:59,115 - [INFO] - [E:3| 800]: Train Loss:0.0014396, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:48:03,341 - [INFO] - [E:3| 900]: Train Loss:0.0014367, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:49:07,419 - [INFO] - [E:3| 1000]: Train Loss:0.0014329, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:50:11,647 - [INFO] - [E:3| 1100]: Train Loss:0.0014308, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:51:15,783 - [INFO] - [E:3| 1200]: Train Loss:0.0014276, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:52:19,915 - [INFO] - [E:3| 1300]: Train Loss:0.0014245, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:53:24,121 - [INFO] - [E:3| 1400]: Train Loss:0.0014212, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:54:28,236 - [INFO] - [E:3| 1500]: Train Loss:0.0014184, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:55:32,482 - [INFO] - [E:3| 1600]: Train Loss:0.0014147, Val MRR:0.18062, wikidata12k_0.001
|
||||||
|
2023-05-27 05:56:14,438 - [INFO] - [Epoch:3]: Training Loss:0.001413
|
||||||
|
|
||||||
|
2023-05-27 05:56:14,658 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 05:56:36,372 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 05:56:48,954 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 05:57:10,881 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 05:57:23,328 - [INFO] - [Evaluating Epoch 3 valid]:
|
||||||
|
MRR: Tail : 0.31549, Head : 0.09979, Avg : 0.20764
|
||||||
|
|
||||||
|
2023-05-27 05:57:24,420 - [INFO] - [Epoch 3]: Training Loss: 0.001413, Valid MRR: 0.20764,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 05:57:25,077 - [INFO] - [E:4| 0]: Train Loss:0.0014323, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 05:58:29,238 - [INFO] - [E:4| 100]: Train Loss:0.0013524, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 05:59:33,410 - [INFO] - [E:4| 200]: Train Loss:0.0013439, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:00:37,566 - [INFO] - [E:4| 300]: Train Loss:0.0013507, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:01:41,692 - [INFO] - [E:4| 400]: Train Loss:0.0013525, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:02:45,877 - [INFO] - [E:4| 500]: Train Loss:0.0013497, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:03:50,088 - [INFO] - [E:4| 600]: Train Loss:0.0013468, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:04:54,238 - [INFO] - [E:4| 700]: Train Loss:0.0013447, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:05:58,490 - [INFO] - [E:4| 800]: Train Loss:0.0013417, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:07:02,645 - [INFO] - [E:4| 900]: Train Loss:0.001339, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:08:06,755 - [INFO] - [E:4| 1000]: Train Loss:0.0013377, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:09:10,902 - [INFO] - [E:4| 1100]: Train Loss:0.0013348, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:10:15,038 - [INFO] - [E:4| 1200]: Train Loss:0.0013326, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:11:19,143 - [INFO] - [E:4| 1300]: Train Loss:0.0013302, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:12:23,347 - [INFO] - [E:4| 1400]: Train Loss:0.0013283, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:13:27,477 - [INFO] - [E:4| 1500]: Train Loss:0.0013269, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:14:31,542 - [INFO] - [E:4| 1600]: Train Loss:0.0013247, Val MRR:0.20764, wikidata12k_0.001
|
||||||
|
2023-05-27 06:15:13,457 - [INFO] - [Epoch:4]: Training Loss:0.001323
|
||||||
|
|
||||||
|
2023-05-27 06:15:13,677 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 06:15:35,362 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 06:15:47,916 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 06:16:09,784 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 06:16:22,221 - [INFO] - [Evaluating Epoch 4 valid]:
|
||||||
|
MRR: Tail : 0.36022, Head : 0.1037, Avg : 0.23196
|
||||||
|
|
||||||
|
2023-05-27 06:16:23,220 - [INFO] - [Epoch 4]: Training Loss: 0.0013235, Valid MRR: 0.23196,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 06:16:23,875 - [INFO] - [E:5| 0]: Train Loss:0.0013387, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:17:28,154 - [INFO] - [E:5| 100]: Train Loss:0.0012781, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:18:32,286 - [INFO] - [E:5| 200]: Train Loss:0.0012786, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:19:36,495 - [INFO] - [E:5| 300]: Train Loss:0.0012809, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:20:40,588 - [INFO] - [E:5| 400]: Train Loss:0.0012857, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:21:44,792 - [INFO] - [E:5| 500]: Train Loss:0.0012853, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:22:49,006 - [INFO] - [E:5| 600]: Train Loss:0.0012833, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:23:53,190 - [INFO] - [E:5| 700]: Train Loss:0.0012812, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:24:57,311 - [INFO] - [E:5| 800]: Train Loss:0.0012813, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:26:01,510 - [INFO] - [E:5| 900]: Train Loss:0.0012801, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:27:05,756 - [INFO] - [E:5| 1000]: Train Loss:0.0012789, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:28:09,936 - [INFO] - [E:5| 1100]: Train Loss:0.0012769, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:29:14,145 - [INFO] - [E:5| 1200]: Train Loss:0.0012746, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:30:18,293 - [INFO] - [E:5| 1300]: Train Loss:0.0012721, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:31:22,538 - [INFO] - [E:5| 1400]: Train Loss:0.0012703, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:32:26,694 - [INFO] - [E:5| 1500]: Train Loss:0.0012689, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:33:30,913 - [INFO] - [E:5| 1600]: Train Loss:0.0012677, Val MRR:0.23196, wikidata12k_0.001
|
||||||
|
2023-05-27 06:34:12,771 - [INFO] - [Epoch:5]: Training Loss:0.001267
|
||||||
|
|
||||||
|
2023-05-27 06:34:12,992 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 06:34:34,725 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 06:34:47,309 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 06:35:09,233 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 06:35:21,676 - [INFO] - [Evaluating Epoch 5 valid]:
|
||||||
|
MRR: Tail : 0.39017, Head : 0.12832, Avg : 0.25924
|
||||||
|
|
||||||
|
2023-05-27 06:35:22,811 - [INFO] - [Epoch 5]: Training Loss: 0.0012668, Valid MRR: 0.25924,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 06:35:23,469 - [INFO] - [E:6| 0]: Train Loss:0.0011894, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:36:27,594 - [INFO] - [E:6| 100]: Train Loss:0.0012342, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:37:31,786 - [INFO] - [E:6| 200]: Train Loss:0.0012378, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:38:35,956 - [INFO] - [E:6| 300]: Train Loss:0.0012388, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:39:40,226 - [INFO] - [E:6| 400]: Train Loss:0.0012378, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:40:44,423 - [INFO] - [E:6| 500]: Train Loss:0.0012438, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:41:48,645 - [INFO] - [E:6| 600]: Train Loss:0.0012421, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:42:52,773 - [INFO] - [E:6| 700]: Train Loss:0.0012408, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:43:56,948 - [INFO] - [E:6| 800]: Train Loss:0.0012416, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:45:01,063 - [INFO] - [E:6| 900]: Train Loss:0.001242, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:46:05,216 - [INFO] - [E:6| 1000]: Train Loss:0.0012397, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:47:09,350 - [INFO] - [E:6| 1100]: Train Loss:0.0012386, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:48:13,445 - [INFO] - [E:6| 1200]: Train Loss:0.0012373, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:49:17,622 - [INFO] - [E:6| 1300]: Train Loss:0.0012363, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:50:21,832 - [INFO] - [E:6| 1400]: Train Loss:0.0012346, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:51:26,056 - [INFO] - [E:6| 1500]: Train Loss:0.0012342, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:52:30,214 - [INFO] - [E:6| 1600]: Train Loss:0.001233, Val MRR:0.25924, wikidata12k_0.001
|
||||||
|
2023-05-27 06:53:12,160 - [INFO] - [Epoch:6]: Training Loss:0.001232
|
||||||
|
|
||||||
|
2023-05-27 06:53:12,380 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 06:53:34,088 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 06:53:46,650 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 06:54:08,519 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 06:54:20,952 - [INFO] - [Evaluating Epoch 6 valid]:
|
||||||
|
MRR: Tail : 0.37877, Head : 0.18554, Avg : 0.28215
|
||||||
|
|
||||||
|
2023-05-27 06:54:22,025 - [INFO] - [Epoch 6]: Training Loss: 0.0012324, Valid MRR: 0.28215,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 06:54:22,682 - [INFO] - [E:7| 0]: Train Loss:0.0011315, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 06:55:26,826 - [INFO] - [E:7| 100]: Train Loss:0.001205, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 06:56:30,996 - [INFO] - [E:7| 200]: Train Loss:0.0012037, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 06:57:35,173 - [INFO] - [E:7| 300]: Train Loss:0.0012034, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 06:58:39,365 - [INFO] - [E:7| 400]: Train Loss:0.0012073, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 06:59:43,659 - [INFO] - [E:7| 500]: Train Loss:0.0012094, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:00:47,839 - [INFO] - [E:7| 600]: Train Loss:0.0012093, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:01:51,994 - [INFO] - [E:7| 700]: Train Loss:0.0012077, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:02:56,159 - [INFO] - [E:7| 800]: Train Loss:0.0012085, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:04:00,272 - [INFO] - [E:7| 900]: Train Loss:0.0012086, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:05:04,432 - [INFO] - [E:7| 1000]: Train Loss:0.0012104, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:06:08,565 - [INFO] - [E:7| 1100]: Train Loss:0.00121, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:07:12,766 - [INFO] - [E:7| 1200]: Train Loss:0.0012097, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:08:16,920 - [INFO] - [E:7| 1300]: Train Loss:0.0012101, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:09:21,081 - [INFO] - [E:7| 1400]: Train Loss:0.0012095, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:10:25,247 - [INFO] - [E:7| 1500]: Train Loss:0.0012082, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:11:29,494 - [INFO] - [E:7| 1600]: Train Loss:0.0012075, Val MRR:0.28215, wikidata12k_0.001
|
||||||
|
2023-05-27 07:12:11,381 - [INFO] - [Epoch:7]: Training Loss:0.001208
|
||||||
|
|
||||||
|
2023-05-27 07:12:11,602 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 07:12:33,359 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 07:12:45,946 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 07:13:07,852 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 07:13:20,334 - [INFO] - [Evaluating Epoch 7 valid]:
|
||||||
|
MRR: Tail : 0.40626, Head : 0.21375, Avg : 0.31001
|
||||||
|
|
||||||
|
2023-05-27 07:13:21,326 - [INFO] - [Epoch 7]: Training Loss: 0.0012077, Valid MRR: 0.31001,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 07:13:21,980 - [INFO] - [E:8| 0]: Train Loss:0.0012363, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:14:26,096 - [INFO] - [E:8| 100]: Train Loss:0.0011868, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:15:30,354 - [INFO] - [E:8| 200]: Train Loss:0.0011847, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:16:34,466 - [INFO] - [E:8| 300]: Train Loss:0.0011814, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:17:38,565 - [INFO] - [E:8| 400]: Train Loss:0.0011847, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:18:42,799 - [INFO] - [E:8| 500]: Train Loss:0.0011887, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:19:46,964 - [INFO] - [E:8| 600]: Train Loss:0.0011901, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:20:51,144 - [INFO] - [E:8| 700]: Train Loss:0.0011897, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:21:55,282 - [INFO] - [E:8| 800]: Train Loss:0.0011913, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:22:59,411 - [INFO] - [E:8| 900]: Train Loss:0.0011918, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:24:03,538 - [INFO] - [E:8| 1000]: Train Loss:0.0011908, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:25:07,761 - [INFO] - [E:8| 1100]: Train Loss:0.0011915, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:26:11,872 - [INFO] - [E:8| 1200]: Train Loss:0.0011925, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:27:16,041 - [INFO] - [E:8| 1300]: Train Loss:0.0011918, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:28:20,210 - [INFO] - [E:8| 1400]: Train Loss:0.0011905, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:29:24,336 - [INFO] - [E:8| 1500]: Train Loss:0.0011898, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:30:28,566 - [INFO] - [E:8| 1600]: Train Loss:0.0011888, Val MRR:0.31001, wikidata12k_0.001
|
||||||
|
2023-05-27 07:31:10,538 - [INFO] - [Epoch:8]: Training Loss:0.001189
|
||||||
|
|
||||||
|
2023-05-27 07:31:10,758 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 07:31:32,478 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 07:31:45,038 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
|
||||||
|
2023-05-27 07:32:06,913 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
|
||||||
|
2023-05-27 07:32:19,354 - [INFO] - [Evaluating Epoch 8 valid]:
|
||||||
|
MRR: Tail : 0.41408, Head : 0.20141, Avg : 0.30774
|
||||||
|
|
||||||
|
2023-05-27 07:32:19,354 - [INFO] - [Epoch 8]: Training Loss: 0.0011888, Valid MRR: 0.31001,
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
2023-05-27 07:32:20,011 - [INFO] - [E:9| 0]: Train Loss:0.0011748, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:33:24,159 - [INFO] - [E:9| 100]: Train Loss:0.0011746, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:34:28,351 - [INFO] - [E:9| 200]: Train Loss:0.0011787, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:35:32,472 - [INFO] - [E:9| 300]: Train Loss:0.0011761, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:36:36,656 - [INFO] - [E:9| 400]: Train Loss:0.0011729, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:37:40,796 - [INFO] - [E:9| 500]: Train Loss:0.0011725, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:38:44,981 - [INFO] - [E:9| 600]: Train Loss:0.0011741, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:39:49,133 - [INFO] - [E:9| 700]: Train Loss:0.001173, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:40:53,329 - [INFO] - [E:9| 800]: Train Loss:0.0011736, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:41:57,558 - [INFO] - [E:9| 900]: Train Loss:0.0011731, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:43:01,737 - [INFO] - [E:9| 1000]: Train Loss:0.0011729, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:44:05,854 - [INFO] - [E:9| 1100]: Train Loss:0.001173, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:45:10,080 - [INFO] - [E:9| 1200]: Train Loss:0.0011727, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:46:14,191 - [INFO] - [E:9| 1300]: Train Loss:0.0011718, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:47:18,385 - [INFO] - [E:9| 1400]: Train Loss:0.001171, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:48:22,543 - [INFO] - [E:9| 1500]: Train Loss:0.0011709, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:49:26,748 - [INFO] - [E:9| 1600]: Train Loss:0.0011712, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:50:08,734 - [INFO] - [Epoch:9]: Training Loss:0.001171
2023-05-27 07:50:08,954 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 07:50:30,672 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 07:50:43,251 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 07:51:05,138 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 07:51:17,628 - [INFO] - [Evaluating Epoch 9 valid]:
MRR: Tail : 0.42849, Head : 0.23814, Avg : 0.33331
MR: Tail : 655.47, Head : 840.42, Avg : 747.94
Hit-1: Tail : 0.35832, Head : 0.15504, Avg : 0.25668
Hit-3: Tail : 0.45838, Head : 0.2739, Avg : 0.36614
Hit-10: Tail : 0.55785, Head : 0.39074, Avg : 0.47429
2023-05-27 07:51:18,545 - [INFO] - [Epoch 9]: Training Loss: 0.0011709, Valid MRR: 0.33331,
2023-05-27 07:51:19,204 - [INFO] - [E:10| 0]: Train Loss:0.00113, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:52:23,358 - [INFO] - [E:10| 100]: Train Loss:0.0011531, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:53:27,523 - [INFO] - [E:10| 200]: Train Loss:0.0011557, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:54:31,758 - [INFO] - [E:10| 300]: Train Loss:0.0011545, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:55:36,004 - [INFO] - [E:10| 400]: Train Loss:0.0011554, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:56:40,140 - [INFO] - [E:10| 500]: Train Loss:0.001154, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:57:44,301 - [INFO] - [E:10| 600]: Train Loss:0.0011525, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:58:48,517 - [INFO] - [E:10| 700]: Train Loss:0.0011538, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:59:52,698 - [INFO] - [E:10| 800]: Train Loss:0.0011536, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:00:56,912 - [INFO] - [E:10| 900]: Train Loss:0.0011541, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:02:01,143 - [INFO] - [E:10| 1000]: Train Loss:0.0011546, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:03:05,293 - [INFO] - [E:10| 1100]: Train Loss:0.0011542, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:04:09,471 - [INFO] - [E:10| 1200]: Train Loss:0.0011539, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:05:13,701 - [INFO] - [E:10| 1300]: Train Loss:0.0011531, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:06:17,887 - [INFO] - [E:10| 1400]: Train Loss:0.0011534, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:07:22,089 - [INFO] - [E:10| 1500]: Train Loss:0.0011546, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:08:26,239 - [INFO] - [E:10| 1600]: Train Loss:0.0011552, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:09:08,153 - [INFO] - [Epoch:10]: Training Loss:0.001156
2023-05-27 08:09:08,373 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 08:09:30,456 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 08:09:43,084 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 08:10:05,005 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 08:10:17,448 - [INFO] - [Evaluating Epoch 10 valid]:
MRR: Tail : 0.45191, Head : 0.21626, Avg : 0.33409
2023-05-27 08:10:18,436 - [INFO] - [Epoch 10]: Training Loss: 0.0011556, Valid MRR: 0.33409,
2023-05-27 08:10:19,090 - [INFO] - [E:11| 0]: Train Loss:0.0011363, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:11:23,530 - [INFO] - [E:11| 100]: Train Loss:0.0011426, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:12:27,950 - [INFO] - [E:11| 200]: Train Loss:0.0011483, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:13:32,143 - [INFO] - [E:11| 300]: Train Loss:0.0011472, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:14:36,469 - [INFO] - [E:11| 400]: Train Loss:0.0011477, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:15:40,641 - [INFO] - [E:11| 500]: Train Loss:0.0011474, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:16:44,731 - [INFO] - [E:11| 600]: Train Loss:0.0011465, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:17:48,900 - [INFO] - [E:11| 700]: Train Loss:0.0011469, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:18:53,113 - [INFO] - [E:11| 800]: Train Loss:0.0011469, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:19:57,285 - [INFO] - [E:11| 900]: Train Loss:0.0011457, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:21:01,406 - [INFO] - [E:11| 1000]: Train Loss:0.0011445, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:22:05,596 - [INFO] - [E:11| 1100]: Train Loss:0.0011434, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:23:09,693 - [INFO] - [E:11| 1200]: Train Loss:0.0011431, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:24:13,830 - [INFO] - [E:11| 1300]: Train Loss:0.001143, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:25:18,076 - [INFO] - [E:11| 1400]: Train Loss:0.0011426, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:26:22,160 - [INFO] - [E:11| 1500]: Train Loss:0.0011422, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:27:26,373 - [INFO] - [E:11| 1600]: Train Loss:0.0011418, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:28:08,368 - [INFO] - [Epoch:11]: Training Loss:0.001142
2023-05-27 08:28:08,589 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 08:28:30,301 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 08:28:42,888 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 08:29:04,760 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 08:29:17,200 - [INFO] - [Evaluating Epoch 11 valid]:
MRR: Tail : 0.4433, Head : 0.23916, Avg : 0.34123
2023-05-27 08:29:18,266 - [INFO] - [Epoch 11]: Training Loss: 0.0011416, Valid MRR: 0.34123,
2023-05-27 08:29:18,927 - [INFO] - [E:12| 0]: Train Loss:0.0010957, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:30:23,063 - [INFO] - [E:12| 100]: Train Loss:0.0011303, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:31:27,243 - [INFO] - [E:12| 200]: Train Loss:0.001132, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:32:31,360 - [INFO] - [E:12| 300]: Train Loss:0.0011321, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:33:35,484 - [INFO] - [E:12| 400]: Train Loss:0.0011313, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:34:39,656 - [INFO] - [E:12| 500]: Train Loss:0.0011302, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:35:43,783 - [INFO] - [E:12| 600]: Train Loss:0.0011318, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:36:47,900 - [INFO] - [E:12| 700]: Train Loss:0.0011316, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:37:52,082 - [INFO] - [E:12| 800]: Train Loss:0.0011323, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:38:56,174 - [INFO] - [E:12| 900]: Train Loss:0.001132, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:40:00,316 - [INFO] - [E:12| 1000]: Train Loss:0.0011317, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:41:04,530 - [INFO] - [E:12| 1100]: Train Loss:0.0011322, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:42:08,648 - [INFO] - [E:12| 1200]: Train Loss:0.0011318, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:43:12,819 - [INFO] - [E:12| 1300]: Train Loss:0.0011314, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:44:18,052 - [INFO] - [E:12| 1400]: Train Loss:0.0011312, Val MRR:0.34123, wikidata12k_0.001
@@ -10,36 +10,15 @@ def extract_learning_curves(args):
     if len(paths) == 1 and os.path.isdir(paths[0]):
         paths = [os.path.join(paths[0], f) for f in os.listdir(paths[0]) if os.path.isfile(os.path.join(paths[0], f))]
     learning_curves = {}
-    print(paths)
     for path in paths:
-        print(path)
         learning_curve = []
         lines = open(path, 'r').readlines()
-        last_epoch = -1
-        stacked_epoch = -1
-        max_epoch = -1
         for line in lines:
             matched = re.match(r'[0-9\- :,]*\[INFO\] - \[Epoch ([0-9]+)\].*Valid MRR: ([0-9\.]+).*', line)
-            # matched = re.match(r'\tMRR: Tail : [0-9\.]+, Head : [0-9\.]+, Avg : ([0-9\.]+)', line)
             if matched:
-                this_epoch = int(matched.group(1))
-                if (this_epoch > max_epoch):
-                    learning_curve.append(float(matched.group(2)))
-                    max_epoch = this_epoch
-                    stacked_epoch = this_epoch
-                elif (this_epoch < max_epoch and this_epoch > last_epoch):
-                    last_epoch = this_epoch
-                    max_epoch = stacked_epoch + 1 + this_epoch
-                    learning_curve.append(float(matched.group(2)))
-                if max_epoch >= args.num_epochs:
+                learning_curve.append(float(matched.group(2)))
+                if int(matched.group(1)) >= args.num_epochs:
                     break
-        # if matched:
-        #     max_epoch += 1
-        #     learning_curve.append(float(matched.group(1)))
-        #     if max_epoch >= args.num_epochs:
-        #         break
-        while len(learning_curve) < args.num_epochs:
-            learning_curve.append(learning_curve[-1])
         learning_curves[os.path.basename(path)] = learning_curve
     return learning_curves
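The rewritten hunk above drops the epoch-stitching bookkeeping (`last_epoch`, `stacked_epoch`, `max_epoch`) and the tail-padding loop: the new code simply appends one `Valid MRR` value per matched epoch-summary line and stops once the epoch index reaches `args.num_epochs`. A minimal self-contained sketch of that new logic, assuming the log format shown earlier in this diff (the standalone `extract_curve` helper is illustrative, not part of the repository):

```python
import re

# Matches the per-epoch summary lines of the training logs, e.g.
#   2023-05-27 07:51:18,545 - [INFO] - [Epoch 9]: Training Loss: 0.0011709, Valid MRR: 0.33331,
# group(1) is the epoch index, group(2) the validation MRR.
EPOCH_RE = re.compile(r'[0-9\- :,]*\[INFO\] - \[Epoch ([0-9]+)\].*Valid MRR: ([0-9\.]+).*')

def extract_curve(path, num_epochs):
    """Collect one Valid MRR value per epoch, stopping after num_epochs."""
    curve = []
    with open(path, 'r') as f:
        for line in f:
            matched = EPOCH_RE.match(line)
            if matched:
                curve.append(float(matched.group(2)))
                if int(matched.group(1)) >= num_epochs:
                    break
    return curve
```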
@@ -53,7 +32,7 @@ def draw_learning_curves(args, learning_curves):
         label = name
         plt.plot(epochs, learning_curves[name], label = label)
     plt.xlabel("Epochs")
-    plt.ylabel("Best Valid MRR")
+    plt.ylabel("MRR")
     plt.legend(title=args.legend_title)
     plt.savefig(os.path.join(args.out_path, str(round(datetime.utcnow().timestamp() * 1000)) + '.' + args.fig_filetype))
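The y-axis relabel matches the parser change: the curve now holds the raw per-epoch validation MRR rather than a running best. As a quick sanity check on the pattern (using the hypothetical `EPOCH_RE` from the sketch above), it fires only on the `[Epoch n]` summary lines and not on the `[E:n| step]` progress lines, so each log yields at most one point per epoch:

```python
summary = '2023-05-27 07:51:18,545 - [INFO] - [Epoch 9]: Training Loss: 0.0011709, Valid MRR: 0.33331,'
progress = '2023-05-27 07:48:22,543 - [INFO] - [E:9| 1500]: Train Loss:0.0011709, Val MRR:0.31001, wikidata12k_0.001'

print(EPOCH_RE.match(summary).groups())  # ('9', '0.33331')
print(EPOCH_RE.match(progress))          # None
```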
Some files were not shown because too many files have changed in this diff