This repository was archived by the owner on Nov 23, 2022. It is now read-only.

Commit 6e20827
Drop MAD
* Monitoring MAD makes some horizons not fit at all
1 parent: 17c110f

10 files changed (+40 -40 lines)
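A likely reading of the commit message (an interpretation, not stated in the repository): MAD averages absolute errors, so a forecast horizon that misses badly only nudges the monitored value, and Optuna pruning, checkpointing, and early stopping can all favor trials where some horizons barely fit. MSE squares the errors, so a badly fitting horizon dominates the monitored value. A small illustrative comparison with made-up residuals:

    # Illustrative only: compare how MAD and MSE react to one horizon
    # that fits well and one with a single large miss.
    import numpy as np

    residuals_good = np.array([0.5, -0.4, 0.3, -0.6])   # horizon that fits
    residuals_bad = np.array([0.5, -0.4, 0.3, -6.0])    # horizon with one large miss

    for name, r in [("good", residuals_good), ("bad", residuals_bad)]:
        print(name, "MAD:", np.mean(np.abs(r)), "MSE:", np.mean(r ** 2))
    # MAD grows about 4x (0.45 -> 1.8) while MSE grows about 40x (~0.22 -> ~9.1),
    # so the monitored metric now punishes the badly fitting horizon much harder.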

mise/ml/mlp_mul_ms.py

Lines changed: 4 additions & 4 deletions

@@ -263,15 +263,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -356,12 +356,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/mlp_mul_ms_mccr.py

Lines changed: 4 additions & 4 deletions

@@ -264,15 +264,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -358,12 +358,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/mlp_mul_transformer.py

Lines changed: 4 additions & 4 deletions

@@ -273,15 +273,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -382,12 +382,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/mlp_mul_transformer_mccr.py

Lines changed: 4 additions & 4 deletions

@@ -267,15 +267,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -377,12 +377,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/mlp_uni_ms.py

Lines changed: 4 additions & 4 deletions

@@ -229,15 +229,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -320,12 +320,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/mlp_uni_ms_mccr.py

Lines changed: 4 additions & 4 deletions

@@ -233,15 +233,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -325,12 +325,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/rnn_mul_lstnet_skip.py

Lines changed: 4 additions & 4 deletions

@@ -264,15 +264,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -378,12 +378,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/rnn_mul_lstnet_skip_mccr.py

Lines changed: 4 additions & 4 deletions

@@ -264,15 +264,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -379,12 +379,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/rnn_uni_attn.py

Lines changed: 4 additions & 4 deletions

@@ -222,15 +222,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -305,12 +305,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,

mise/ml/rnn_uni_attn_mccr.py

Lines changed: 4 additions & 4 deletions

@@ -223,15 +223,15 @@ def objective(trial):
             logger=True,
             checkpoint_callback=False,
             callbacks=[PyTorchLightningPruningCallback(
-                trial, monitor="valid/MAD")])
+                trial, monitor="valid/MSE")])
 
         trainer.fit(model)
 
         # Don't Log
         # hyperparameters = model.hparams
         # trainer.logger.log_hyperparams(hyperparameters)
 
-        return trainer.callback_metrics.get("valid/MAD")
+        return trainer.callback_metrics.get("valid/MSE")
 
     if n_trials > 1:
         study = optuna.create_study(direction="minimize")
@@ -307,12 +307,12 @@ def objective(trial):
     test_dataset.to_csv(model.data_dir / ("df_testset_" + target + ".csv"))
 
     checkpoint_callback = pl.callbacks.ModelCheckpoint(
-        os.path.join(model_dir, "train_{epoch}_{valid/MAD:.2f}"), monitor="valid/MAD",
+        os.path.join(model_dir, "train_{epoch}_{valid/MSE:.2f}"), monitor="valid/MSE",
         period=10
     )
 
     early_stop_callback = EarlyStopping(
-        monitor='valid/MAD',
+        monitor='valid/MSE',
         min_delta=0.001,
         patience=30,
         verbose=True,
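All three hooks touched by this commit (the Optuna PyTorchLightningPruningCallback, ModelCheckpoint, and EarlyStopping) monitor the same key, so the LightningModule has to log a value under exactly "valid/MSE" during validation. A minimal self-contained sketch of how that logging could look, assuming PyTorch Lightning ~1.x; the module, layer sizes, and optimizer here are placeholders, not the repository's models:

    # Sketch only: a LightningModule that logs "valid/MSE" so the callbacks
    # changed in this commit have a metric to monitor.
    import torch
    import torch.nn.functional as F
    import pytorch_lightning as pl

    class SketchRegressor(pl.LightningModule):
        def __init__(self, in_dim=8, out_dim=1):
            super().__init__()
            self.net = torch.nn.Linear(in_dim, out_dim)

        def forward(self, x):
            return self.net(x)

        def training_step(self, batch, batch_idx):
            x, y = batch
            loss = F.mse_loss(self(x), y)
            self.log("train/MSE", loss)
            return loss

        def validation_step(self, batch, batch_idx):
            x, y = batch
            # The key must match monitor="valid/MSE" in the callbacks above.
            self.log("valid/MSE", F.mse_loss(self(x), y))

        def configure_optimizers(self):
            return torch.optim.Adam(self.parameters(), lr=1e-3)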
