Commit 9df9de3a authored by Frisinghelli Daniel

Setup for hyperparameter grid search.

parent f655da60
@@ -131,19 +131,19 @@ OPTIM = torch.optim.Adam
 BATCH_SIZE = 16
 # maximum learning rate determined from learning rate range test
-if PREDICTAND is 'tasmin':
+if PREDICTAND == 'tasmin':
     if isinstance(LOSS, L1Loss):
         MAX_LR = 0.001 if OPTIM is torch.optim.Adam else 0.004
     if isinstance(LOSS, MSELoss):
         MAX_LR = 0.001 if OPTIM is torch.optim.Adam else 0.002
-if PREDICTAND is 'tasmax':
+if PREDICTAND == 'tasmax':
     if isinstance(LOSS, L1Loss):
         MAX_LR = 0.001
     if isinstance(LOSS, MSELoss):
         MAX_LR = 0.001 if OPTIM is torch.optim.Adam else 0.004
-if PREDICTAND is 'pr':
+if PREDICTAND == 'pr':
     if isinstance(LOSS, L1Loss):
         MAX_LR = 0.001
     if isinstance(LOSS, MSELoss):
...
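The hunk above pins one maximum learning rate per combination of predictand, loss function, and optimizer, which is what the commit message's hyperparameter grid search iterates over. As a rough sketch of that idea (not code from this repository), the grid could be enumerated with itertools.product; the names PREDICTANDS, LOSSES, and OPTIMIZERS are assumptions, and torch.optim.SGD merely stands in for the non-Adam optimizer that the hunk does not name:

import itertools

import torch
from torch.nn import L1Loss, MSELoss

# Hypothetical grid over the hyperparameters appearing in the hunk above.
PREDICTANDS = ['tasmin', 'tasmax', 'pr']
LOSSES = [L1Loss(), MSELoss()]
OPTIMIZERS = [torch.optim.Adam, torch.optim.SGD]  # SGD is a placeholder

for predictand, loss, optim in itertools.product(PREDICTANDS, LOSSES, OPTIMIZERS):
    # Each combination would be trained with the MAX_LR selected by the
    # if-blocks in the configuration above.
    print(predictand, type(loss).__name__, optim.__name__)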
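The comment "maximum learning rate determined from learning rate range test" suggests that MAX_LR feeds a cyclical schedule, although the hunk itself does not show the training loop. A minimal sketch of how such a value is commonly consumed with PyTorch's OneCycleLR, using a placeholder model and step count that are not taken from this repository:

import torch

MAX_LR = 0.001                                   # value picked by the range test
model = torch.nn.Linear(10, 1)                   # placeholder model
optimizer = torch.optim.Adam(model.parameters(), lr=MAX_LR)
scheduler = torch.optim.lr_scheduler.OneCycleLR(
    optimizer, max_lr=MAX_LR, total_steps=1000)  # placeholder step count

for step in range(1000):
    # forward pass and loss.backward() would go here in real training
    optimizer.step()
    scheduler.step()  # advance the one-cycle learning rate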