Skip to content
Snippets Groups Projects
Commit df4ba10c, authored by Frisinghelli Daniel
Browse files

Decreased number of epochs to 20.

parent 7b6fa4e7
No related branches found
No related tags found
No related merge requests found
@@ -4,7 +4,6 @@
 # -*- coding: utf-8 -*-

 # builtins
-import sys
 import time
 import logging
 from datetime import timedelta
@@ -19,7 +18,6 @@ from torch.utils.data import DataLoader
 # locals
 from pysegcnn.core.utils import search_files
 from pysegcnn.core.trainer import NetworkTrainer, LogConfig
-from pysegcnn.core.models import Network
 from pysegcnn.core.logging import log_conf
 from climax.core.dataset import ERA5Dataset, NetCDFDataset
 from climax.core.loss import MSELoss, L1Loss
@@ -36,11 +34,10 @@ LOGGER = logging.getLogger(__name__)
 # network training configuration
 TRAIN_CONFIG = {
     'checkpoint_state': {},
-    'epochs': 75,
+    'epochs': 20,
     'save': True,
     'save_loaders': False,
     'early_stop': False,
-    'patience': 100,
     'multi_gpu': True,
     'classification': False,
     'clip_gradients': False
@@ -51,7 +48,7 @@ MIN_LR = 1e-4
 # learning rate scheduler: increase lr each epoch
 LR_SCHEDULER = torch.optim.lr_scheduler.ExponentialLR
-LR_SCHEDULER_PARAMS = {'gamma': 1.15}
+LR_SCHEDULER_PARAMS = {'gamma': 1.6}

 if __name__ == '__main__':
...
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.