# Build the spotpython sequential-parameter-optimization tuner and start the run.
# `fun` is the objective (trains a model and returns val_loss/hp_metric),
# `fun_control` carries the hyperparameter search space and experiment settings,
# `design_control` configures the initial design.
# NOTE(review): assumes `spot`, `fun`, `fun_control`, and `design_control` are
# defined earlier in the session — confirm against the surrounding notebook.
spot_tuner = spot.Spot(fun=fun, fun_control=fun_control, design_control=design_control)
res = spot_tuner.run()
In fun(): config:
{'act_fn': Sigmoid(),
'batch_norm': False,
'batch_size': 32,
'dropout_prob': np.float64(0.010469763733360567),
'epochs': 16,
'initialization': 'xavier_uniform',
'l1': 16,
'lr_mult': np.float64(8.212543453926155),
'optimizer': 'Adam',
'patience': 4}
train_model result: {'val_loss': 24158.83203125, 'hp_metric': 24158.83203125}
In fun(): config:
{'act_fn': ReLU(),
'batch_norm': False,
'batch_size': 16,
'dropout_prob': np.float64(0.0184251494885258),
'epochs': 32,
'initialization': 'kaiming_normal',
'l1': 8,
'lr_mult': np.float64(6.2144595555349165),
'optimizer': 'Adadelta',
'patience': 8}
train_model result: {'val_loss': 23447.546875, 'hp_metric': 23447.546875}
In fun(): config:
{'act_fn': ELU(),
'batch_norm': True,
'batch_size': 32,
'dropout_prob': np.float64(0.00996276270809942),
'epochs': 64,
'initialization': 'Default',
'l1': 16,
'lr_mult': np.float64(17.072444874507983),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 7307.08984375, 'hp_metric': 7307.08984375}
In fun(): config:
{'act_fn': LeakyReLU(),
'batch_norm': True,
'batch_size': 32,
'dropout_prob': np.float64(0.004305336774252681),
'epochs': 8,
'initialization': 'kaiming_normal',
'l1': 8,
'lr_mult': np.float64(0.5038833291649055),
'optimizer': 'Adamax',
'patience': 4}
train_model result: {'val_loss': 23786.861328125, 'hp_metric': 23786.861328125}
In fun(): config:
{'act_fn': Tanh(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.021718144359373085),
'epochs': 32,
'initialization': 'kaiming_uniform',
'l1': 16,
'lr_mult': np.float64(15.991195791187765),
'optimizer': 'Adam',
'patience': 8}
train_model result: {'val_loss': 22770.90625, 'hp_metric': 22770.90625}
In fun(): config:
{'act_fn': LeakyReLU(),
'batch_norm': False,
'batch_size': 16,
'dropout_prob': np.float64(0.023931753071792624),
'epochs': 16,
'initialization': 'xavier_normal',
'l1': 16,
'lr_mult': np.float64(2.4181463288559466),
'optimizer': 'Adamax',
'patience': 8}
train_model result: {'val_loss': 23846.84375, 'hp_metric': 23846.84375}
In fun(): config:
{'act_fn': ELU(),
'batch_norm': False,
'batch_size': 32,
'dropout_prob': np.float64(0.0074444117802003025),
'epochs': 8,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(19.065991931545753),
'optimizer': 'Adam',
'patience': 8}
train_model result: {'val_loss': 23610.291015625, 'hp_metric': 23610.291015625}
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.0012790404219919403),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(4.855811791679552),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 4652.201171875, 'hp_metric': 4652.201171875}
In fun(): config:
{'act_fn': Tanh(),
'batch_norm': False,
'batch_size': 16,
'dropout_prob': np.float64(0.0153979445945591),
'epochs': 32,
'initialization': 'xavier_uniform',
'l1': 8,
'lr_mult': np.float64(12.138553034122333),
'optimizer': 'Adamax',
'patience': 4}
train_model result: {'val_loss': 22452.896484375, 'hp_metric': 22452.896484375}
In fun(): config:
{'act_fn': ReLU(),
'batch_norm': True,
'batch_size': 32,
'dropout_prob': np.float64(0.013939072152682473),
'epochs': 64,
'initialization': 'xavier_uniform',
'l1': 16,
'lr_mult': np.float64(11.738437881491578),
'optimizer': 'Adam',
'patience': 8}
train_model result: {'val_loss': 22722.826171875, 'hp_metric': 22722.826171875}
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.0033682803176620702),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(8.795451519544661),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 3762.646728515625, 'hp_metric': 3762.646728515625}
spotpython tuning: 3762.646728515625 [----------] 4.64%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.001483504871229586),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(8.965542571854463),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 4189.49609375, 'hp_metric': 4189.49609375}
spotpython tuning: 3762.646728515625 [#---------] 8.04%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.025),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(7.029758527728445),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 3598.41845703125, 'hp_metric': 3598.41845703125}
spotpython tuning: 3598.41845703125 [#---------] 12.40%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.0),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(7.059808792052545),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 4432.4228515625, 'hp_metric': 4432.4228515625}
spotpython tuning: 3598.41845703125 [##--------] 17.87%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.025),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(8.172224667427638),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 17434.79296875, 'hp_metric': 17434.79296875}
spotpython tuning: 3598.41845703125 [#####-----] 47.62%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.025),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(6.863815152944327),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 4271.0537109375, 'hp_metric': 4271.0537109375}
spotpython tuning: 3598.41845703125 [#####-----] 52.20%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.0),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(8.850235720504392),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 9081.8525390625, 'hp_metric': 9081.8525390625}
spotpython tuning: 3598.41845703125 [#####-----] 54.95%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.0009178232930834657),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(8.754200791960354),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 8019.37744140625, 'hp_metric': 8019.37744140625}
spotpython tuning: 3598.41845703125 [######----] 58.49%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.0024940202257315303),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(8.986811004365546),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 3723.706298828125, 'hp_metric': 3723.706298828125}
spotpython tuning: 3598.41845703125 [#######---] 65.93%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.021028581178670843),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(7.028166774184535),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 18928.12109375, 'hp_metric': 18928.12109375}
spotpython tuning: 3598.41845703125 [##########] 96.55%
In fun(): config:
{'act_fn': Swish(),
'batch_norm': True,
'batch_size': 16,
'dropout_prob': np.float64(0.025),
'epochs': 128,
'initialization': 'kaiming_uniform',
'l1': 8,
'lr_mult': np.float64(7.101842452512002),
'optimizer': 'Adadelta',
'patience': 4}
train_model result: {'val_loss': 4800.939453125, 'hp_metric': 4800.939453125}
spotpython tuning: 3598.41845703125 [##########] 100.00% Done...