Hi, thanks for the quick reply. The model file is pretty large — an .h5 file over 2 GB — and does not get much smaller when zipped.
I'll see if I can reproduce the issue with a smaller config.
Here is the dfl_sae_state.json; I am not sure whether it is helpful.
Code: Select all
{
"name": "dfl_sae",
"sessions": {
"1": {
"timestamp": 1636302930.190047,
"no_logs": false,
"loss_names": [
"total",
"face_a_0",
"face_a_1",
"face_a_2",
"mask_a",
"face_b_0",
"face_b_1",
"face_b_2",
"mask_b"
],
"batchsize": 16,
"iterations": 9501,
"config": {
"learning_rate": 5e-05,
"epsilon_exponent": -7,
"allow_growth": true,
"nan_protection": true,
"convert_batchsize": 4,
"eye_multiplier": 3,
"mouth_multiplier": 2,
"clipnorm": true
}
},
"2": {
"timestamp": 1636313397.1599169,
"no_logs": false,
"loss_names": [
"total",
"face_a_0",
"face_a_1",
"face_a_2",
"mask_a",
"face_b_0",
"face_b_1",
"face_b_2",
"mask_b"
],
"batchsize": 16,
"iterations": 35532,
"config": {
"learning_rate": 5e-05,
"epsilon_exponent": -7,
"allow_growth": true,
"nan_protection": true,
"convert_batchsize": 4,
"eye_multiplier": 3,
"mouth_multiplier": 2,
"clipnorm": true
}
}
},
"lowest_avg_loss": {
"a": 0.06255769795097876,
"b": 0.06906740904378239
},
"iterations": 45033,
"config": {
"centering": "face",
"coverage": 90.0,
"optimizer": "adam",
"learning_rate": 5e-05,
"epsilon_exponent": -7,
"allow_growth": true,
"mixed_precision": true,
"nan_protection": true,
"convert_batchsize": 16,
"loss_function": "ssim",
"mask_loss_function": "mse",
"l2_reg_term": 100,
"eye_multiplier": 3,
"mouth_multiplier": 2,
"penalized_mask_loss": true,
"mask_type": "bisenet-fp_face",
"mask_blur_kernel": 3,
"mask_threshold": 4,
"learn_mask": true,
"input_size": 256,
"clipnorm": true,
"architecture": "df",
"autoencoder_dims": 640,
"encoder_dims": 64,
"decoder_dims": 32,
"multiscale_decoder": true
}
}