It said there was an unexpected error and that I should include the crash report. I don't think I can upload the file here, so I've copied and pasted the text instead.
Code:
02/17/2020 20:53:17 MainProcess _run_1 training_data initialize DEBUG Initialized constants: {'clahe_base_contrast': 2, 'tgt_slices': slice(20, 236, None), 'warp_mapx': '[[[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n ...\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]]', 'warp_mapy': '[[[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n ...\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]]', 'warp_pad': 80, 'warp_slices': slice(8, -8, None), 'warp_lm_edge_anchors': '[[[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n ...\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]]', 'warp_lm_grids': '[[[ 0. 0. 0. ... 0. 0. 0.]\n [ 1. 1. 1. ... 1. 1. 1.]\n [ 2. 2. 2. ... 2. 2. 2.]\n ...\n [253. 253. 253. ... 253. 253. 253.]\n [254. 254. 254. ... 254. 254. 254.]\n [255. 255. 255. ... 255. 255. 255.]]\n\n [[ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n ...\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]]]'}
02/17/2020 20:53:17 MainProcess _run_0 training_data initialize DEBUG Initialized constants: {'clahe_base_contrast': 2, 'tgt_slices': slice(20, 236, None), 'warp_mapx': '[[[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n ...\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]]', 'warp_mapy': '[[[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n ...\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]]', 'warp_pad': 80, 'warp_slices': slice(8, -8, None), 'warp_lm_edge_anchors': '[[[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n ...\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]]', 'warp_lm_grids': '[[[ 0. 0. 0. ... 0. 0. 0.]\n [ 1. 1. 1. ... 1. 1. 1.]\n [ 2. 2. 2. ... 2. 2. 2.]\n ...\n [253. 253. 253. ... 253. 253. 253.]\n [254. 254. 254. ... 254. 254. 254.]\n [255. 255. 255. ... 255. 255. 255.]]\n\n [[ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n ...\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]]]'}
02/17/2020 20:53:17 MainProcess _run_1 training_data initialize DEBUG Initialized constants: {'clahe_base_contrast': 2, 'tgt_slices': slice(20, 236, None), 'warp_mapx': '[[[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n ...\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]]', 'warp_mapy': '[[[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n ...\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]]', 'warp_pad': 80, 'warp_slices': slice(8, -8, None), 'warp_lm_edge_anchors': '[[[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n ...\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]]', 'warp_lm_grids': '[[[ 0. 0. 0. ... 0. 0. 0.]\n [ 1. 1. 1. ... 1. 1. 1.]\n [ 2. 2. 2. ... 2. 2. 2.]\n ...\n [253. 253. 253. ... 253. 253. 253.]\n [254. 254. 254. ... 254. 254. 254.]\n [255. 255. 255. ... 255. 255. 255.]]\n\n [[ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n ...\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]]]'}
02/17/2020 20:53:17 MainProcess _run_0 training_data initialize DEBUG Initialized constants: {'clahe_base_contrast': 2, 'tgt_slices': slice(20, 236, None), 'warp_mapx': '[[[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n ...\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]\n\n [[ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]\n [ 20. 74. 128. 182. 236.]]]', 'warp_mapy': '[[[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n ...\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]\n\n [[ 20. 20. 20. 20. 20.]\n [ 74. 74. 74. 74. 74.]\n [128. 128. 128. 128. 128.]\n [182. 182. 182. 182. 182.]\n [236. 236. 236. 236. 236.]]]', 'warp_pad': 80, 'warp_slices': slice(8, -8, None), 'warp_lm_edge_anchors': '[[[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n ...\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]\n\n [[ 0 0]\n [ 0 255]\n [255 255]\n ...\n [127 255]\n [255 127]\n [ 0 127]]]', 'warp_lm_grids': '[[[ 0. 0. 0. ... 0. 0. 0.]\n [ 1. 1. 1. ... 1. 1. 1.]\n [ 2. 2. 2. ... 2. 2. 2.]\n ...\n [253. 253. 253. ... 253. 253. 253.]\n [254. 254. 254. ... 254. 254. 254.]\n [255. 255. 255. ... 255. 255. 255.]]\n\n [[ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n ...\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]\n [ 0. 1. 2. ... 253. 254. 255.]]]'}
02/17/2020 20:53:18 MainProcess _training_0 module_wrapper _tfmw_add_deprecation_warning DEBUG From C:\Users\choyt\MiniConda3\envs\faceswap\lib\site-packages\keras\backend\tensorflow_backend.py:986: The name tf.assign_add is deprecated. Please use tf.compat.v1.assign_add instead.\n
02/17/2020 20:53:18 MainProcess _training_0 module_wrapper _tfmw_add_deprecation_warning DEBUG From C:\Users\choyt\MiniConda3\envs\faceswap\lib\site-packages\keras\backend\tensorflow_backend.py:973: The name tf.assign is deprecated. Please use tf.compat.v1.assign instead.\n
02/17/2020 20:53:24 MainProcess _training_0 _base generate_preview DEBUG Generating preview
02/17/2020 20:53:24 MainProcess _training_0 _base largest_face_index DEBUG 0
02/17/2020 20:53:24 MainProcess _training_0 _base compile_sample DEBUG Compiling samples: (side: 'a', samples: 14)
02/17/2020 20:53:27 MainProcess _training_0 _base generate_preview DEBUG Generating preview
02/17/2020 20:53:27 MainProcess _training_0 _base largest_face_index DEBUG 0
02/17/2020 20:53:27 MainProcess _training_0 _base compile_sample DEBUG Compiling samples: (side: 'b', samples: 14)
02/17/2020 20:53:27 MainProcess _training_0 _base show_sample DEBUG Showing sample
02/17/2020 20:53:27 MainProcess _training_0 _base _get_predictions DEBUG Getting Predictions
02/17/2020 20:53:28 MainProcess _training_0 _base _get_predictions DEBUG Returning predictions: {'a_a': (14, 64, 64, 3), 'b_a': (14, 64, 64, 3), 'a_b': (14, 64, 64, 3), 'b_b': (14, 64, 64, 3)}
02/17/2020 20:53:28 MainProcess _training_0 _base _to_full_frame DEBUG side: 'a', number of sample arrays: 3, prediction.shapes: [(14, 64, 64, 3), (14, 64, 64, 3)])
02/17/2020 20:53:28 MainProcess _training_0 _base _frame_overlay DEBUG full_size: 256, target_size: 216, color: (0, 0, 255)
02/17/2020 20:53:28 MainProcess _training_0 _base _frame_overlay DEBUG Overlayed background. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'a', sample.shape: (14, 64, 64, 3), target_size: 216, scale: 3.375)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'a' shape: (14, 216, 216, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'a', sample.shape: (14, 64, 64, 3), target_size: 216, scale: 3.375)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'a' shape: (14, 216, 216, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'a', sample.shape: (14, 64, 64, 3), target_size: 216, scale: 3.375)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'a' shape: (14, 216, 216, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _overlay_foreground DEBUG Overlayed foreground. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _overlay_foreground DEBUG Overlayed foreground. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _overlay_foreground DEBUG Overlayed foreground. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'a', sample.shape: (14, 256, 256, 3), target_size: 128, scale: 0.5)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'a' shape: (14, 128, 128, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'a', sample.shape: (14, 256, 256, 3), target_size: 128, scale: 0.5)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'a' shape: (14, 128, 128, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'a', sample.shape: (14, 256, 256, 3), target_size: 128, scale: 0.5)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'a' shape: (14, 128, 128, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG side: 'a', width: 128
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG height: 32, total_width: 384
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG texts: ['Original (A)', 'Original > Original', 'Original > Swap'], text_sizes: [(72, 9), (116, 9), (102, 9)], text_x: [28, 134, 269], text_y: 20
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG header_box.shape: (32, 384, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _to_full_frame DEBUG side: 'b', number of sample arrays: 3, prediction.shapes: [(14, 64, 64, 3), (14, 64, 64, 3)])
02/17/2020 20:53:28 MainProcess _training_0 _base _frame_overlay DEBUG full_size: 256, target_size: 216, color: (0, 0, 255)
02/17/2020 20:53:28 MainProcess _training_0 _base _frame_overlay DEBUG Overlayed background. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'b', sample.shape: (14, 64, 64, 3), target_size: 216, scale: 3.375)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'b' shape: (14, 216, 216, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'b', sample.shape: (14, 64, 64, 3), target_size: 216, scale: 3.375)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'b' shape: (14, 216, 216, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'b', sample.shape: (14, 64, 64, 3), target_size: 216, scale: 3.375)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'b' shape: (14, 216, 216, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _overlay_foreground DEBUG Overlayed foreground. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _overlay_foreground DEBUG Overlayed foreground. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _overlay_foreground DEBUG Overlayed foreground. Shape: (14, 256, 256, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'b', sample.shape: (14, 256, 256, 3), target_size: 128, scale: 0.5)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'b' shape: (14, 128, 128, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'b', sample.shape: (14, 256, 256, 3), target_size: 128, scale: 0.5)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'b' shape: (14, 128, 128, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resizing sample: (side: 'b', sample.shape: (14, 256, 256, 3), target_size: 128, scale: 0.5)
02/17/2020 20:53:28 MainProcess _training_0 _base _resize_sample DEBUG Resized sample: (side: 'b' shape: (14, 128, 128, 3))
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG side: 'b', width: 128
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG height: 32, total_width: 384
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG texts: ['Swap (B)', 'Swap > Swap', 'Swap > Original'], text_sizes: [(59, 9), (87, 9), (102, 9)], text_x: [34, 148, 269], text_y: 20
02/17/2020 20:53:28 MainProcess _training_0 _base _get_headers DEBUG header_box.shape: (32, 384, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _duplicate_headers DEBUG side: a header.shape: (32, 384, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _duplicate_headers DEBUG side: b header.shape: (32, 384, 3)
02/17/2020 20:53:28 MainProcess _training_0 _base _stack_images DEBUG Stack images
02/17/2020 20:53:28 MainProcess _training_0 _base get_transpose_axes DEBUG Even number of images to stack
02/17/2020 20:53:28 MainProcess _training_0 _base _stack_images DEBUG Stacked images
02/17/2020 20:53:28 MainProcess _training_0 _base show_sample DEBUG Compiled sample
02/17/2020 20:53:28 MainProcess _training_0 _base save_models DEBUG Backing up and saving models
02/17/2020 20:53:28 MainProcess _training_0 _base get_save_averages DEBUG Getting save averages
02/17/2020 20:53:28 MainProcess _training_0 _base get_save_averages DEBUG Average losses since last save: {'a': 0.31185588240623474, 'b': 0.18341457843780518}
02/17/2020 20:53:28 MainProcess _training_0 _base should_backup DEBUG Setting initial save iteration loss average for 'a': 0.31185588240623474
02/17/2020 20:53:28 MainProcess _training_0 _base should_backup DEBUG Setting initial save iteration loss average for 'b': 0.18341457843780518
02/17/2020 20:53:28 MainProcess _training_0 _base should_backup DEBUG Lowest historical save iteration loss average: {'a': 0.31185588240623474, 'b': 0.18341457843780518}
02/17/2020 20:53:28 MainProcess _training_0 _base should_backup DEBUG Updating lowest save iteration average for 'a': 0.31185588240623474
02/17/2020 20:53:28 MainProcess _training_0 _base should_backup DEBUG Updating lowest save iteration average for 'b': 0.18341457843780518
02/17/2020 20:53:28 MainProcess _training_0 _base should_backup DEBUG Backing up: True
02/17/2020 20:53:28 MainProcess _training_0 _base save_models INFO Backing up models...
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_0 backup_restore backup_model VERBOSE Backing up: 'D:\Program\Fake\Model\original_decoder_A.h5' to 'D:\Program\Fake\Model\original_decoder_A.h5.bk'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_0 _base save DEBUG Saving model: 'D:\Program\Fake\Model\original_decoder_A.h5'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_1 backup_restore backup_model VERBOSE Backing up: 'D:\Program\Fake\Model\original_decoder_B.h5' to 'D:\Program\Fake\Model\original_decoder_B.h5.bk'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_1 _base save DEBUG Saving model: 'D:\Program\Fake\Model\original_decoder_B.h5'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_2 backup_restore backup_model VERBOSE Backing up: 'D:\Program\Fake\Model\original_encoder.h5' to 'D:\Program\Fake\Model\original_encoder.h5.bk'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_2 _base save DEBUG Saving model: 'D:\Program\Fake\Model\original_encoder.h5'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_3 _base save DEBUG Saving State
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_3 backup_restore backup_model VERBOSE Backing up: 'D:\Program\Fake\Model\original_state.json' to 'D:\Program\Fake\Model\original_state.json.bk'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_3 serializer save DEBUG filename: D:\Program\Fake\Model\original_state.json, data type: <class 'dict'>
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_3 serializer _check_extension DEBUG Original filename: 'D:\Program\Fake\Model\original_state.json', final filename: 'D:\Program\Fake\Model\original_state.json'
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_3 serializer marshal DEBUG data type: <class 'dict'>
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_3 serializer marshal DEBUG returned data type: <class 'bytes'>
02/17/2020 20:53:28 MainProcess ThreadPoolExecutor-16_3 _base save DEBUG Saved State
02/17/2020 20:53:29 MainProcess _training_0 _base save_models INFO [Saved models] - Average since last save: face_loss_A: 0.31186, face_loss_B: 0.18341
02/17/2020 20:53:31 MainProcess _run_1 multithreading run DEBUG Error in thread (_run_1): tuple index out of range
02/17/2020 20:53:31 MainProcess _training_0 multithreading check_and_raise_error DEBUG Thread error caught: [(<class 'IndexError'>, IndexError('tuple index out of range'), <traceback object at 0x000001C05CAEE888>)]
02/17/2020 20:53:31 MainProcess _training_0 multithreading run DEBUG Error in thread (_training_0): tuple index out of range
02/17/2020 20:53:31 MainProcess _run_0 multithreading run DEBUG Error in thread (_run_0): tuple index out of range
02/17/2020 20:53:31 MainProcess MainThread train _monitor DEBUG Thread error detected
02/17/2020 20:53:31 MainProcess MainThread train _monitor DEBUG Closed Monitor
02/17/2020 20:53:31 MainProcess MainThread train _end_thread DEBUG Ending Training thread
02/17/2020 20:53:31 MainProcess MainThread train _end_thread CRITICAL Error caught! Exiting...
02/17/2020 20:53:31 MainProcess MainThread multithreading join DEBUG Joining Threads: '_training'
02/17/2020 20:53:31 MainProcess MainThread multithreading join DEBUG Joining Thread: '_training_0'
02/17/2020 20:53:31 MainProcess MainThread multithreading join ERROR Caught exception in thread: '_training_0'
Traceback (most recent call last):
  File "C:\Users\choyt\faceswap\lib\cli.py", line 128, in execute_script
    process.process()
  File "C:\Users\choyt\faceswap\scripts\train.py", line 159, in process
    self._end_thread(thread, err)
  File "C:\Users\choyt\faceswap\scripts\train.py", line 199, in _end_thread
    thread.join()
  File "C:\Users\choyt\faceswap\lib\multithreading.py", line 121, in join
    raise thread.err[1].with_traceback(thread.err[2])
  File "C:\Users\choyt\faceswap\lib\multithreading.py", line 37, in run
    self._target(*self._args, **self._kwargs)
  File "C:\Users\choyt\faceswap\scripts\train.py", line 224, in _training
    raise err
  File "C:\Users\choyt\faceswap\scripts\train.py", line 214, in _training
    self._run_training_cycle(model, trainer)
  File "C:\Users\choyt\faceswap\scripts\train.py", line 303, in _run_training_cycle
    trainer.train_one_step(viewer, timelapse)
  File "C:\Users\choyt\faceswap\plugins\train\trainer\_base.py", line 316, in train_one_step
    raise err
  File "C:\Users\choyt\faceswap\plugins\train\trainer\_base.py", line 283, in train_one_step
    loss[side] = batcher.train_one_batch()
  File "C:\Users\choyt\faceswap\plugins\train\trainer\_base.py", line 422, in train_one_batch
    model_inputs, model_targets = self._get_next()
  File "C:\Users\choyt\faceswap\plugins\train\trainer\_base.py", line 452, in _get_next
    batch = next(self._feed)
  File "C:\Users\choyt\faceswap\lib\multithreading.py", line 156, in iterator
    self.check_and_raise_error()
  File "C:\Users\choyt\faceswap\lib\multithreading.py", line 84, in check_and_raise_error
    raise error[1].with_traceback(error[2])
  File "C:\Users\choyt\faceswap\lib\multithreading.py", line 37, in run
    self._target(*self._args, **self._kwargs)
  File "C:\Users\choyt\faceswap\lib\multithreading.py", line 145, in _run
    for item in self.generator(*self._gen_args, **self._gen_kwargs):
  File "C:\Users\choyt\faceswap\lib\training_data.py", line 189, in _minibatch
    yield self._process_batch(img_paths, side)
  File "C:\Users\choyt\faceswap\lib\training_data.py", line 216, in _process_batch
    batch[..., :3] = self._processing.color_adjust(batch[..., :3])
  File "C:\Users\choyt\faceswap\lib\training_data.py", line 522, in color_adjust
    batch = batch_convert_color(batch, "BGR2LAB")
  File "C:\Users\choyt\faceswap\lib\image.py", line 265, in batch_convert_color
    batch = batch.reshape((original_shape[0] * original_shape[1], *original_shape[2:]))
IndexError: tuple index out of range
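In case it helps with diagnosis: the final frame is a reshape in lib/image.py that appears to assume the incoming batch is a 4-D array of (num_images, height, width, channels). Below is a minimal sketch, not the actual faceswap code (the helper name reshape_like_batch_convert_color is mine), showing one way the same "tuple index out of range" can be reproduced: if the images stacked into the batch don't all share the same shape, numpy produces a 1-D object array, so original_shape has only one entry and indexing original_shape[1] fails before the reshape even runs. I'm not certain this is what happened in my run; it's just an illustration of the failure mode.
Code:
import numpy as np

def reshape_like_batch_convert_color(batch):
    # Mirrors the reshape on lib/image.py line 265 in the traceback above
    # (illustrative stand-in only, not the real faceswap function).
    original_shape = batch.shape
    return batch.reshape((original_shape[0] * original_shape[1], *original_shape[2:]))

# A well-formed batch (num_images, height, width, channels) reshapes cleanly:
good_batch = np.zeros((2, 64, 64, 3), dtype="uint8")
print(reshape_like_batch_convert_color(good_batch).shape)  # (128, 64, 3)

# Images of mismatched sizes collapse into a 1-D object array, so
# original_shape[1] does not exist and the same IndexError is raised:
ragged_batch = np.array([np.zeros((64, 64, 3)), np.zeros((128, 128, 3))], dtype=object)
try:
    reshape_like_batch_convert_color(ragged_batch)
except IndexError as err:
    print(err)  # tuple index out of range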
============ System Information ============
encoding: cp950
git_branch: master
git_commits: 7024047 Merge branch 'staging'
gpu_cuda: 9.0
gpu_cudnn: No global version found. Check Conda packages for Conda cuDNN
gpu_devices: GPU_0: GeForce RTX 2060
gpu_devices_active: GPU_0
gpu_driver: 442.19
gpu_vram: GPU_0: 6144MB
os_machine: AMD64
os_platform: Windows-10-10.0.18362-SP0
os_release: 10
py_command: C:\Users\choyt\faceswap\faceswap.py train -A D:/Program/Fake/Model A data -B D:/Program/Fake/Model B data -m D:/Program/Fake/Model -t original -bs 64 -it 1000000 -g 1 -s 100 -ss 25000 -ps 50 -L INFO -gui
py_conda_version: conda 4.8.2
py_implementation: CPython
py_version: 3.7.6
py_virtual_env: True
sys_cores: 24
sys_processor: AMD64 Family 23 Model 113 Stepping 0, AuthenticAMD
sys_ram: Total: 16294MB, Available: 6549MB, Used: 9744MB, Free: 6549MB
=============== Pip Packages ===============
absl-py==0.9.0
astor==0.8.0
certifi==2019.11.28
cloudpickle==1.3.0
cycler==0.10.0
cytoolz==0.10.1
dask==2.10.1
decorator==4.4.1
fastcluster==1.1.26
ffmpy==0.2.2
gast==0.2.2
google-pasta==0.1.8
grpcio==1.16.1
h5py==2.9.0
imageio==2.6.1
imageio-ffmpeg==0.3.0
joblib==0.14.1
Keras==2.2.4
Keras-Applications==1.0.8
Keras-Preprocessing==1.1.0
kiwisolver==1.1.0
Markdown==3.1.1
matplotlib==3.1.3
mkl-fft==1.0.15
mkl-random==1.1.0
mkl-service==2.3.0
networkx==2.4
numpy==1.17.4
nvidia-ml-py3==7.352.1
olefile==0.46
opencv-python==4.1.2.30
opt-einsum==3.1.0
pathlib==1.0.1
Pillow==6.2.1
protobuf==3.11.3
psutil==5.6.7
pyparsing==2.4.6
pyreadline==2.1
python-dateutil==2.8.1
pytz==2019.3
PyWavelets==1.1.1
pywin32==227
PyYAML==5.3
scikit-image==0.16.2
scikit-learn==0.22.1
scipy==1.4.1
six==1.14.0
tensorboard==2.0.0
tensorflow==1.15.0
tensorflow-estimator==1.15.1
termcolor==1.1.0
toolz==0.10.0
toposort==1.5
tornado==6.0.3
tqdm==4.42.1
Werkzeug==0.16.1
wincertstore==0.2
wrapt==1.11.2
============== Conda Packages ==============
# packages in environment at C:\Users\choyt\MiniConda3\envs\faceswap:
#
# Name Version Build Channel
_tflow_select 2.1.0 gpu
absl-py 0.9.0 py37_0
astor 0.8.0 py37_0
blas 1.0 mkl
ca-certificates 2020.1.1 0
certifi 2019.11.28 py37_0
cloudpickle 1.3.0 py_0
cudatoolkit 10.0.130 0
cudnn 7.6.5 cuda10.0_0
cycler 0.10.0 py37_0
cytoolz 0.10.1 py37he774522_0
dask-core 2.10.1 py_0
decorator 4.4.1 py_0
fastcluster 1.1.26 py37he350917_0 conda-forge
ffmpeg 4.2 h6538335_0 conda-forge
ffmpy 0.2.2 pypi_0 pypi
freetype 2.9.1 ha9979f8_1
gast 0.2.2 py37_0
git 2.23.0 h6bb4b03_0
google-pasta 0.1.8 py_0
grpcio 1.16.1 py37h351948d_1
h5py 2.9.0 py37h5e291fa_0
hdf5 1.10.4 h7ebc959_0
icc_rt 2019.0.0 h0cc432a_1
icu 58.2 ha66f8fd_1
imageio 2.6.1 py37_0
imageio-ffmpeg 0.3.0 py_0 conda-forge
intel-openmp 2020.0 166
joblib 0.14.1 py_0
jpeg 9b hb83a4c4_2
keras 2.2.4 0
keras-applications 1.0.8 py_0
keras-base 2.2.4 py37_0
keras-preprocessing 1.1.0 py_1
kiwisolver 1.1.0 py37ha925a31_0
libpng 1.6.37 h2a8f88b_0
libprotobuf 3.11.3 h7bd577a_0
libtiff 4.1.0 h56a325e_0
markdown 3.1.1 py37_0
matplotlib 3.1.1 py37hc8f65d3_0
matplotlib-base 3.1.3 py37h64f37c6_0
mkl 2020.0 166
mkl-service 2.3.0 py37hb782905_0
mkl_fft 1.0.15 py37h14836fe_0
mkl_random 1.1.0 py37h675688f_0
networkx 2.4 py_0
numpy 1.17.4 py37h4320e6b_0
numpy-base 1.17.4 py37hc3f5095_0
nvidia-ml-py3 7.352.1 pypi_0 pypi
olefile 0.46 py37_0
opencv-python 4.1.2.30 pypi_0 pypi
openssl 1.1.1d he774522_4
opt_einsum 3.1.0 py_0
pathlib 1.0.1 py37_1
pillow 6.2.1 py37hdc69c19_0
pip 20.0.2 py37_1
protobuf 3.11.3 py37h33f27b4_0
psutil 5.6.7 py37he774522_0
pyparsing 2.4.6 py_0
pyqt 5.9.2 py37h6538335_2
pyreadline 2.1 py37_1
python 3.7.6 h60c2a47_2
python-dateutil 2.8.1 py_0
pytz 2019.3 py_0
pywavelets 1.1.1 py37he774522_0
pywin32 227 py37he774522_1
pyyaml 5.3 py37he774522_0
qt 5.9.7 vc14h73c81de_0
scikit-image 0.16.2 py37h47e9c7a_0
scikit-learn 0.22.1 py37h6288b17_0
scipy 1.4.1 py37h9439919_0
setuptools 45.2.0 py37_0
sip 4.19.8 py37h6538335_0
six 1.14.0 py37_0
sqlite 3.31.1 he774522_0
tensorboard 2.0.0 pyhb38c66f_1
tensorflow 1.15.0 gpu_py37hc3743a6_0
tensorflow-base 1.15.0 gpu_py37h1afeea4_0
tensorflow-estimator 1.15.1 pyh2649769_0
tensorflow-gpu 1.15.0 h0d30ee6_0
termcolor 1.1.0 py37_1
tk 8.6.8 hfa6e2cd_0
toolz 0.10.0 py_0
toposort 1.5 py_3 conda-forge
tornado 6.0.3 py37he774522_3
tqdm 4.42.1 py_0
vc 14.1 h0510ff6_4
vs2015_runtime 14.16.27012 hf0eaf9b_1
werkzeug 0.16.1 py_0
wheel 0.34.2 py37_0
wincertstore 0.2 py37_0
wrapt 1.11.2 py37he774522_0
xz 5.2.4 h2fa13f4_4
yaml 0.1.7 hc54c509_2
zlib 1.2.11 h62dcd97_3
zstd 1.3.7 h508b16e_0
=============== State File =================
{
  "name": "original",
  "sessions": {
    "1": {
      "timestamp": 1581943994.930083,
      "no_logs": false,
      "pingpong": false,
      "loss_names": {
        "a": [
          "face_loss"
        ],
        "b": [
          "face_loss"
        ]
      },
      "batchsize": 64,
      "iterations": 1,
      "config": {
        "learning_rate": 5e-05
      }
    }
  },
  "lowest_avg_loss": {
    "a": 0.31185588240623474,
    "b": 0.18341457843780518
  },
  "iterations": 1,
  "inputs": {
    "face_in:0": [
      64,
      64,
      3
    ]
  },
  "training_size": 256,
  "config": {
    "coverage": 85.0,
    "mask_type": null,
    "mask_blur_kernel": 3,
    "mask_threshold": 4,
    "learn_mask": false,
    "icnr_init": false,
    "conv_aware_init": false,
    "subpixel_upscaling": false,
    "reflect_padding": false,
    "penalized_mask_loss": true,
    "loss_function": "mae",
    "learning_rate": 5e-05,
    "lowmem": false
  }
}
================= Configs ==================
--------- .faceswap ---------
backend: nvidia
--------- convert.ini ---------
[color.color_transfer]
clip: True
preserve_paper: True
[color.manual_balance]
colorspace: HSV
balance_1: 0.0
balance_2: 0.0
balance_3: 0.0
contrast: 0.0
brightness: 0.0
[color.match_hist]
threshold: 99.0
[mask.box_blend]
type: gaussian
distance: 11.0
radius: 5.0
passes: 1
[mask.mask_blend]
type: normalized
kernel_size: 3
passes: 4
threshold: 4
erosion: 0.0
[scaling.sharpen]
method: unsharp_mask
amount: 150
radius: 0.3
threshold: 5.0
[writer.ffmpeg]
container: mp4
codec: libx264
crf: 23
preset: medium
tune: none
profile: auto
level: auto
[writer.gif]
fps: 25
loop: 0
palettesize: 256
subrectangles: False
[writer.opencv]
format: png
draw_transparent: False
jpg_quality: 75
png_compress_level: 3
[writer.pillow]
format: png
draw_transparent: False
optimize: False
gif_interlace: True
jpg_quality: 75
png_compress_level: 3
tif_compression: tiff_deflate
--------- extract.ini ---------
[global]
allow_growth: True
[align.fan]
batch-size: 12
[detect.cv2_dnn]
confidence: 50
[detect.mtcnn]
minsize: 20
threshold_1: 0.6
threshold_2: 0.7
threshold_3: 0.7
scalefactor: 0.709
batch-size: 8
[detect.s3fd]
confidence: 70
batch-size: 4
[mask.unet_dfl]
batch-size: 8
[mask.vgg_clear]
batch-size: 6
[mask.vgg_obstructed]
batch-size: 2
--------- gui.ini ---------
[global]
fullscreen: False
tab: extract
options_panel_width: 30
console_panel_height: 20
icon_size: 14
font: default
font_size: 9
autosave_last_session: prompt
timeout: 120
auto_load_model_stats: True
--------- train.ini ---------
[global]
coverage: 85.0
mask_type: none
mask_blur_kernel: 3
mask_threshold: 4
learn_mask: False
icnr_init: False
conv_aware_init: False
subpixel_upscaling: False
reflect_padding: False
penalized_mask_loss: True
loss_function: mae
learning_rate: 5e-05
[model.dfl_h128]
lowmem: False
[model.dfl_sae]
input_size: 128
clipnorm: True
architecture: df
autoencoder_dims: 0
encoder_dims: 42
decoder_dims: 21
multiscale_decoder: False
[model.dlight]
features: best
details: good
output_size: 256
[model.original]
lowmem: False
[model.realface]
input_size: 64
output_size: 128
dense_nodes: 1536
complexity_encoder: 128
complexity_decoder: 512
[model.unbalanced]
input_size: 128
lowmem: False
clipnorm: True
nodes: 1024
complexity_encoder: 128
complexity_decoder_a: 384
complexity_decoder_b: 512
[model.villain]
lowmem: False
[trainer.original]
preview_images: 14
zoom_amount: 5
rotation_range: 10
shift_range: 5
flip_chance: 50
color_lightness: 30
color_ab: 8
color_clahe_chance: 50
color_clahe_max_size: 4