Solution (original article: https://www.jianshu.com/p/2de8e01af88d)
import tensorflow as tf

# Restore the latest checkpoint. `saver` (a tf.train.Saver) and
# `logs_train_dir` (the training log directory) are assumed to be
# defined earlier in the script.
with tf.Session() as sess:
    tf.get_variable_scope().reuse_variables()
    print("Reading checkpoints...")
    ckpt = tf.train.get_checkpoint_state(logs_train_dir)
    if ckpt and ckpt.model_checkpoint_path:
        # The global step is the numeric suffix of the checkpoint file name.
        global_step = ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1]
        saver.restore(sess, ckpt.model_checkpoint_path)
        print('Loading success, global_step is %s' % global_step)
    else:
        print('No checkpoint file found')
import os
from tensorflow.python import pywrap_tensorflow

model_dir = '/home/mml/siamese_net/logs/train/'
# checkpoint_path = os.path.join(logs_train_dir, 'model.ckpt')
checkpoint_path = os.path.join(model_dir, "model.ckpt-9999")

# Open the checkpoint file directly and print every variable it contains.
reader = pywrap_tensorflow.NewCheckpointReader(checkpoint_path)
var_to_shape_map = reader.get_variable_to_shape_map()
for key in var_to_shape_map:
    print("tensor_name: ", key)
    print(reader.get_tensor(key))
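If only the variable names and shapes are needed (the full tensor values printed above can be very large), a more compact listing is possible. This is a minimal sketch, assuming tf.train.list_variables is available in your TensorFlow version and reusing the same checkpoint path as above:

import tensorflow as tf

checkpoint_path = '/home/mml/siamese_net/logs/train/model.ckpt-9999'
# Print only (name, shape) pairs stored in the checkpoint.
for name, shape in tf.train.list_variables(checkpoint_path):
    print(name, shape)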
Use this output to check whether the variable names stored in the checkpoint are consistent with the names defined in the graph you are restoring into; a mismatch is what makes saver.restore fail.
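One way to do this check, as a minimal sketch: compare the two sets of names and print the differences. This assumes the model graph has already been built in the default graph (so tf.global_variables() is non-empty) and reuses the checkpoint path from the listing above:

import tensorflow as tf
from tensorflow.python import pywrap_tensorflow

checkpoint_path = '/home/mml/siamese_net/logs/train/model.ckpt-9999'
reader = pywrap_tensorflow.NewCheckpointReader(checkpoint_path)
ckpt_names = set(reader.get_variable_to_shape_map().keys())

# Strip the ':0' suffix so graph variable names match the checkpoint keys.
graph_names = set(v.name.split(':')[0] for v in tf.global_variables())

print('In the graph but missing from the checkpoint:', sorted(graph_names - ckpt_names))
print('In the checkpoint but not in the graph:', sorted(ckpt_names - graph_names))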