tensorflow 使用预训练好的模型的一部分参数（TensorFlow: restoring only a subset of a pretrained model's variables）


   

# Restore only the pretrained subset of the graph's variables.
#
# Variables belonging to the second bi-LSTM layer, the extra word-embedding
# table, and the second projection layer are excluded from the restore, so
# they keep their fresh initialization and are trained from scratch, while
# every other variable is loaded from the pretrained checkpoint.
#
# NOTE(review): 'word_embedding1s' looks like a possible typo for
# 'word_embeddings1' — confirm against the names actually created in the
# graph (print tf.global_variables() to check).

# Name substrings identifying variables that must NOT be restored.
_EXCLUDED_SCOPES = ('bi-lstm_secondLayer', 'word_embedding1s', 'proj_secondLayer')

# `all_vars` rather than `vars`: the original name shadowed the builtin.
all_vars = tf.global_variables()

net_var = [var for var in all_vars
           if not any(scope in var.name for scope in _EXCLUDED_SCOPES)]

# Saver restricted to the pretrained variables only.
saver_pre = tf.train.Saver(net_var)

saver_pre.restore(self.sess,
                  tf.train.latest_checkpoint(self.config.dir_model_storepath_pre))

# --- Alternative approach (originally disabled; kept for reference as
# --- comments instead of a no-op module-level string literal): restore via
# --- an explicit checkpoint-name -> variable map rather than filtering
# --- tf.global_variables().
#
# with tf.variable_scope('bi-lstm', reuse=True):
#     fwk = tf.get_variable('bidirectional_rnn/fw/lstm_cell/kernel')
#     fwb = tf.get_variable('bidirectional_rnn/fw/lstm_cell/bias')
#     bwk = tf.get_variable('bidirectional_rnn/bw/lstm_cell/kernel')
#     bwb = tf.get_variable('bidirectional_rnn/bw/lstm_cell/bias')
#
#     saver_pre = tf.train.Saver({
#         'words/_word_embeddings': self._word_embeddings,
#         'bi-lstm/bidirectional_rnn/fw/lstm_cell/kernel': fwk,
#         'bi-lstm/bidirectional_rnn/fw/lstm_cell/bias': fwb,
#         'bi-lstm/bidirectional_rnn/bw/lstm_cell/kernel': bwk,
#         'bi-lstm/bidirectional_rnn/bw/lstm_cell/bias': bwb,
#     })
#
#     # Debug aid: list every trainable variable by name.
#     for x in tf.trainable_variables():
#         print(x.name)
#
#     # mysaver = tf.train.import_meta_graph(self.config.dir_model_storepath_pre_graph)
#     saver_pre.restore(self.sess, tf.train.latest_checkpoint(self.config.dir_model_storepath_pre))


免责声明!

本站转载的文章为个人学习借鉴使用,本站对版权不负任何法律责任。如果侵犯了您的隐私权益,请联系本站邮箱yoyou2525@163.com删除。



 
粤ICP备18138465号  © 2018-2025 CODEPRJ.COM