tensorflow 使用預訓練好的模型的一部分參數


   

# Restore only the pretrained weights that are shared with the current graph.
# Variables belonging to the newly added second BiLSTM layer, the second
# projection layer, and the new word-embedding table are excluded because
# they are trained from scratch.
all_vars = tf.global_variables()  # renamed from `vars`: don't shadow the builtin

# Keep every variable whose name does not mention a newly added layer.
pretrained_vars = [
    var for var in all_vars
    if 'bi-lstm_secondLayer' not in var.name
    and 'word_embedding1s' not in var.name
    and 'proj_secondLayer' not in var.name
]

# Saver restricted to the shared (pretrained) subset of variables.
saver_pre = tf.train.Saver(pretrained_vars)

# Load the latest checkpoint of the pretrained model into the live session.
# NOTE(review): assumes dir_model_storepath_pre holds a valid checkpoint dir —
# tf.train.latest_checkpoint returns None otherwise and restore() would raise.
saver_pre.restore(self.sess, tf.train.latest_checkpoint(self.config.dir_model_storepath_pre))

   

# NOTE(review): the triple-quoted block below is deliberately disabled code,
# kept for reference. It shows an alternative way to restore the shared
# weights: look each shared tensor up by its exact variable-scope name
# (first-layer BiLSTM kernels/biases and the word-embedding table) and build
# a Saver from an explicit {checkpoint_name: variable} mapping, instead of
# filtering tf.global_variables() by substring as the active code does.
# The string content is left byte-identical on purpose.
'''

with tf.variable_scope('bi-lstm',reuse=True):

fwk=tf.get_variable('bidirectional_rnn/fw/lstm_cell/kernel')

fwb=tf.get_variable('bidirectional_rnn/fw/lstm_cell/bias')

bwk = tf.get_variable('bidirectional_rnn/bw/lstm_cell/kernel')

bwb = tf.get_variable('bidirectional_rnn/bw/lstm_cell/bias')

   

saver_pre= tf.train.Saver({'words/_word_embeddings':self._word_embeddings,

'bi-lstm/bidirectional_rnn/fw/lstm_cell/kernel':fwk,

'bi-lstm/bidirectional_rnn/fw/lstm_cell/bias':fwb,

'bi-lstm/bidirectional_rnn/bw/lstm_cell/kernel':bwk,

'bi-lstm/bidirectional_rnn/bw/lstm_cell/bias':bwb})

 

for x in tf.trainable_variables():

print(x.name)

   

#mysaver = tf.train.import_meta_graph(self.config.dir_model_storepath_pre_graph)

   

saver_pre.restore(self.sess, tf.train.latest_checkpoint(self.config.dir_model_storepath_pre))

'''


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM