2017-11-18 107 views
0

我遇到了在不同名称范围(name scope)内重用变量的问题。下面的代码把源端嵌入和目标端嵌入分别放在两个不同的变量空间中;我想做的是把源端和目标端放到同一个空间里,重用同一个查找表中的变量。(问题:TensorFlow 如何在不同名称范围内重用变量)

''' Applying bidirectional encoding for source-side inputs and first-word decoding. 
''' 
def decode_first_word(self, source_vocab_id_tensor, source_mask_tensor, scope, reuse):
    """Bidirectionally encode the source sentence and decode its first target word.

    Looks up source-side embeddings under the 'Source_Side' variable scope,
    runs the bidirectional encoder, then delegates the first decoding step to
    ``decode_next_word`` (with no previous decoder state or target word).

    Returns:
        The list returned by ``decode_next_word`` with the encoder's
        concatenated hidden-state tensor appended as the last element.
    """
    with tf.name_scope('Word_Embedding_Layer'):
        with tf.variable_scope('Source_Side'):
            src_embedded = self._src_lookup_table(source_vocab_id_tensor)

    with tf.name_scope('Encoding_Layer'):
        bi_hidden = self._encoder.get_biencoded_tensor(
            src_embedded, source_mask_tensor)

    with tf.name_scope('Decoding_Layer_First'):
        # First step: no decoder hidden state, no previous target word.
        first_step_vals = self.decode_next_word(
            bi_hidden, source_mask_tensor, None, None, None, scope, reuse)

    return first_step_vals + [bi_hidden]


''' Applying one-step decoding. 
''' 
def decode_next_word(self, enc_concat_hidden, src_mask, cur_dec_hidden, \
          cur_trg_wid, trg_mask=None, scope=None, reuse=False, \
          src_side_pre_act=None):
    """Apply one decoding step.

    Embeds the previous target word (if any) under the 'Target_Side'
    variable scope; ``cur_trg_wid`` is ``None`` on the very first step,
    in which case no embedding lookup is performed.

    NOTE(review): the rest of this method is not shown in the post; only
    the embedding lookup is visible here.
    """
    with tf.name_scope('Word_Embedding_Layer'):
        with tf.variable_scope('Target_Side'):
            cur_trg_wemb = None
            # Idiom fix: compare to None with `is`, and drop the empty
            # `if ...: pass / else:` branch from the original.
            if cur_trg_wid is not None:
                cur_trg_wemb = self._trg_lookup_table(cur_trg_wid)

我想把它们改成下面这样,使得嵌入查找表对应的变量在整个计算图中只出现一个节点(源端和目标端共享同一份嵌入):

def decode_first_word_shared_embedding(self, source_vocab_id_tensor, source_mask_tensor, scope, reuse):
    """Encode the source sentence and decode the first word, using ONE
    embedding table ('Bi_Side') shared between source and target sides.

    Fix for the question: enter the shared variable scope with
    ``reuse=tf.AUTO_REUSE`` so that the first entry creates the lookup
    table's variables and every later entry (e.g. from
    ``decode_next_word_shared_embedding``) reuses them instead of raising
    a "variable already exists" error or creating duplicates.

    Returns:
        The list returned by ``decode_next_word_shared_embedding`` with
        the encoder's concatenated hidden-state tensor appended.
    """
    with tf.name_scope('Word_Embedding_Layer'):
        # AUTO_REUSE: create on first use, reuse on every subsequent use.
        with tf.variable_scope('Bi_Side', reuse=tf.AUTO_REUSE):
            source_embedding_tensor = self._bi_lookup_table(source_vocab_id_tensor)

    with tf.name_scope('Encoding_Layer'):
        source_concated_hidden_tensor = self._encoder.get_biencoded_tensor(
            source_embedding_tensor, source_mask_tensor)

    with tf.name_scope('Decoding_Layer_First'):
        rvals = self.decode_next_word_shared_embedding(
            source_concated_hidden_tensor, source_mask_tensor,
            None, None, None, scope, reuse)

    return rvals + [source_concated_hidden_tensor]

def decode_next_word_shared_embedding(self, enc_concat_hidden, src_mask, cur_dec_hidden, \
          cur_trg_wid, trg_mask=None, scope=None, reuse=False, \
          src_side_pre_act=None):
    """Apply one decoding step using the shared 'Bi_Side' embedding table.

    Embeds the previous target word (if any) through the SAME lookup table
    used for the source side. ``reuse=tf.AUTO_REUSE`` ensures the variables
    created by the encoder's first lookup are reused here rather than
    re-created — this is the key to having a single embedding node in the
    whole graph.

    NOTE(review): the rest of this method is not shown in the post; only
    the embedding lookup is visible here.
    """
    with tf.name_scope('Word_Embedding_Layer'):
        cur_trg_wemb = None
        # Idiom fix: `is None` comparison instead of `None == ...`.
        if cur_trg_wid is not None:
            # AUTO_REUSE: reuse the table created on the source side.
            with tf.variable_scope('Bi_Side', reuse=tf.AUTO_REUSE):
                cur_trg_wemb = self._bi_lookup_table(cur_trg_wid)

如何实现这一目标?

回答

相关问题