• 大小: 826KB
    文件类型: .zip
    金币: 2
    下载: 1 次
    发布日期: 2021-06-17
  • 语言: Python
  • 标签:

资源简介

TensorFlow神经机器翻译(seq2seq) 教程

资源截图

代码片段和文件信息

# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Attention-based sequence-to-sequence model with dynamic RNN support."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf

from . import model
from . import model_helper

# Public API of this module: only the model class is exported.
__all__ = ["AttentionModel"]


class AttentionModel(model.Model):
  """Sequence-to-sequence dynamic model with attention.

  This class implements a multi-layer recurrent neural network as encoder
  and an attention-based decoder. This is the same as the model described in
  (Luong et al., EMNLP'2015) paper: https://arxiv.org/pdf/1508.04025v5.pdf.
  This class also allows to use GRU cells in addition to LSTM cells with
  support for dropout.
  """

  def __init__(self
               hparams
               mode
               iterator
               source_vocab_table
               target_vocab_table
               reverse_target_vocab_table=None
               scope=None
               extra_args=None):
    self.has_attention = hparams.attention_architecture and hparams.attention

    # Set attention_mechanism_fn
    if self.has_attention:
      if extra_args and extra_args.attention_mechanism_fn:
        self.attention_mechanism_fn = extra_args.attention_mechanism_fn
      else:
        self.attention_mechanism_fn = create_attention_mechanism

    super(AttentionModel self).__init__(
        hparams=hparams
        mode=mode
        iterator=iterator
        source_vocab_table=source_vocab_table
        target_vocab_table=target_vocab_table
        reverse_target_vocab_table=reverse_target_vocab_table
        scope=scope
        extra_args=extra_args)

  def _prepare_beam_search_decoder_inputs(
      self beam_width memory source_sequence_length encoder_state):
    memory = tf.contrib.seq2seq.tile_batch(
        memory multiplier=beam_width)
    source_sequence_length = tf.contrib.seq2seq.tile_batch(
        source_sequence_length multiplier=beam_width)
    encoder_state = tf.contrib.seq2seq.tile_batch(
        encoder_state multiplier=beam_width)
    batch_size = self.batch_size * beam_width
    return memory source_sequence_length encoder_state batch_size

  def _build_decoder_cell(self hparams encoder_outputs encoder_state
                          source_sequence_length):
    “““Build a RNN ce

 属性            大小     日期    时间   名称
----------- ---------  ---------- -----  ----
     目录           0  2019-02-13 19:16  nmt-master\
     文件        1455  2019-02-13 19:16  nmt-master\CONTRIBUTING.md
     文件       11358  2019-02-13 19:16  nmt-master\LICENSE
     文件       53850  2019-02-13 19:16  nmt-master\README.md
     目录           0  2019-02-13 19:16  nmt-master\nmt\
     文件          50  2019-02-13 19:16  nmt-master\nmt\.gitignore
     文件           0  2019-02-13 19:16  nmt-master\nmt\__init__.py
     文件        7387  2019-02-13 19:16  nmt-master\nmt\attention_model.py
     目录           0  2019-02-13 19:16  nmt-master\nmt\g3doc\
     目录           0  2019-02-13 19:16  nmt-master\nmt\g3doc\img\
     文件       68102  2019-02-13 19:16  nmt-master\nmt\g3doc\img\attention_equation_0.jpg
     文件       42298  2019-02-13 19:16  nmt-master\nmt\g3doc\img\attention_equation_1.jpg
     文件       47590  2019-02-13 19:16  nmt-master\nmt\g3doc\img\attention_mechanism.jpg
     文件      150076  2019-02-13 19:16  nmt-master\nmt\g3doc\img\attention_vis.jpg
     文件       14737  2019-02-13 19:16  nmt-master\nmt\g3doc\img\encdec.jpg
     文件       48359  2019-02-13 19:16  nmt-master\nmt\g3doc\img\greedy_dec.jpg
     文件       58518  2019-02-13 19:16  nmt-master\nmt\g3doc\img\seq2seq.jpg
     文件       12252  2019-02-13 19:16  nmt-master\nmt\gnmt_model.py
     文件        8895  2019-02-13 19:16  nmt-master\nmt\inference.py
     文件        6490  2019-02-13 19:16  nmt-master\nmt\inference_test.py
     文件       33798  2019-02-13 19:16  nmt-master\nmt\model.py
     文件       24395  2019-02-13 19:16  nmt-master\nmt\model_helper.py
     文件       48578  2019-02-13 19:16  nmt-master\nmt\model_test.py
     文件       29369  2019-02-13 19:16  nmt-master\nmt\nmt.py
     文件        3404  2019-02-13 19:16  nmt-master\nmt\nmt_test.py
     目录           0  2019-02-13 19:16  nmt-master\nmt\scripts\
     文件           0  2019-02-13 19:16  nmt-master\nmt\scripts\__init__.py
     文件        4071  2019-02-13 19:16  nmt-master\nmt\scripts\bleu.py
     文件        1188  2019-02-13 19:16  nmt-master\nmt\scripts\download_iwslt15.sh
     文件       10419  2019-02-13 19:16  nmt-master\nmt\scripts\rouge.py
     文件        6460  2019-02-13 19:16  nmt-master\nmt\scripts\wmt16_en_de.sh
............此处省略34个文件信息

评论

共有 0 条评论