• Size: 6KB
    File type: .py
    Coins: 1
    Downloads: 0
    Posted: 2021-05-28
  • Language: Python
  • Tags:

Resource Description

Python code for text classification with an LSTM plus an attention layer.
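Judging from the imports, the file follows the usual pipeline for Chinese text: jieba for word segmentation, Word2Vec for word vectors, padded sequences, a bidirectional LSTM, and the custom Attention layer defined in the snippet below. As a rough sketch of how a classifier would be assembled around that layer (MAX_LEN, VOCAB_SIZE, and N_CLASSES are illustrative placeholders, not values taken from this file):

# Minimal, hypothetical sketch (old Keras 2.x functional API); assumes the
# Attention class from the snippet below is in scope. MAX_LEN, VOCAB_SIZE and
# N_CLASSES are assumed placeholders, not values from the file.
from keras.models import Model
from keras.layers import Input, Embedding, Bidirectional, LSTM, Dense, Dropout

MAX_LEN, VOCAB_SIZE, N_CLASSES = 130, 50000, 2

inp = Input(shape=(MAX_LEN,))
x = Embedding(VOCAB_SIZE, 100)(inp)                    # 100 = EMBEDDING_DIM in the file
x = Bidirectional(LSTM(64, return_sequences=True))(x)  # sequence output for attention
x = Attention(step_dim=MAX_LEN)(x)                     # weighted sum over time steps
x = Dropout(0.5)(x)
out = Dense(N_CLASSES, activation='softmax')(x)

model = Model(inputs=inp, outputs=out)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])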


Code Snippet and File Information

import os
import pickle
import random
import jieba
import numpy as np

from keras import backend as K
from keras.optimizers import SGD
from keras.regularizers import l2
from gensim.models import Word2Vec
from keras.engine.topology import Layer
from keras.layers import Bidirectional
from keras.utils import CustomObjectScope
from keras.models import Model, Sequential, load_model
from keras.preprocessing.sequence import pad_sequences
from keras.layers.normalization import BatchNormalization
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras import initializers, regularizers, constraints
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers import Input, LSTM, merge, Embedding, GRU

EMBEDDING_DIM = 100

# Attention layer: learns one weight per feature and returns the
# attention-weighted sum of the sequence (e.g. of LSTM outputs).
class Attention(Layer):
    def __init__(self, step_dim=130,
                 W_regularizer=None, b_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):
        self.supports_masking = True
        #self.init = initializations.get('glorot_uniform')
        self.init = initializers.get('glorot_uniform')

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.step_dim = step_dim
        self.features_dim = 0
        super(Attention, self).__init__(**kwargs)
    def build(self, input_shape):
        assert len(input_shape) == 3
        # one attention weight per input feature
        self.W = self.add_weight((input_shape[-1],),
                                 initializer=self.init,
                                 name='{}_W'.format(self.name),
                                 regularizer=self.W_regularizer,
                                 constraint=self.W_constraint)
        self.features_dim = input_shape[-1]
        if self.bias:
            # one bias per time step
            self.b = self.add_weight((input_shape[1],),
                                     initializer='zero',
                                     name='{}_b'.format(self.name),
                                     regularizer=self.b_regularizer,
                                     constraint=self.b_constraint)
        else:
            self.b = None
        self.built = True

    def compute_mask(self, input, input_mask=None):
        # the layer collapses the time dimension, so no mask is passed on
        return None

    def call(self, x, mask=None):
        features_dim = self.features_dim
        step_dim = self.step_dim
        # alignment scores: one score per time step, eij = x . W (+ b)
        eij = K.reshape(K.dot(K.reshape(x, (-1, features_dim)), K.reshape(self.W, (features_dim, 1))), (-1, step_dim))
        if self.bias:
            eij += self.b
        eij = K.tanh(eij)
        # softmax over time steps, respecting the input mask
        a = K.exp(eij)
        if mask is not None:
            a *= K.cast(mask, K.floatx())
        a /= K.cast(K.sum(a, axis=1, keepdims=True) + K.epsilon(), K.floatx())
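The preview is cut off mid-method. In the widely circulated version of this attention layer, which this code closely matches, call() finishes by weighting the inputs and summing over time, and compute_output_shape reports the collapsed shape; a plausible completion along those lines (an assumption, since the rest of the file is not shown):

        # (assumed continuation) apply the attention weights and
        # collapse the time dimension to a single feature vector
        a = K.expand_dims(a)
        weighted_input = x * a
        return K.sum(weighted_input, axis=1)

    def compute_output_shape(self, input_shape):
        # (batch, steps, features) -> (batch, features)
        return input_shape[0], self.features_dim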
