Resource Description

An Inception v3 network written in TensorFlow following the original paper; dataset handling and training code will be added in a later update.

Resource Screenshot

Code Snippet and File Information

import tensorflow as tf
import numpy
import tensorflow.contrib.slim as slim


def _variable(name, shape):
  """Helper to create a Variable stored on GPU memory.

  Args:
    name: name of the variable
    shape: list of ints

  Returns:
    Variable Tensor
  """
  with tf.device('/gpu:0'):
    var = tf.get_variable(name, shape)
  return var
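
The helper relies on tf.get_variable's default initializer. If an explicit initializer is ever needed (for example to control weight scale), a variant along the following lines would work; this is only a sketch, and the truncated-normal default is an assumption, not part of the original file:

def _variable_with_initializer(name, shape, initializer=None):
  # Hypothetical variant of _variable that forwards an explicit initializer.
  # The truncated-normal default below is an assumption, not from the original code.
  if initializer is None:
    initializer = tf.truncated_normal_initializer(stddev=0.1)
  with tf.device('/gpu:0'):
    var = tf.get_variable(name, shape, initializer=initializer)
  return var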

def conv_layer(input, shape, stride, activation=True, padding='VALID', name=None):
    in_channel = shape[2]
    out_channel = shape[3]
    k_size = shape[0]
    with tf.variable_scope(name) as scope:
        kernel = _variable('conv_weights', shape=shape)
        conv = tf.nn.conv2d(input=input, filter=kernel, strides=stride, padding=padding)
        biases = _variable('biases', [out_channel])
        bias = tf.nn.bias_add(conv, biases)
        if activation is True:
            conv_out = tf.nn.relu(bias, name='relu')
        else:
            conv_out = bias
        return conv_out

def conv_inception(input, shape, stride=[1, 1, 1, 1], activation=True, padding='SAME', name=None):
    in_channel = shape[2]
    out_channel = shape[3]
    k_size = shape[0]
    with tf.variable_scope(name) as scope:
        kernel = _variable('conv_weights', shape=shape)
        conv = tf.nn.conv2d(input=input, filter=kernel, strides=stride, padding=padding)
        biases = _variable('biases', [out_channel])
        bias = tf.nn.bias_add(conv, biases)
        if activation is True:
            conv_out = tf.nn.relu(bias, name='relu')
        else:
            conv_out = bias
        return conv_out
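
The shape argument of these helpers follows tf.nn.conv2d's weight layout [kernel_height, kernel_width, in_channels, out_channels]. A minimal standalone usage sketch (the placeholder size and channel counts below are illustrative assumptions, not taken from the original file):

# Hypothetical example: a 3x3 convolution mapping 288 input channels to 96 output channels.
x = tf.placeholder(tf.float32, [None, 35, 35, 288])
y = conv_inception(x, shape=[3, 3, 288, 96], name='example_3x3')
print(y.get_shape())  # (?, 35, 35, 96) with the default 'SAME' padding and stride 1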

def inception_block_tradition(input, name=None):

    with tf.variable_scope(name) as scope:
        with tf.variable_scope("Branch_0"):
            branch_0 = conv_inception(input, shape=[1, 1, 288, 64], name='0a_1x1')
        with tf.variable_scope('Branch_1'):
            branch_1 = conv_inception(input, shape=[1, 1, 288, 48], name='0a_1x1')
            branch_1 = conv_inception(branch_1, shape=[5, 5, 48, 64], name='0b_5x5')
        with tf.variable_scope("Branch_2"):
            branch_2 = conv_inception(input, shape=[1, 1, 288, 64], name='0a_1x1')
            branch_2 = conv_inception(branch_2, shape=[3, 3, 64, 96], name='0b_3x3')
        with tf.variable_scope('Branch_3'):
            branch_3 = tf.nn.avg_pool(input, ksize=(1, 3, 3, 1), strides=[1, 1, 1, 1], padding='SAME', name='Avgpool_0a_3x3')
            branch_3 = conv_inception(branch_3, shape=[1, 1, 288, 64], name='0b_1x1')
        inception_out = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
        b = 1  # for debug
        return inception_out
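
As written, the block expects a 288-channel input (each branch's first 1x1 convolution uses shape [1, 1, 288, ...]), and the four branches concatenate back to 64 + 64 + 96 + 64 = 288 output channels, so both spatial size and depth are preserved. A hedged usage sketch (the 35x35 spatial size is an assumption borrowed from the 35x35x288 grid in the Inception v3 paper, not from this file):

# Hypothetical usage: apply the block to a 35x35x288 feature map.
features = tf.placeholder(tf.float32, [None, 35, 35, 288])
block_out = inception_block_tradition(features, name='Mixed_example')
print(block_out.get_shape())  # (?, 35, 35, 288): depth = 64 + 64 + 96 + 64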

def inception_grid_reduction_1(input, name=None):

    with tf.variable_scope(name) as scope:
        with tf.variable_scope("Branch_0"):
            branch_0 = conv_inception(input, shape=[1, 1, 288, 384], name='0a_1x1')
            branch_0 = conv_inception(branch_0, shape=[3, 3, 384, 384], stride=[1, 2, 2, 1], padding='VALID', name='0b_3x3')

 Attribute       Size    Date       Time  Name
----------- ---------  ---------- -----  ----
      File         455  2018-05-04 20:50  Architecture\.idea\Architecture.iml
      File         153  2018-05-04 20:54  Architecture\.idea\codestyles\codestyleConfig.xml
      File         198  2018-05-04 20:54  Architecture\.idea\codestyles\Project.xml
      File          84  2018-05-04 20:54  Architecture\.idea\dictionaries\Yel.xml
      File         185  2018-05-04 20:54  Architecture\.idea\misc.xml
      File         276  2018-05-04 20:50  Architecture\.idea\modules.xml
      File       19246  2018-05-06 21:51  Architecture\.idea\workspace.xml
      File       14810  2018-05-06 11:03  Architecture\inception_v3.py
      File        3783  2018-05-06 15:36  Architecture\training.py
 Directory           0  2018-05-04 20:54  Architecture\.idea\codestyles
 Directory           0  2018-05-04 20:54  Architecture\.idea\dictionaries
 Directory           0  2018-05-06 21:51  Architecture\.idea
 Directory           0  2018-05-06 15:36  Architecture
----------- ---------  ---------- -----  ----
                39190                    13

