Posted in Python on January 21, 2020
In test.py, a pb file containing the weights can be generated directly with the following code; alternatively, the official TensorFlow freeze_graph.py can be used to convert a ckpt checkpoint into a pb file.
from tensorflow.python.framework import graph_util

# Freeze the variables of the current session into constants, keeping only the
# subgraph needed to compute the listed output node, then serialize it to disk.
constant_graph = graph_util.convert_variables_to_constants(
    sess, sess.graph_def, ['net_loss/inference/encode/conv_output/conv_output'])
with tf.gfile.FastGFile('net_model.pb', mode='wb') as f:
    f.write(constant_graph.SerializeToString())
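For the second route, freeze_graph.py ships with TensorFlow 1.x and can also be called from Python. A minimal sketch, assuming a hypothetical checkpoint prefix './model.ckpt' and a GraphDef dumped to './graph.pbtxt' (adjust paths and the output node name to your own model):

# A minimal sketch of converting a ckpt to a frozen pb with the official tool.
# './graph.pbtxt' and './model.ckpt' are assumed paths; the output node name
# is taken from the snippet above.
from tensorflow.python.tools import freeze_graph

freeze_graph.freeze_graph(
    input_graph='./graph.pbtxt',      # GraphDef in text format
    input_saver='',
    input_binary=False,               # set True if input_graph is a binary pb
    input_checkpoint='./model.ckpt',  # checkpoint prefix holding the weights
    output_node_names='net_loss/inference/encode/conv_output/conv_output',
    restore_op_name='save/restore_all',
    filename_tensor_name='save/Const:0',
    output_graph='./net_model.pb',    # frozen pb written here
    clear_devices=True,
    initializer_nodes='')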
In TF 1.x, with the pb file containing the weights and the get_tensor_by_name function, you can obtain the output of any layer:
import os
import os.path as ops
import argparse
import time
import math
import tensorflow as tf
import glob
import numpy as np
import matplotlib.pyplot as plt
import cv2

# Run on CPU only
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

graph_path = './model.pb'
image_path = './lvds1901.JPG'
inputtensorname = 'input_tensor:0'
tensorname = 'loss/inference/encode/resize_images/ResizeBilinear'
filepath = './net_output.txt'
HEIGHT = 256
WIDTH = 256
VGG_MEAN = [103.939, 116.779, 123.68]

with tf.Graph().as_default():
    # Load the frozen GraphDef and import it into the default graph
    graph_def = tf.GraphDef()
    with tf.gfile.GFile(graph_path, 'rb') as fid:
        serialized_graph = fid.read()
        graph_def.ParseFromString(serialized_graph)
        tf.import_graph_def(graph_def, name='')

    # Preprocess the input image: resize and subtract the VGG channel means
    image = cv2.imread(image_path)
    image = cv2.resize(image, (WIDTH, HEIGHT), interpolation=cv2.INTER_CUBIC)
    image_np = np.array(image)
    image_np = image_np - VGG_MEAN
    image_np_expanded = np.expand_dims(image_np, axis=0)

    with tf.Session() as sess:
        ops = tf.get_default_graph().get_operations()
        # Fetch the tensor of the layer to inspect by name
        tensor_name = tensorname + ':0'
        tensor_dict = tf.get_default_graph().get_tensor_by_name(tensor_name)
        image_tensor = tf.get_default_graph().get_tensor_by_name(inputtensorname)
        output = sess.run(tensor_dict, feed_dict={image_tensor: image_np_expanded})

        # Dump the layer output to a text file in NCHW order, 10 values per line
        ftxt = open(filepath, 'w')
        transform = output.transpose(0, 3, 1, 2)
        transform = transform.flatten()
        weight_count = 0
        for i in transform:
            if weight_count % 10 == 0 and weight_count != 0:
                ftxt.write('\n')
            ftxt.write(str(i) + ',')
            weight_count += 1
        ftxt.close()
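To find the exact name to pass to get_tensor_by_name for any other layer, you can simply list the operations of the imported graph. A minimal sketch, assuming the same frozen './model.pb' as above:

# List all operation names in the frozen graph; each name plus ':0' is the
# name of that operation's first output tensor, which is what
# get_tensor_by_name expects.
import tensorflow as tf

graph_def = tf.GraphDef()
with tf.gfile.GFile('./model.pb', 'rb') as fid:
    graph_def.ParseFromString(fid.read())

with tf.Graph().as_default() as graph:
    tf.import_graph_def(graph_def, name='')
    for op in graph.get_operations():
        print(op.name)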
This article on printing the output of each layer in TensorFlow is all the content I have to share. I hope it serves as a useful reference, and I hope you will continue to support 三水点靠木.
Printing the Output of Each Layer in TensorFlow
- Author -
Kluiverthoo