# NOTE: removed 111 lines of stray line-number gutter ("1".."111") that were
# copied in from a blog's code viewer — they were not part of the program.
# -*- coding: utf-8 -*-
"""Model.ipynb
Automatically generated by Colaboratory.
Original file is located at
    https://colab.research.google.com/drive/1hSnFaBG3KUZqi9ZcjGtjhv3bkeiHEqSv
"""
 
# Third-party / Colab imports and project paths.
#
# Bug fix: `tf` and `np` are used throughout this file but were never
# imported — the notebook presumably imported them in a cell lost in the
# copy-paste. This code uses the TF1 API (placeholders, Sessions); under
# TensorFlow 2.x you would need `import tensorflow.compat.v1 as tf` and
# `tf.disable_v2_behavior()` instead.
import numpy as np
import tensorflow as tf

# NOTE(review): google.colab only exists inside a Colab VM — this script is
# not runnable locally as-is.
from google.colab import drive
from google.colab import files

# Mount Google Drive and build the absolute path to this project's folder.
drive.mount('/gdrive')
PATH = "/gdrive/My Drive/Colab Notebooks/"
PROJECT_PATH = "project/FaceRecognition/"
TOTAL_PATH = PATH + PROJECT_PATH

#files.download('new_file.txt')
 
# ---------------------------------------------------------------------------
# Build a minimal TF1 graph: output = relu(input @ W + b), then save both the
# graph structure (hellotensor.pbtxt) and a checkpoint holding the weights.
#
# Bug fixes vs. the original paste:
#   * the four tensor definitions had lost their assignment targets (lines
#     began with a bare "=", a SyntaxError) — restored as I, W, b, O; the
#     names I/W/b are confirmed by their use in the matmul line;
#   * the matrix assigned to W was ragged ([[12],[4,5],[7,8]]); W is declared
#     3x2, so it must be [[1,2],[4,5],[7,8]] (also matches the NumPy
#     reference check at the bottom of the file);
#   * the trailing "!cd ... ; ls" is IPython shell magic, invalid in a plain
#     .py file — preserved as a comment.
# ---------------------------------------------------------------------------
tf.reset_default_graph()

I = tf.placeholder(tf.float32, shape=[None, 3], name='input_node')   # input
W = tf.Variable(np.zeros(shape=[3, 2]), dtype=tf.float32, name='W')  # weights
b = tf.Variable(np.zeros(shape=[2]), dtype=tf.float32, name='b')     # biases
O = tf.nn.relu(tf.matmul(I, W) + b, name='output_node')              # activation / output

saver = tf.train.Saver()
init_op = tf.global_variables_initializer()

with tf.Session() as sess:
  sess.run(init_op)

  # Save the graph structure (no weights) as a text protobuf.
  tf.train.write_graph(sess.graph_def, '.', 'hellotensor.pbtxt', as_text=True)

  # Normally you would do some training here; we just assign fixed values so
  # the frozen graph has recognizable non-zero weights.
  sess.run(tf.assign(W, [[1, 2], [4, 5], [7, 8]]))
  sess.run(tf.assign(b, [1, 1]))

  # Save a checkpoint file, which stores the above assignments.
  saver.save(sess, './hellotensor.ckpt', global_step=0)

  # Colab shell magic (invalid in a plain .py file):
  # !cd /gdrive/My\ Drive/Colab\ Notebooks/project/FaceRecognition ; ls
 
from tensorflow.python.platform import gfile
from tensorflow.python.tools import optimize_for_inference_lib
from google.protobuf import text_format

GRAPH_PB_PATH = TOTAL_PATH + 'hellotensor.pbtxt'

# Load the saved graph definition and print its node names, so the
# 'input_node' / 'output_node' names used below can be verified.
#
# Bug fixes vs. the original:
#   * ParseFromString(f.read()) sat OUTSIDE the `with` block, so the file
#     handle was already closed when it was read;
#   * the graph was saved with as_text=True (text protobuf), so it must be
#     parsed with text_format.Merge, not the binary ParseFromString.
with tf.Session() as sess:
    print("load graph")
    with gfile.FastGFile(GRAPH_PB_PATH, 'r') as f:
        graph_def = tf.GraphDef()
        text_format.Merge(f.read(), graph_def)
    sess.graph.as_default()
    tf.import_graph_def(graph_def, name='')
    names = [n.name for n in graph_def.node]
    print(names)
 
# ---------------------------------------------------------------------------
# Restore the checkpointed graph + weights, freeze the variables into
# constants, optimize for inference, and write the result as a binary .pb
# that OpenCV's DNN module can load.
#
# Bug fixes vs. the original:
#   * `sess.run(init_op)` after `saver.restore(...)` is dropped — init_op was
#     built on an empty graph (before import_meta_graph) so it covered no
#     variables, and re-initializing after a restore would clobber the
#     restored weights anyway;
#   * the final write used `open'./output_graph.pb''wb')` (SyntaxError) and
#     serialized the UN-optimized frozen_graph_def, silently discarding the
#     optimize_for_inference result — it now writes output_graph_def.
# ---------------------------------------------------------------------------
tf.reset_default_graph()
meta_path = 'hellotensor.ckpt-0.meta'  # .meta file saved with the checkpoint

with tf.Session() as sess:
    # Restore the graph structure from the .meta file.
    saver = tf.train.import_meta_graph(meta_path)

    # Load the checkpointed weights from the current directory.
    saver.restore(sess, tf.train.latest_checkpoint('.'))

    # Node names fixed when the graph was built above.
    output_node_names = ['output_node']
    input_node_names = ['input_node']
    print("output_node_names {%s}" % output_node_names)
    print("input_node_names {%s}" % input_node_names)

    # Convert every tf.Variable into a constant so the graph is
    # self-contained (no checkpoint needed at inference time).
    frozen_graph_def = tf.graph_util.convert_variables_to_constants(
        sess,
        sess.graph_def,
        output_node_names)

    # Strip training-only nodes and apply inference-time optimizations.
    output_graph_def = optimize_for_inference_lib.optimize_for_inference(
        tf.graph_util.remove_training_nodes(frozen_graph_def),
        input_node_names,   # array of input node name(s)
        output_node_names,  # array of output node name(s)
        tf.float32.as_datatype_enum)

    # Save the frozen, optimized graph as a binary protobuf.
    with open('./output_graph.pb', 'wb') as f:
        f.write(output_graph_def.SerializeToString())
 
# ---------------------------------------------------------------------------
# Sanity-check the frozen graph: run a 2-sample batch through OpenCV's DNN
# module and compare against the same computation done directly in NumPy.
#
# Bug fixes vs. the original:
#   * the NumPy weight matrix was ragged ([[12],[4,5],[7,8]]); it must match
#     the 3x2 matrix assigned to W: [[1,2],[4,5],[7,8]];
#   * the bias in the graph is the shape-[2] vector [1, 1] (added per output
#     column), not the column vector [[1],[1]];
#   * the graph output is relu(x @ W + b), so the reference applies
#     np.maximum(0, ...) — here all values are positive, so results match;
#   * bare `!ls` shell magics (invalid in a .py file) removed, and the bare
#     notebook display expressions wrapped in print() so the script shows
#     its results.
# ---------------------------------------------------------------------------
import cv2 as cv

net = cv.dnn.readNetFromTensorflow('./output_graph.pb')
print(net.getLayerNames())

# Run a 2-sample batch through the frozen graph.
net.setInput(np.array([[1, 2, 3], [4, 5, 6]]), 'input_node')
print(net.forward('output_node'))

# Reference computation with the weights assigned earlier.
reference = np.maximum(
    0,
    np.array([[1, 2, 3], [4, 5, 6]]) @ np.array([[1, 2], [4, 5], [7, 8]])
    + np.array([1, 1]))
print(reference)

files.download("output_graph.pb")
 
# NOTE: removed trailing blog-paste residue ("cs" language tag and
# "+ Recent posts" footer) — not part of the program.