# **Keras .h5 convert to tensorflow .pb & tensorRT .uff**
這是一個學習筆記,寫不好可以提建議
## .h5 to .ckpt
先把 keras model 讀取後,做 tensor 的初始化,再以 tensorflow 的方式儲存成 .ckpt。如果要在同一支程式內連續轉換多個模型,需使用 tf.reset_default_graph() 將預設 graph 重置。

```
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.models import *
import tensorflow as tf
import cv2

# Parallel lists: input_h5[i] is converted and saved to output_ckpt[i].
# (Hoisted out of the loop — they are loop-invariant.)
input_h5 = ['./encoder_model.h5', './decoder_model.h5', './feature_map.h5']
output_ckpt = ["./ckpt/encoder/saved.ckpt", "./ckpt/decoder/saved.ckpt", "./ckpt/map/saved.ckpt"]

for h5_path, ckpt_path in zip(input_h5, output_ckpt):
    # load_model() builds the model's graph in the current default graph.
    model = load_model(h5_path)
    # Reuse the session Keras created for this model. The original code was
    # broken here: it imported the backend as lowercase `k` but called `K`,
    # and opened a tf.Session() only to immediately shadow it.
    sess = K.get_session()
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    save_path = saver.save(sess, ckpt_path)
    # Reset the default graph so the next model loads into a clean graph.
    tf.reset_default_graph()
```

## ckpt to pb
參考 https://github.com/r1cebank/tf-ckpt-2-pb
```
python convert.py \
    --checkpoint ./...你的檔案路徑.../saved.ckpt \
    --model ./...你的檔案路徑.../saved.ckpt.meta \
    --out-path ./...你的檔案路徑.../name.pb
```
```
### convert.py
import tensorflow as tf
from argparse import ArgumentParser


def main():
    """Restore a TF-1.x checkpoint and export it as a SavedModel (.pb).

    Command-line arguments:
        --checkpoint  dir or .ckpt file to load checkpoint from
        --model       .meta graph definition for the model
        --out-path    output directory for the SavedModel
    """
    parser = ArgumentParser()
    parser.add_argument('--checkpoint', type=str,
                        dest='checkpoint',
                        help='dir or .ckpt file to load checkpoint from',
                        metavar='CHECKPOINT', required=True)
    parser.add_argument('--model', type=str,
                        dest='model',
                        help='.meta for your model',
                        metavar='MODEL', required=True)
    parser.add_argument('--out-path', type=str,
                        dest='out_path',
                        help='model output directory',
                        metavar='MODEL_OUT', required=True)
    opts = parser.parse_args()

    # Start from a clean graph, then rebuild the graph from the .meta file.
    tf.reset_default_graph()
    saver = tf.train.import_meta_graph(opts.model)
    builder = tf.saved_model.builder.SavedModelBuilder(opts.out_path)
    with tf.Session() as sess:
        # Restore variables from disk.
        saver.restore(sess, opts.checkpoint)
        print("Model restored.")
        # 'feature_layer' is the SavedModel tag used to look up this
        # meta-graph when loading it back.
        builder.add_meta_graph_and_variables(sess,
                                             ['feature_layer'],
                                             strip_default_attrs=False)
    builder.save()


if __name__ == '__main__':
    main()
```
## h5 pb convert to uff
參考
https://devtalk.nvidia.com/default/topic/1028464/jetson-tx2/converting-tf-model-to-tensorrt-uff-format/
須先讀取 h5 並驅動tensor

再以已經轉換過的pb流程架構讀取

## convert to uff
```
import tensorflow.python.keras
import tensorflow.python.keras.backend as K
import tensorflow as tf
import uff

# Name of the final output node; discover it with the layer-name loop below
# if you are unsure. NOTE(review): assumes the model ends in a Dense+sigmoid
# layer named 'dense' — confirm against your model.
output_names = ['dense/Sigmoid']
frozen_graph_filename = '../pb/map/saved_model.pb'

# Loading the .h5 registers the model's graph in the Keras session.
model = tensorflow.python.keras.models.load_model('./feature_map.h5')
sess = K.get_session()
# print([node.name for node in sess.graph_def.node])

# Freeze the graph: bake variables into constants, then strip training-only nodes.
graph_def = tf.graph_util.convert_variables_to_constants(sess, sess.graph_def, output_names)
graph_def = tf.graph_util.remove_training_nodes(graph_def)

# Write the frozen graph to disk. The with-block closes the file on exit;
# the original's extra f.close() was redundant.
with open(frozen_graph_filename, 'wb') as f:
    f.write(graph_def.SerializeToString())

# Convert the frozen graph to UFF for TensorRT.
uff_model = uff.from_tensorflow_frozen_model(frozen_graph_filename, output_names)
```
```
# Alternative: emit a UFF file directly from an in-memory GraphDef
# (text=True also writes a human-readable dump of the converted graph).
# NOTE(review): frozen_graph, UFF_OUTPUT_FILENAME and OUTPUT_NAMES must be
# defined by the surrounding code — this is an illustrative fragment.
uff.from_tensorflow(graphdef=frozen_graph,
                    output_filename=UFF_OUTPUT_FILENAME,
                    output_nodes=OUTPUT_NAMES,
                    text=True)
```
output_names 是最後輸出層的名稱,如果不清楚可以在 load .h5 檔案的程式之後加上下面程式碼做查詢。
```
# Print every layer's output-tensor name; use the last one as output_names.
for layer in model.layers:
    print(layer.output.name)
```