# google-search kws_streaming model translation code tracing
Start from the directory `google-research/kws_streaming/train/`.
## 1. In file, ==train/model_train_eval.py==:
in function, main():
```python=268
# convert model to TFlite
folder_name = opt_name + 'tflite_stream_state_external'
file_name = 'stream_state_external.tflite'
mode = modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE
test.convert_model_tflite(flags, folder_name, mode, file_name,
optimizations=optimizations)
```
go into line 272: ==test.convert_model_tflite()==
## 2. In file, ==train/test.py==:
in function, convert_model_tflite():
Line 527 shows that this function converts the model.
```python=521
def convert_model_tflite(flags,
folder,
mode,
fname,
weights_name='best_weights',
optimizations=None):
"""Convert model to streaming and non streaming TFLite.
Args:
flags: model and data settings
folder: folder where converted model will be saved
mode: inference mode
fname: file name of converted model
weights_name: file name with model weights
optimizations: list of optimization options
"""
```
Line 544 shows model loading. If you want to modify the NN models, go to the folder ==models/== and find the related module file, such as cnn.py or dnn.py.
```python=544
model = models.MODELS[flags.model_name](flags)
model.load_weights(os.path.join(flags.train_dir,
weights_name)).expect_partial()
```
---
```python=547
# convert trained model to non streaming TFLite stateless
# to finish other tests we do not stop program if exception happen here
path_model = os.path.join(flags.train_dir, folder)
if not os.path.exists(path_model):
os.makedirs(path_model)
try:
with open(os.path.join(path_model, fname), 'wb') as fd:
fd.write(
utils.model_to_tflite(sess, model, flags, mode, path_model,
optimizations))
```
go into line 555: ==utils.model_to_tflite()==
## 3. In file, ==models/utils.py==:
Line 333 shows that this function creates the TFLite file.
```python=333
"""Convert non streaming model to tflite inference model.
If mode==modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE then inference graph
will be stateless: all states will be managed outside of the model and
will be passed to the model as additional inputs/outputs.
If mode==modes.Modes.STREAM_INTERNAL_STATE_INFERENCE then inference graph
will be stateful: all states will be part of the model - so model size
can increase. Latest version of TFLite converter supports it, so
conversion has to be done in eager mode.
```
---
Line 374 shows that it handles not only the non-streaming case but also streaming.
```python=373
# convert non streaming Keras model to
# Keras inference model (non streaming or streaming)
model_stream = to_streaming_inference(model_non_stream, flags, mode)
```
go into line 375: ==to_streaming_inference()==
## 4. In function, to_streaming_inference:
```python=272
def to_streaming_inference(model_non_stream, flags, mode):
"""Convert non streaming trained model to inference modes.
Args:
model_non_stream: trained Keras model non streamable
flags: settings with global data and model properties
mode: it supports Non streaming inference, Streaming inference with internal
states, Streaming inference with external states
```
---
go into line 313: ==convert_to_inference_model()==
```python=311
quantize_stream_scope = quantize.quantize_scope()
with quantize_stream_scope:
model_inference = convert_to_inference_model(model_non_stream,
input_tensors, mode)
return model_inference
```
## 5. In function, convert_to_inference_model:
```python=209
def convert_to_inference_model(model, input_tensors, mode):
"""Convert functional `Model` instance to a streaming inference.
It will create a new model with new inputs: input_tensors.
All weights will be copied. Internal states for streaming mode will be created
Only functional Keras model is supported!
Args:
model: Instance of `Model`.
input_tensors: list of input tensors to build the model upon.
mode: is defined by modes.Modes
Returns:
An instance of streaming inference `Model` reproducing the behavior
of the original model, on top of new inputs tensors,
using copied weights.
```
---
Line 252 shows converting to streaming.
Line 253 shows getting the states.
Line 256 shows creating the new model.
Line 264: ==return new_streaming_model==.
```python=247
model = _set_mode(model, mode)
new_model = _clone_model(model, input_tensors)
if mode == modes.Modes.STREAM_INTERNAL_STATE_INFERENCE:
return _copy_weights(new_model, model)
elif mode == modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE:
input_states, output_states = _get_input_output_states(new_model)
all_inputs = new_model.inputs + input_states
all_outputs = new_model.outputs + output_states
new_streaming_model = tf.keras.Model(all_inputs, all_outputs)
new_streaming_model.input_shapes = _get_state_shapes(all_inputs)
new_streaming_model.output_shapes = _get_state_shapes(all_outputs)
# inference streaming model with external states
# has the same number of weights with
# non streaming model so we can use set_weights directly
new_streaming_model.set_weights(model.get_weights())
return new_streaming_model
elif mode == modes.Modes.NON_STREAM_INFERENCE:
new_model.set_weights(model.get_weights())
return new_model
else:
raise ValueError('non supported mode ', mode)
```
---
go into Line 248: ==_clone_model()==
```python=88
def _clone_model(model, input_tensors):
  """Clone a functional Keras model, rebuilt on top of `input_tensors`.

  Weights are NOT copied here ("except of weights"); callers copy or set
  weights on the returned model afterwards.

  Args:
    model: functional `tf.keras.Model` instance to clone.
    input_tensors: list (or nest) of Keras input tensors for the new model,
      or None to let the cloned config create fresh inputs.

  Returns:
    A new `tf.keras.Model` reconstructed from the cloned layer config,
    keeping the original model's name.

  Raises:
    ValueError: if any entry of `input_tensors` is not a Keras tensor.
  """
  new_input_layers = {}  # Cache for created layers.
  # The helpers below reach into Keras internals (protected members).
  # pylint: disable=protected-access
  if input_tensors is not None:
    # Make sure that all input tensors come from a Keras layer.
    input_tensors = tf.nest.flatten(input_tensors)
    for i, input_tensor in enumerate(input_tensors):
      if not tf.keras.backend.is_keras_tensor(input_tensor):
        raise ValueError('Expected keras tensor but get', input_tensor)
      # Map each original InputLayer to the layer that produced the
      # replacement tensor, so cloning reuses the new inputs.
      # NOTE(review): assumes input_tensors is ordered like
      # model._input_layers — confirm against the caller.
      original_input_layer = model._input_layers[i]
      newly_created_input_layer = input_tensor._keras_history.layer
      new_input_layers[original_input_layer] = newly_created_input_layer
  model_config, created_layers = models_utils._clone_layers_and_model_config(
      model, new_input_layers, models_utils._clone_layer)
  # pylint: enable=protected-access
  # Reconstruct model from the config, using the cloned layers.
  input_tensors, output_tensors, created_layers = (
      functional.reconstruct_from_config(
          model_config, created_layers=created_layers))
  new_model = tf.keras.Model(input_tensors, output_tensors, name=model.name)
  return new_model
```