5 | 5 | import cv2 as cv
6 | 6 | import numpy as np
7 | 7 | import tensorflow as tf
| 8 | +from tensorflow import keras as K
8 | 9 | from tensorflow.python.tools import optimize_for_inference_lib
9 | 10 | from tensorflow.tools.graph_transforms import TransformGraph
10 | 11 |
@@ -405,6 +406,16 @@ def my_dropout(x):
405 | 406 | mm = tf.matmul(flattened, weights) + biases
406 | 407 | save(inp, flattened, 'nhwc_transpose_reshape_matmul')
407 | 408 | ################################################################################
| 409 | +inp = tf.placeholder(tf.float32, [1, 24], 'input')
| 410 | +# Keras Reshape's target_shape excludes the batch axis => output is (1, 2, 4, 3)
| 411 | +out = K.layers.Reshape((2, 4, 3), name="reshape")(inp)
| 412 | +save(inp, out, 'reshape_layer')
| 413 | +################################################################################
| 414 | +inp = tf.placeholder(tf.float32, [1, 3, 3, 4], 'input')
| 415 | +conv2 = tf.layers.conv2d(inp, filters=4, kernel_size=1)
| 416 | +out = tf.reshape(conv2, [1, 2, 3, 6], 'reshaped')
| 417 | +save(inp, out, 'reshape_nchw')
| 418 | +################################################################################
408 | 419 | inp = tf.placeholder(tf.float32, [1, 6, 5, 3], 'input')
409 | 420 | conv = tf.layers.conv2d(inputs=inp, filters=3, kernel_size=[1, 1],
410 | 421 |                         activation=tf.nn.relu,
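A side note on the two cases added above: `K.layers.Reshape` takes a `target_shape` that excludes the batch axis, which is why a [1, 24] placeholder comes out as (1, 2, 4, 3), and `reshape_nchw` reshapes the NHWC output of a 1x1 convolution, presumably to exercise importers that keep tensors in NCHW order internally. A minimal standalone sketch (TF 1.x session API; the names `inp2`, `conv`, `out2` are chosen here for illustration and are not part of the commit):

import numpy as np
import tensorflow as tf
from tensorflow import keras as K

# target_shape excludes the batch axis: [1, 24] -> (1, 2, 4, 3)
inp = tf.placeholder(tf.float32, [1, 24], 'input')
out = K.layers.Reshape((2, 4, 3), name='reshape')(inp)
print(out.shape)  # (1, 2, 4, 3)

# As in 'reshape_nchw': reshape the NHWC output of a 1x1 convolution
# (1*3*3*4 = 36 elements) into the new shape [1, 2, 3, 6]
inp2 = tf.placeholder(tf.float32, [1, 3, 3, 4], 'input2')
conv = tf.layers.conv2d(inp2, filters=4, kernel_size=1)
out2 = tf.reshape(conv, [1, 2, 3, 6], 'reshaped')

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    v = sess.run(out2, feed_dict={inp2: np.random.rand(1, 3, 3, 4).astype(np.float32)})
    print(v.shape)  # (1, 2, 3, 6)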
@@ -512,8 +523,6 @@ def my_dropout(x):
512 | 523 | relu = tf.maximum(0.01 * inp, inp, name='leaky_relu') * 2
513 | 524 | save(inp, relu, 'leaky_relu_order3', optimize=False)
514 | 525 | ################################################################################
515 | | -from tensorflow import keras as K
516 | | -
517 | 526 | model = K.models.Sequential()
518 | 527 | model.add(K.layers.Softmax(name='keras_softmax', input_shape=(2, 3, 4)))
519 | 528 | sess = K.backend.get_session()
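For reference, the `leaky_relu_order3` graph builds LeakyReLU as `tf.maximum(0.01 * inp, inp)` and then scales by 2; recognizing this as LeakyReLU(alpha=0.01) relies on max(alpha*x, x) matching the usual piecewise definition whenever alpha < 1. A quick NumPy check of that identity (the sample values are illustrative only):

import numpy as np

x = np.array([-2.0, -0.5, 0.0, 1.5])

# max(0.01*x, x) is LeakyReLU with alpha=0.01 (valid because 0.01 < 1)
leaky = np.maximum(0.01 * x, x)
reference = np.where(x > 0, x, 0.01 * x)
assert np.allclose(leaky, reference)

# the test graph multiplies by 2 afterwards
print(leaky * 2)  # [-0.04 -0.01  0.    3.  ]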