SequenceErase, SequenceInsert, SequenceLength
PINTO0309 committed Oct 26, 2022
1 parent 0e5eb7f commit 0bd1cce
Showing 5 changed files with 283 additions and 5 deletions.
8 changes: 4 additions & 4 deletions README.md
@@ -69,7 +69,7 @@ Video speed is adjusted approximately 50 times slower than actual speed.
$ docker run --rm -it \
-v `pwd`:/workdir \
-w /workdir \
-    ghcr.io/pinto0309/onnx2tf:1.0.19
+    ghcr.io/pinto0309/onnx2tf:1.0.20
or
@@ -557,9 +557,9 @@ Please don't post such low level questions as issues.
|SequenceAt|:heavy_check_mark:|
|SequenceConstruct|:heavy_check_mark:|
|SequenceEmpty|:heavy_check_mark:|
-|SequenceErase|**Help wanted**|
-|SequenceInsert|**Help wanted**|
-|SequenceLength|**Help wanted**|
+|SequenceErase|:heavy_check_mark:|
+|SequenceInsert|:heavy_check_mark:|
+|SequenceLength|:heavy_check_mark:|
|Shape|:heavy_check_mark:|
|Shrink|:heavy_check_mark:|
|Sigmoid|:heavy_check_mark:|
2 changes: 1 addition & 1 deletion onnx2tf/__init__.py
@@ -1,3 +1,3 @@
from onnx2tf.onnx2tf import convert, main

-__version__ = '1.0.19'
+__version__ = '1.0.20'
85 changes: 85 additions & 0 deletions onnx2tf/ops/SequenceErase.py
@@ -0,0 +1,85 @@
import random
random.seed(0)
import numpy as np
np.random.seed(0)
import tensorflow as tf
import onnx_graphsurgeon as gs
from onnx2tf.utils.common_functions import (
get_constant_or_variable,
print_node_info,
inverted_operation_enable_disable,
make_tf_node_info,
)


@print_node_info
@inverted_operation_enable_disable
def make_node(
*,
graph_node: gs.Node,
tf_layers_dict: dict,
**kwargs: dict,
):
"""SequenceErase
Parameters
----------
graph_node: gs.Node
graph_surgeon Node
tf_layers_dict: dict
optype, shape, dtype, tensorflow graph
"""
before_op_output_shape_trans_1 = \
tf_layers_dict.get(graph_node.inputs[0].name, {}).get('before_op_output_shape_trans', True)
before_op_output_shape_trans_2 = \
tf_layers_dict.get(graph_node.inputs[1].name, {}).get('before_op_output_shape_trans', True)
before_op_output_shape_trans = \
before_op_output_shape_trans_1 \
and before_op_output_shape_trans_2

graph_node_input_1 = get_constant_or_variable(
graph_node.inputs[0],
before_op_output_shape_trans,
)
graph_node_input_2 = get_constant_or_variable(
graph_node.inputs[1],
before_op_output_shape_trans,
)
graph_node_output: gs.Variable = graph_node.outputs[0]
shape = graph_node_output.shape
dtype = graph_node_output.dtype

# Preserving Graph Structure (Dict)
tf_layers_dict[graph_node_output.name] = {
'optype': graph_node.op,
'shape': shape,
'dtype': dtype,
}

# Generation of TF OP
input_sequence = tf_layers_dict[graph_node_input_1.name]['tf_node'] \
if isinstance(graph_node_input_1, gs.Variable) else graph_node_input_1
position = tf_layers_dict[graph_node_input_2.name]['tf_node'] \
if isinstance(graph_node_input_2, gs.Variable) else graph_node_input_2

s1 = input_sequence[:position]
s2 = input_sequence[position + 1:]

tf_layers_dict[graph_node_output.name]['tf_node'] = \
tf.concat([s1, s2], axis=0)

# Generation of Debug Info
tf_layers_dict[graph_node_output.name]['tf_node_info'] = \
make_tf_node_info(
node_info={
'tf_op_type': 'SequenceErase',
'tf_inputs': {
'input_sequence': input_sequence,
'position': position,
},
'tf_outputs': {
'output': tf_layers_dict[graph_node_output.name]['tf_node'],
},
}
)
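
The converted op treats an ONNX tensor sequence as a ragged tensor whose outermost dimension indexes the sequence elements, so erasing an element reduces to a slice-and-concat on axis 0. Below is a minimal standalone sketch of that same pattern; it is not part of the commit, and the example data is made up purely for illustration.

```python
import tensorflow as tf

# Hypothetical sequence of three variable-length tensors, modeled as a RaggedTensor.
seq = tf.ragged.constant([[1, 2, 3], [4, 5], [6]])
position = 1  # index of the element to erase, as in ONNX SequenceErase

# Same slice-and-concat pattern as the converted op above: keep everything
# before `position` and everything after it, then rejoin along axis 0.
erased = tf.concat([seq[:position], seq[position + 1:]], axis=0)
print(erased)  # <tf.RaggedTensor [[1, 2, 3], [6]]>
```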
118 changes: 118 additions & 0 deletions onnx2tf/ops/SequenceInsert.py
@@ -0,0 +1,118 @@
import random
random.seed(0)
import numpy as np
np.random.seed(0)
import tensorflow as tf
import onnx_graphsurgeon as gs
from onnx2tf.utils.common_functions import (
get_constant_or_variable,
print_node_info,
inverted_operation_enable_disable,
make_tf_node_info,
)


@print_node_info
@inverted_operation_enable_disable
def make_node(
*,
graph_node: gs.Node,
tf_layers_dict: dict,
**kwargs: dict,
):
"""SequenceInsert
Parameters
----------
graph_node: gs.Node
graph_surgeon Node
tf_layers_dict: dict
optype, shape, dtype, tensorflow graph
"""
before_op_output_shape_trans_1 = \
tf_layers_dict.get(graph_node.inputs[0].name, {}).get('before_op_output_shape_trans', True)
before_op_output_shape_trans_2 = \
tf_layers_dict.get(graph_node.inputs[1].name, {}).get('before_op_output_shape_trans', True)
before_op_output_shape_trans = \
before_op_output_shape_trans_1 \
and before_op_output_shape_trans_2

graph_node_input_1 = get_constant_or_variable(
graph_node.inputs[0],
before_op_output_shape_trans,
)
graph_node_input_2 = get_constant_or_variable(
graph_node.inputs[1],
before_op_output_shape_trans,
)
graph_node_input_3 = None
if len(graph_node.inputs) >= 3:
graph_node_input_3 = get_constant_or_variable(
graph_node.inputs[2],
before_op_output_shape_trans,
)
graph_node_output: gs.Variable = graph_node.outputs[0]
shape = graph_node_output.shape
dtype = graph_node_output.dtype

# Preserving Graph Structure (Dict)
tf_layers_dict[graph_node_output.name] = {
'optype': graph_node.op,
'shape': shape,
'dtype': dtype,
}

# Generation of TF OP
input_sequence = tf_layers_dict[graph_node_input_1.name]['tf_node'] \
if isinstance(graph_node_input_1, gs.Variable) else graph_node_input_1
input_sequence_length = input_sequence.shape[-1]
input_tensor = tf_layers_dict[graph_node_input_2.name]['tf_node'] \
if isinstance(graph_node_input_2, gs.Variable) else graph_node_input_2
position = tf_layers_dict[graph_node_input_3.name]['tf_node'] \
if isinstance(graph_node_input_3, gs.Variable) else graph_node_input_3
if position is None:
position = input_sequence_length

input_tensor = tf.expand_dims(input_tensor, 0)
output_seq = None
if input_sequence.shape[0] is not None:
if input_sequence.shape[0] == 0:
output_seq = tf.RaggedTensor.from_tensor(input_tensor)
else:
s1 = input_sequence[:position]
s2 = input_sequence[position:]
output_seq = tf.concat([s1, input_tensor, s2], axis=0)
else:
output_seq = tf.cond(
tf.equal(
x=input_sequence.bounding_shape(axis=0),
y=0,
),
lambda: tf.RaggedTensor.from_tensor(input_tensor),
lambda: tf.concat(
[
input_sequence[:position],
input_tensor,
input_sequence[position:],
],
axis=0
)
)
tf_layers_dict[graph_node_output.name]['tf_node'] = output_seq

# Generation of Debug Info
tf_layers_dict[graph_node_output.name]['tf_node_info'] = \
make_tf_node_info(
node_info={
'tf_op_type': 'SequenceInsert',
'tf_inputs': {
'input_sequence': input_sequence,
'input_tensor': input_tensor,
'position': position,
},
'tf_outputs': {
'output': tf_layers_dict[graph_node_output.name]['tf_node'],
},
}
)
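
For SequenceInsert the incoming tensor is given one extra leading dimension and spliced in at `position`; when the sequence might be empty, the op above falls back to `tf.cond` on the ragged bounding shape. The following sketch covers only the non-empty path, assumes the same RaggedTensor representation used by the converter's other sequence ops, and uses made-up example data; it is not part of the commit.

```python
import tensorflow as tf

seq = tf.ragged.constant([[1, 2, 3], [6]])
new_elem = tf.constant([7, 8])  # tensor to insert
position = 1                    # ONNX defaults to the end of the sequence when omitted

# Turn the new tensor into a one-element ragged sequence so it can be
# concatenated with the slices of `seq` along axis 0.
new_entry = tf.RaggedTensor.from_tensor(tf.expand_dims(new_elem, 0))

inserted = tf.concat([seq[:position], new_entry, seq[position:]], axis=0)
print(inserted)  # <tf.RaggedTensor [[1, 2, 3], [7, 8], [6]]>
```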
75 changes: 75 additions & 0 deletions onnx2tf/ops/SequenceLength.py
@@ -0,0 +1,75 @@
import random
random.seed(0)
import numpy as np
np.random.seed(0)
import tensorflow as tf
import onnx_graphsurgeon as gs
from onnx2tf.utils.common_functions import (
get_constant_or_variable,
print_node_info,
inverted_operation_enable_disable,
make_tf_node_info,
)


@print_node_info
@inverted_operation_enable_disable
def make_node(
*,
graph_node: gs.Node,
tf_layers_dict: dict,
**kwargs: dict,
):
"""SequenceLength
Parameters
----------
graph_node: gs.Node
graph_surgeon Node
tf_layers_dict: dict
optype, shape, dtype, tensorflow graph
"""
before_op_output_shape_trans_1 = \
tf_layers_dict.get(graph_node.inputs[0].name, {}).get('before_op_output_shape_trans', True)
before_op_output_shape_trans = \
before_op_output_shape_trans_1

graph_node_input_1 = get_constant_or_variable(
graph_node.inputs[0],
before_op_output_shape_trans,
)
graph_node_output: gs.Variable = graph_node.outputs[0]
shape = graph_node_output.shape
dtype = graph_node_output.dtype

# Preserving Graph Structure (Dict)
tf_layers_dict[graph_node_output.name] = {
'optype': graph_node.op,
'shape': shape,
'dtype': dtype,
}

# Generation of TF OP
input_sequence = tf_layers_dict[graph_node_input_1.name]['tf_node'] \
if isinstance(graph_node_input_1, gs.Variable) else graph_node_input_1

tf_layers_dict[graph_node_output.name]['tf_node'] = \
tf.shape(
input=input_sequence.to_sparse(),
out_type=tf.int64,
)[0]

# Generation of Debug Info
tf_layers_dict[graph_node_output.name]['tf_node_info'] = \
make_tf_node_info(
node_info={
'tf_op_type': 'SequenceLength',
'tf_inputs': {
'input_sequence': input_sequence,
},
'tf_outputs': {
'output': tf_layers_dict[graph_node_output.name]['tf_node'],
},
}
)
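
SequenceLength reduces to the size of the outermost ragged dimension. A minimal sketch of the same `to_sparse` + `tf.shape` pattern used above, with made-up example data (not part of the commit); `seq.nrows()` would give the same result more directly.

```python
import tensorflow as tf

seq = tf.ragged.constant([[1, 2, 3], [4, 5], [6]])

# Sequence length == size of dimension 0, read from the sparse dense_shape.
length = tf.shape(input=seq.to_sparse(), out_type=tf.int64)[0]
print(length)  # tf.Tensor(3, shape=(), dtype=int64)
```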
