@@ -431,12 +431,12 @@ def test_convert_saved_model_with_fused_conv2d(self):
 
     fusedOp = None
     for node in nodes:
-      self.assertTrue(not 'BatchNorm' in node['op'])
-      self.assertTrue(not 'Relu' in node['op'])
-      self.assertTrue(not 'BiasAdd' in node['op'])
+      self.assertNotIn('BatchNorm', node['op'])
+      self.assertNotIn('Relu', node['op'])
+      self.assertNotIn('BiasAdd', node['op'])
       if node['op'] == '_FusedConv2D':
         fusedOp = node
-    self.assertTrue(fusedOp is not None)
+    self.assertIsNot(fusedOp, None)
     self.assertEqual(
         base64.b64decode(fusedOp['attr']['fused_ops']['list']['s'][0]),
         b'BiasAdd')
@@ -476,12 +476,12 @@ def test_convert_saved_model_with_fused_matmul(self):
     nodes = model_json['modelTopology']['node']
     fusedOp = None
     for node in nodes:
-      self.assertTrue(node['op'] != 'MatMul')
-      self.assertTrue(not 'Relu' in node['op'])
-      self.assertTrue(not 'BiasAdd' in node['op'])
+      self.assertNotEqual(node['op'], 'MatMul')
+      self.assertNotIn('Relu', node['op'])
+      self.assertNotIn('BiasAdd', node['op'])
       if node['op'] == graph_rewrite_util.FUSED_MATMUL:
         fusedOp = node
-    self.assertTrue(fusedOp is not None)
+    self.assertIsNot(fusedOp, None)
     self.assertIsNot(fusedOp['attr']['transpose_a'], None)
     self.assertIsNot(fusedOp['attr']['transpose_b'], None)
     self.assertEqual(
@@ -524,12 +524,12 @@ def test_convert_saved_model_with_fused_depthwise_conv2d(self):
 
     fusedOp = None
     for node in nodes:
-      self.assertTrue(not 'BatchNorm' in node['op'])
-      self.assertTrue(not 'Relu' in node['op'])
-      self.assertTrue(not 'BiasAdd' in node['op'])
+      self.assertNotIn('BatchNorm', node['op'])
+      self.assertNotIn('Relu', node['op'])
+      self.assertNotIn('BiasAdd', node['op'])
       if node['op'] == graph_rewrite_util.FUSED_DEPTHWISE_CONV2D:
         fusedOp = node
-    self.assertTrue(fusedOp is not None)
+    self.assertIsNot(fusedOp, None)
     self.assertIsNot(fusedOp['attr']['dilations'], None)
     self.assertIsNot(fusedOp['attr']['strides'], None)
     self.assertEqual(
@@ -581,8 +581,8 @@ def test_convert_saved_model_with_prelu(self):
       if node['op'] == graph_rewrite_util.FUSED_DEPTHWISE_CONV2D:
         depthwise_fused_op = node
     self.assertTrue(prelu_op is None)
-    self.assertTrue(fused_op is not None)
-    self.assertTrue(depthwise_fused_op is not None)
+    self.assertIsNot(fused_op, None)
+    self.assertIsNot(depthwise_fused_op, None)
 
     fused_ops = list(map(base64.b64decode,
                          fused_op['attr']['fused_ops']['list']['s']))
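Every hunk applies the same refactor: a generic self.assertTrue(...) wrapper is replaced by the specific unittest assertion (assertNotIn, assertNotEqual, assertIsNot), so a failure reports the actual member, container, or object involved rather than the generic "False is not true". Below is a minimal, standalone sketch of that difference using only the standard unittest module; the class and test names are illustrative and are not part of the converter test suite.

import unittest


class AssertionStyleExample(unittest.TestCase):
  """Minimal sketch contrasting generic assertTrue with specific assertions."""

  def test_specific_assertions_report_values(self):
    node = {'op': '_FusedConv2D'}
    fused_op = node

    # Old style: on failure, unittest can only report "False is not true".
    self.assertTrue('BatchNorm' not in node['op'])
    self.assertTrue(fused_op is not None)

    # New style: on failure, unittest names the member, container, or object.
    self.assertNotIn('BatchNorm', node['op'])
    self.assertNotEqual(node['op'], 'MatMul')
    self.assertIsNot(fused_op, None)  # assertIsNotNone(fused_op) is equivalent


if __name__ == '__main__':
  unittest.main()

assertIsNotNone(fused_op) would be the most idiomatic spelling of the None check; assertIsNot(fused_op, None), as used in the diff, behaves the same way.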