Skip to content

Commit 8267a95

Browse files
authored
fix g3 lint error (#2502)
MISC
1 parent b80ba7e commit 8267a95

File tree

1 file changed

+14
-14
lines changed

1 file changed

+14
-14
lines changed

tfjs-converter/python/tensorflowjs/converters/tf_saved_model_conversion_v2_test.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -431,12 +431,12 @@ def test_convert_saved_model_with_fused_conv2d(self):
431431

432432
fusedOp = None
433433
for node in nodes:
434-
self.assertTrue(not 'BatchNorm' in node['op'])
435-
self.assertTrue(not 'Relu' in node['op'])
436-
self.assertTrue(not 'BiasAdd' in node['op'])
434+
self.assertNotIn('BatchNorm', node['op'])
435+
self.assertNotIn('Relu', node['op'])
436+
self.assertNotIn('BiasAdd', node['op'])
437437
if node['op'] == '_FusedConv2D':
438438
fusedOp = node
439-
self.assertTrue(fusedOp is not None)
439+
self.assertIsNot(fusedOp, None)
440440
self.assertEqual(
441441
base64.b64decode(fusedOp['attr']['fused_ops']['list']['s'][0]),
442442
b'BiasAdd')
@@ -476,12 +476,12 @@ def test_convert_saved_model_with_fused_matmul(self):
476476
nodes = model_json['modelTopology']['node']
477477
fusedOp = None
478478
for node in nodes:
479-
self.assertTrue(node['op'] != 'MatMul')
480-
self.assertTrue(not 'Relu' in node['op'])
481-
self.assertTrue(not 'BiasAdd' in node['op'])
479+
self.assertNotEqual(node['op'], 'MatMul')
480+
self.assertNotIn('Relu', node['op'])
481+
self.assertNotIn('BiasAdd', node['op'])
482482
if node['op'] == graph_rewrite_util.FUSED_MATMUL:
483483
fusedOp = node
484-
self.assertTrue(fusedOp is not None)
484+
self.assertIsNot(fusedOp, None)
485485
self.assertIsNot(fusedOp['attr']['transpose_a'], None)
486486
self.assertIsNot(fusedOp['attr']['transpose_b'], None)
487487
self.assertEqual(
@@ -524,12 +524,12 @@ def test_convert_saved_model_with_fused_depthwise_conv2d(self):
524524

525525
fusedOp = None
526526
for node in nodes:
527-
self.assertTrue(not 'BatchNorm' in node['op'])
528-
self.assertTrue(not 'Relu' in node['op'])
529-
self.assertTrue(not 'BiasAdd' in node['op'])
527+
self.assertNotIn('BatchNorm', node['op'])
528+
self.assertNotIn('Relu', node['op'])
529+
self.assertNotIn('BiasAdd', node['op'])
530530
if node['op'] == graph_rewrite_util.FUSED_DEPTHWISE_CONV2D:
531531
fusedOp = node
532-
self.assertTrue(fusedOp is not None)
532+
self.assertIsNot(fusedOp, None)
533533
self.assertIsNot(fusedOp['attr']['dilations'], None)
534534
self.assertIsNot(fusedOp['attr']['strides'], None)
535535
self.assertEqual(
@@ -581,8 +581,8 @@ def test_convert_saved_model_with_prelu(self):
581581
if node['op'] == graph_rewrite_util.FUSED_DEPTHWISE_CONV2D:
582582
depthwise_fused_op = node
583583
self.assertTrue(prelu_op is None)
584-
self.assertTrue(fused_op is not None)
585-
self.assertTrue(depthwise_fused_op is not None)
584+
self.assertIsNot(fused_op, None)
585+
self.assertIsNot(depthwise_fused_op, None)
586586

587587
fused_ops = list(map(base64.b64decode,
588588
fused_op['attr']['fused_ops']['list']['s']))

0 commit comments

Comments (0)