@@ -41,8 +41,7 @@ def testSimple(self):
         dataset_ops.Dataset.from_tensor_slices(components).map(lambda x: x * x)
         .apply(
             grouping.group_by_window(lambda x: x % 2, lambda _, xs: xs.batch(4),
-                                     4))
-        .make_initializable_iterator())
+                                     4)).make_initializable_iterator())
     init_op = iterator.initializer
     get_next = iterator.get_next()
 
@@ -53,7 +52,8 @@ def testSimple(self):
       while True:
         result = sess.run(get_next)
         self.assertTrue(
-            all(x % 2 == 0 for x in result) or all(x % 2 == 1)
+            all(x % 2 == 0
+                for x in result) or all(x % 2 == 1)
             for x in result)
         counts.append(result.shape[0])
 
@@ -116,8 +116,8 @@ def reduce_func(_, xs):
     iterator = (
         dataset_ops.Dataset.from_tensor_slices(components)
         .map(lambda x: (x, ops.convert_to_tensor([x * x]))).apply(
-            grouping.group_by_window(lambda x, _: x % 2, reduce_func, 32))
-        .make_initializable_iterator())
+            grouping.group_by_window(lambda x, _: x % 2, reduce_func,
+                                     32)).make_initializable_iterator())
     init_op = iterator.initializer
     get_next = iterator.get_next()
 
@@ -136,7 +136,8 @@ def reduce_func(key, window):
           window.padded_batch(
               4, padded_shapes=tensor_shape.TensorShape([None])),
           window.padded_batch(
-              4, padded_shapes=ops.convert_to_tensor([(key + 1) * 10])),))
+              4, padded_shapes=ops.convert_to_tensor([(key + 1) * 10])),
+      ))
 
     iterator = (
         dataset_ops.Dataset.from_tensor_slices(components)
@@ -200,9 +201,10 @@ def _dynamicPad(self, bucket, window, window_size):
     # dynamically and does not rely on static shape information about
     # the arguments.
     return dataset_ops.Dataset.zip(
-        (dataset_ops.Dataset.from_tensors(bucket), window.padded_batch(
-            32, (tensor_shape.TensorShape([]), tensor_shape.TensorShape([None]),
-                 tensor_shape.TensorShape([3])))))
+        (dataset_ops.Dataset.from_tensors(bucket),
+         window.padded_batch(
+             32, (tensor_shape.TensorShape([]), tensor_shape.TensorShape(
+                 [None]), tensor_shape.TensorShape([3])))))
 
   def testSingleBucket(self):
 
@@ -307,12 +309,13 @@ def _map_fn(v):
 
     def _dynamic_pad_fn(bucket, window, _):
       return dataset_ops.Dataset.zip(
-          (dataset_ops.Dataset.from_tensors(bucket), window.padded_batch(
-              32, {
-                  "x": tensor_shape.TensorShape([]),
-                  "y": tensor_shape.TensorShape([None]),
-                  "z": tensor_shape.TensorShape([3])
-              })))
+          (dataset_ops.Dataset.from_tensors(bucket),
+           window.padded_batch(
+               32, {
+                   "x": tensor_shape.TensorShape([]),
+                   "y": tensor_shape.TensorShape([None]),
+                   "z": tensor_shape.TensorShape([3])
+               })))
 
     input_dataset = (
         dataset_ops.Dataset.from_tensor_slices(math_ops.range(128)).map(_map_fn)
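
The hunks above only re-wrap an existing group_by_window pipeline; the behaviour under test is unchanged. As an illustration only (not part of this change), the sketch below reproduces the testSimple pattern against the public TF 1.x API, with tf.data.experimental.group_by_window (available in later 1.x releases) standing in for the internal dataset_ops/grouping modules the test imports, and graph-mode tf.Session execution assumed.

# Minimal sketch, assuming the public TF 1.x API rather than the internal
# test modules used in the diff above.
import numpy as np
import tensorflow as tf

components = np.random.randint(100, size=(200,)).astype(np.int64)

dataset = (
    tf.data.Dataset.from_tensor_slices(components)
    .map(lambda x: x * x)
    .apply(
        tf.data.experimental.group_by_window(
            key_func=lambda x: x % 2,               # group elements by parity
            reduce_func=lambda _, xs: xs.batch(4),  # batch within each group
            window_size=4)))

iterator = dataset.make_initializable_iterator()
get_next = iterator.get_next()

with tf.Session() as sess:
  sess.run(iterator.initializer)
  try:
    while True:
      result = sess.run(get_next)
      # Squaring preserves parity, so each emitted batch is all-even or all-odd.
      assert (all(x % 2 == 0 for x in result) or
              all(x % 2 == 1 for x in result))
  except tf.errors.OutOfRangeError:
    pass  # End of the dataset.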