Commit 0bd9dec

feat: finish clearer version of downsample
1 parent 56d80b1 commit 0bd9dec

1 file changed: +29 −21 lines changed

create_downsampled/main.py

Lines changed: 29 additions & 21 deletions
@@ -64,13 +64,14 @@ def main():
     zarr_store = load_file(
         0, 0, use_gcs_bucket, input_path, total_rows, total_cols, all_files, gcs_project
     )
-    volume_shape = zarr_store.shape
+    single_vol_shape = zarr_store.shape
     # Input is in Z, T, C, Y, X order but want XYTCZ order
-    single_file_xyz_shape = [volume_shape[4], volume_shape[3], volume_shape[1]]
-    single_file_shape = np.array(volume_shape)
-    num_chunks_per_dim = np.ceil(volume_shape / single_file_shape).astype(int)
-    num_channels = min(volume_shape[2], channel_limit)
-    data_type = "uint16"
+    single_file_xyz_shape = [
+        single_vol_shape[4],
+        single_vol_shape[3],
+        single_vol_shape[1],
+    ]
+    single_file_shape = np.array(single_file_xyz_shape)
 
     # Compute volume and chunk sizes
     volume_size, chunk_size = compute_volume_and_chunk_size(
@@ -81,6 +82,10 @@ def main():
         manual_chunk_size,
     )
 
+    num_chunks_per_dim = np.ceil(volume_size / single_file_shape).astype(int)
+    num_channels = min(single_vol_shape[2], channel_limit)
+    data_type = "uint16"
+
     vols = create_cloudvolume_info(
         num_channels,
         data_type,
@@ -104,7 +109,7 @@ def main():
         bounds = process(
             args=coord,
             single_file_shape=single_file_shape,
-            volume_shape=volume_shape,
+            volume_shape=volume_size,
             vols=vols,
             chunk_size=chunk_size,
             num_mips=num_mips,
@@ -141,25 +146,28 @@ def main():
         print(f"Total chunks uploaded so far: {total_uploads}")
 
     if failed_chunks:
-        print(f"Failed to process {len(failed_chunks)} chunks:")
-        for chunk in failed_chunks:
-            print(f" {chunk}")
+        print("Some chunks failed to upload, writing to failed_chunks.txt")
+        with open(output_path / "failed_chunks.txt", "w") as f:
+            for item in failed_chunks:
+                f.write(f"{item}\n")
 
     remaining_files = check_any_remaining_chunks(
         num_mips=num_mips, output_path=output_path, uploaded_files=uploaded_files
     )
     if remaining_files:
-        print(f"Remaining chunks: {remaining_files}")
-
-    # Do at the end to avoid uploading info file repeatedly
-    # if issues during processing of chunks
-    sync_info_to_gcs_output(
-        output_path,
-        gcs_output_path,
-        use_gcs_output,
-        gcs_project,
-        gcs_output_bucket_name,
-    )
+        for f in remaining_files:
+            if f not in failed_chunks:
+                print(f"Remaining file not yet uploaded: {f}")
+    else:
+        # Do at the end to avoid uploading info file repeatedly
+        # if issues during processing of chunks
+        sync_info_to_gcs_output(
+            output_path,
+            gcs_output_path,
+            use_gcs_output,
+            gcs_project,
+            gcs_output_bucket_name,
+        )
 
 
 if __name__ == "__main__":
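
Note on the arithmetic this commit changes: previously, single_file_shape was built from the same five-axis zarr shape as volume_shape, so np.ceil(volume_shape / single_file_shape) always evaluated to ones; the new code reorders one file's shape into the chunked axes and derives num_chunks_per_dim from the full volume_size returned by compute_volume_and_chunk_size. A minimal sketch of that calculation follows, with made-up shapes; the real values come from load_file and compute_volume_and_chunk_size in main.py.

# Sketch only: the shapes and channel_limit below are illustrative, not from the real dataset.
import numpy as np

channel_limit = 2                                  # hypothetical setting
single_vol_shape = (1, 50, 2, 2048, 1024)          # one file's zarr shape in (Z, T, C, Y, X) order

# Reorder to the three chunked axes, using the same indices as the diff
single_file_xyz_shape = [
    single_vol_shape[4],   # X
    single_vol_shape[3],   # Y
    single_vol_shape[1],   # index 1, as in the diff
]
single_file_shape = np.array(single_file_xyz_shape)   # -> [1024 2048 50]

# In main.py this comes from compute_volume_and_chunk_size(); a made-up multiple here
volume_size = np.array([4096, 8192, 50])

# Chunk count per axis now uses the full volume, not one file's shape
num_chunks_per_dim = np.ceil(volume_size / single_file_shape).astype(int)   # -> [4 4 1]
num_channels = min(single_vol_shape[2], channel_limit)                      # -> 2
data_type = "uint16"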
