|
1 | 1 | from pathlib import Path |
2 | 2 | import numpy as np |
3 | | -import h5py |
4 | 3 | import json |
5 | 4 | from icu import Char |
6 | 5 | from keras.models import Sequential |
@@ -605,13 +604,10 @@ def save_model(self): |
605 | 604 | model_path = (Path.joinpath(Path(__file__).parent.parent.absolute(), "Models/" + self.name)) |
606 | 605 | tf.saved_model.save(self.model, model_path) |
607 | 606 |
|
608 | | - # Inlining weight saving directly into HDF5 format |
609 | | - weights_file = Path.joinpath(Path(__file__).parent.parent.absolute(), "Models/" + self.name + "/weights.h5") |
610 | | - with h5py.File(str(weights_file), 'w') as hdf5_file: |
611 | | - # Iterate over the model weights and save each one as a dataset in the HDF5 file |
612 | | - for i, weight in enumerate(self.model.weights): |
613 | | - weight_name = f"weight_{i+1}" |
614 | | - hdf5_file.create_dataset(weight_name, data=weight.numpy()) # Save weight tensor directly |
| 607 | + # Save all model weights together in a single NumPy array file (weights.npy) |
| 608 | + file = Path.joinpath(Path(__file__).parent.parent.absolute(), "Models/" + self.name + "/weights") |
| 609 | + np.save(str(file), self.model.weights) |
| 610 | + |
615 | 611 | # Save the model in JSON format, which contains both the weights and the grapheme clusters dictionary |
616 | 612 | json_file = Path.joinpath(Path(__file__).parent.parent.absolute(), "Models/" + self.name + "/weights.json") |
617 | 613 | with open(str(json_file), 'w') as wfile: |
@@ -640,7 +636,6 @@ def save_model(self): |
640 | 636 | dic_model["data"] = serial_mat |
641 | 637 | output["mat{}".format(i+1)] = dic_model |
642 | 638 | json.dump(output, wfile) |
643 | | - print(f"Model, weights in .h5, and weights metadata in .json saved successfully!") |
644 | 639 |
|
645 | 640 | def set_model(self, input_model): |
646 | 641 | """ |
|
0 commit comments