Skip to content

Commit a3f544b

Browse files
authored
update bibtex
1 parent 651a704 commit a3f544b

File tree

1 file changed

+5
-18
lines changed

1 file changed

+5
-18
lines changed

README.md

+5-18
Original file line numberDiff line numberDiff line change
@@ -80,23 +80,10 @@ For more accurate metrics:
8080
}
8181
```
8282
```
83-
@InProceedings{10.1007/978-3-030-86331-9_37,
84-
author="Zhao, Wenqi
85-
and Gao, Liangcai
86-
and Yan, Zuoyu
87-
and Peng, Shuai
88-
and Du, Lin
89-
and Zhang, Ziyin",
90-
editor="Llad{\'o}s, Josep
91-
and Lopresti, Daniel
92-
and Uchida, Seiichi",
93-
title="Handwritten Mathematical Expression Recognition with Bidirectionally Trained Transformer",
94-
booktitle="Document Analysis and Recognition -- ICDAR 2021",
95-
year="2021",
96-
publisher="Springer International Publishing",
97-
address="Cham",
98-
pages="570--584",
99-
abstract="Encoder-decoder models have made great progress on handwritten mathematical expression recognition recently. However, it is still a challenge for existing methods to assign attention to image features accurately. Moreover, those encoder-decoder models usually adopt RNN-based models in their decoder part, which makes them inefficient in processing long sequences. In this paper, a transformer-based decoder is employed to replace RNN-based ones, which makes the whole model architecture very concise. Furthermore, a novel training strategy is introduced to fully exploit the potential of the transformer in bidirectional language modeling. Compared to several methods that do not use data augmentation, experiments demonstrate that our model improves the ExpRate of current state-of-the-art methods on CROHME 2014 by 2.23{\%}. Similarly, on CROHME 2016 and CROHME 2019, we improve the ExpRate by 1.92{\%} and 2.28{\%} respectively.",
100-
isbn="978-3-030-86331-9"
83+
@inproceedings{Zhao2021HandwrittenME,
  title     = {Handwritten Mathematical Expression Recognition with Bidirectionally Trained Transformer},
  author    = {Zhao, Wenqi and Gao, Liangcai and Yan, Zuoyu and Peng, Shuai and Du, Lin and Zhang, Ziyin},
  editor    = {Llad{\'o}s, Josep and Lopresti, Daniel and Uchida, Seiichi},
  booktitle = {Document Analysis and Recognition -- {ICDAR} 2021},
  year      = {2021},
  publisher = {Springer International Publishing},
  address   = {Cham},
  pages     = {570--584},
  isbn      = {978-3-030-86331-9},
}
10289
```

0 commit comments

Comments
 (0)