forked from jamescourtney/FlatSharp
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: SerializerOptionsExample.cs
274 lines (237 loc) · 11.8 KB
/
SerializerOptionsExample.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
/*
* Copyright 2020 James Courtney
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Samples.SerializerOptions
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using FlatSharp;
using FlatSharp.Attributes;
/// <summary>
/// This sample shows some of the different serializer options in FlatSharp and discusses the tradeoffs with them.
/// </summary>
/// <remarks>
/// FlatSharp exposes 4 core deserialization modes (grouped below into 3 categories), ordered from most greedy to laziest:
/// 1) Greedy / GreedyMutable:
/// Pre-parse everything and release references to the underlying buffer.
/// This is the "normal" option, and therefore the default. When in Greedy mode, FlatSharp behaves like other .NET serializers (JSON.NET, etc).
///
/// 2) Progressive:
/// The FlatBuffer is read on-demand. As pieces are read, they are cached. Each logical element of the buffer will be accessed at most once.
/// Progressive is a great choice when access patterns are not predictable.
///
/// 3) Lazy:
/// Nothing is ever cached, and data is reconstituted from the underlying buffer each time. This option is fastest when you access each item
/// no more than once, but gets expensive very quickly if you repeatedly access items.
///
/// In general, your choice of deserialization method will be informed by your answers to these
/// questions:
///
/// Question 1: Am I managing the lifetime of my input buffers?
/// Greedy deserialization guarantees that deserialized objects hold no references to the input buffer (literally, the generated code
/// does not even have a variable declared for the input buffer), so you are free to immediately recycle/reuse the buffer.
/// If you are pooling or doing your own lifetime management of these objects, then Greedy deserialization may make sense so the
/// buffer can be immediately reused. Otherwise, you will likely see better performance from another option.
///
/// Question 2: Will I read the entire object graph?
/// If you're not reading all properties of the object, then Greedy deserialization will waste cycles preparsing data you will not use.
/// If you plan to touch each field no more than once, then a lazier parsing option will be the best approach.
///
/// Question 3: Do I have large vectors?
/// Array allocations can be expensive, especially for large arrays. If you are deserializing large vectors, you should use some form of lazy parsing
/// (options.Lazy or options.Progressive). These options will not preallocate giant arrays (though they may allocate some smaller arrays).
///
/// The right way to handle this is to benchmark, and make your choices based on that. What performs best depends on your access patterns. Objectively,
/// all of these configurations are quite fast.
/// </remarks>
public class SerializerOptionsExample
{
    public static void Run()
    {
        // Build one sample table with a string field and a vector so that each
        // deserialization mode below has the same data to demonstrate against.
        DemoTable demo = new DemoTable
        {
            Name = "My demo table",
            ListVector = new List<InnerTable>
            {
                new InnerTable { Fruit = "Apple" },
                new InnerTable { Fruit = "Banana" },
                new InnerTable { Fruit = "Pear" }
            }
        };

        // In order of increasing greediness: laziest first, greediest last.
        LazyDeserialization(demo);
        ProgressiveDeserialization(demo);
        GreedyDeserialization(demo);
        GreedyMutableDeserialization(demo);
    }

    /// <summary>
    /// In lazy deserialization, FlatSharp reads from the underlying buffer each time. No caching is done. This will be
    /// the fastest option if your access patterns are sparse and you touch each element only once.
    /// </summary>
    /// <param name="demo">Sample table to serialize and then reparse lazily.</param>
    public static void LazyDeserialization(DemoTable demo)
    {
        var serializer = new FlatBufferSerializer(new FlatBufferSerializerOptions(FlatBufferDeserializationOption.Lazy));

        byte[] buffer = new byte[1024];
        serializer.Serialize(demo, buffer);
        var parsed = serializer.Parse<DemoTable>(buffer);

        // Lazy deserialization reads objects from vectors each time you ask for them,
        // so two reads of the same index produce two distinct instances.
        InnerTable index0_1 = parsed.ListVector![0];
        InnerTable index0_2 = parsed.ListVector[0];
        Debug.Assert(!object.ReferenceEquals(index0_1, index0_2), "A different instance is returned each time from lazy vectors");

        try
        {
            parsed.Name = "Bob";
            Debug.Assert(false); // the above will throw.
        }
        catch (NotMutableException)
        {
            // Lazy mode is immutable. Writes will never succeed unless using write through.
        }

        // Properties from tables and structs are NOT cached in Lazy mode: each read
        // re-parses the buffer and materializes a brand-new instance.
        string? name = parsed.Name;
        string? name2 = parsed.Name;
        Debug.Assert(
            !object.ReferenceEquals(name, name2),
            "When reading table/struct properties Lazy parsing returns a different instance each time.");

        // Invalidate the whole buffer. Undefined behavior past here, because Lazy
        // objects still read directly from the (now-zeroed) buffer!
        buffer.AsSpan().Fill(0);
        try
        {
            var whoKnows = parsed.ListVector[1];
            Debug.Assert(false);
        }
        catch
        {
            // This can be any sort of exception. This behavior is undefined.
        }
    }

    /// <summary>
    /// The next step up in greediness is Progressive mode. In this mode, Flatsharp will cache the results of property and
    /// vector accesses. So, if you read the results of FooObject.Property1 multiple times, the same value comes back each time.
    /// </summary>
    /// <param name="demo">Sample table to serialize and then reparse progressively.</param>
    public static void ProgressiveDeserialization(DemoTable demo)
    {
        var serializer = new FlatBufferSerializer(new FlatBufferSerializerOptions(FlatBufferDeserializationOption.Progressive));

        byte[] buffer = new byte[1024];
        serializer.Serialize(demo, buffer);
        var parsed = serializer.Parse<DemoTable>(buffer);

        try
        {
            parsed.Name = "Bob";
            Debug.Assert(false); // the above will throw.
        }
        catch (NotMutableException)
        {
            // Progressive mode is immutable. Writes will never succeed, unless using writethrough.
        }

        // Properties from tables and structs are cached after they are read.
        string? name = parsed.Name;
        string? name2 = parsed.Name;
        Debug.Assert(
            object.ReferenceEquals(name, name2),
            "When reading table/struct properties, Progressive mode returns the same instance.");

        // Progressive mode also caches the results of vector lookups: repeated reads
        // of the same index return the same cached instance.
        InnerTable index0_1 = parsed.ListVector![0];
        InnerTable index0_2 = parsed.ListVector[0];
        Debug.Assert(object.ReferenceEquals(index0_1, index0_2), "The same instances is also returned from vectors.");

        // Invalidate the whole buffer. Undefined behavior past here!
        buffer.AsSpan().Fill(0);
        try
        {
            var whoKnows = parsed.ListVector[1]; // we haven't accessed element 1 before, so this will lead to issues since Progressive still uses
                                                 // the underlying buffer.
            Debug.Assert(false);
        }
        catch
        {
            // This can be any sort of exception. This behavior is undefined.
        }
    }

    /// <summary>
    /// Greedy deserialization operates the same way that conventional serializers do. The entire buffer is traversed
    /// and the structure is copied into the deserialized object. This is the most straightforward way of using FlatSharp,
    /// because the results it gives are predictable, and require no developer cognitive overhead. However, it can be less efficient
    /// in cases where you do not need to access all data in the buffer.
    /// </summary>
    /// <param name="demo">Sample table to serialize and then reparse greedily.</param>
    public static void GreedyDeserialization(DemoTable demo)
    {
        // Same as FlatBufferSerializer.Default
        var serializer = new FlatBufferSerializer(new FlatBufferSerializerOptions(FlatBufferDeserializationOption.Greedy));

        byte[] buffer = new byte[1024];
        serializer.Serialize(demo, buffer);
        long originalSum = buffer.Sum(x => (long)x);
        var parsed = serializer.Parse<DemoTable>(buffer);

        // Fill array with 0. Source data is gone now, but we can still read the buffer because we were greedy!
        buffer.AsSpan().Fill(0);

        InnerTable index0_1 = parsed.ListVector![0];
        InnerTable index0_2 = parsed.ListVector[0];
        Debug.Assert(object.ReferenceEquals(index0_1, index0_2), "Greedy deserialization returns you the same instance each time");

        // We cleared the data, but can still read the name. Greedy deserialization is easy!
        string? name = parsed.Name;

        // By default, Flatsharp will not allow mutations to properties. You can learn more about this in the mutable example below.
        try
        {
            parsed.Name = "George Washington";
            Debug.Assert(false);
        }
        catch (NotMutableException)
        {
        }

        try
        {
            parsed.ListVector.Clear();
            Debug.Assert(false);
        }
        catch (NotSupportedException)
        {
        }
    }

    /// <summary>
    /// This example shows GreedyMutable deserialization. This is exactly the same as Greedy deserialization, but setters are generated for
    /// the objects, so vectors and properties are mutable in a predictable way.
    /// </summary>
    /// <param name="demo">Sample table to serialize and then reparse as greedy-mutable.</param>
    public static void GreedyMutableDeserialization(DemoTable demo)
    {
        var serializer = new FlatBufferSerializer(new FlatBufferSerializerOptions(FlatBufferDeserializationOption.GreedyMutable));

        byte[] buffer = new byte[1024];
        serializer.Serialize(demo, buffer);
        long originalSum = buffer.Sum(x => (long)x);
        var parsed = serializer.Parse<DemoTable>(buffer);

        // Mutations succeed in GreedyMutable mode...
        parsed.Name = "James Adams";
        parsed.ListVector!.Clear();
        parsed.ListVector.Add(new InnerTable());

        // ...but they only affect the in-memory object graph, never the source buffer.
        long newSum = buffer.Sum(x => (long)x);
        Debug.Assert(
            newSum == originalSum,
            "Changes to the deserialized objects are not written back to the buffer. You'll need to re-serialize it to a new buffer for that.");
    }
}
/// <summary>
/// Sample FlatBuffer table with a scalar string field and a vector of sub-tables,
/// used by <see cref="SerializerOptionsExample"/> to demonstrate deserialization modes.
/// Members are virtual so FlatSharp can subclass this type for lazy/progressive parsing.
/// </summary>
[FlatBufferTable]
public class DemoTable
{
    // Field index 0 in the FlatBuffer vtable.
    [FlatBufferItem(0)]
    public virtual string? Name { get; set; }

    // Field index 1 in the FlatBuffer vtable.
    [FlatBufferItem(1)]
    public virtual IList<InnerTable>? ListVector { get; set; }
}
/// <summary>
/// Sample FlatBuffer table used as the element type of <c>DemoTable.ListVector</c>.
/// The member is virtual so FlatSharp can subclass this type for lazy/progressive parsing.
/// </summary>
[FlatBufferTable]
public class InnerTable
{
    // Field index 0 in the FlatBuffer vtable.
    [FlatBufferItem(0)]
    public virtual string? Fruit { get; set; }
}
}