diff --git a/python/pydantic_core/_pydantic_core.pyi b/python/pydantic_core/_pydantic_core.pyi index 0e29a6b41..838a62d66 100644 --- a/python/pydantic_core/_pydantic_core.pyi +++ b/python/pydantic_core/_pydantic_core.pyi @@ -305,6 +305,7 @@ class SchemaSerializer: exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, + sort_keys: bool = False, warnings: bool | Literal['none', 'warn', 'error'] = True, fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, @@ -325,6 +326,7 @@ class SchemaSerializer: exclude_defaults: Whether to exclude fields that are equal to their default value. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. + sort_keys: Whether to sort dictionary keys. If True, all dictionary keys will be sorted alphabetically, including nested dictionaries. warnings: How to handle invalid fields. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. fallback: A function to call when an unknown value is encountered, @@ -352,6 +354,7 @@ class SchemaSerializer: exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, + sort_keys: bool = False, warnings: bool | Literal['none', 'warn', 'error'] = True, fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, @@ -373,6 +376,7 @@ class SchemaSerializer: exclude_defaults: Whether to exclude fields that are equal to their default value. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. + sort_keys: Whether to sort dictionary keys. If True, all dictionary keys will be sorted alphabetically, including nested dictionaries. warnings: How to handle invalid fields. 
False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. fallback: A function to call when an unknown value is encountered, @@ -401,6 +405,7 @@ def to_json( by_alias: bool = True, exclude_none: bool = False, round_trip: bool = False, + sort_keys: bool = False, timedelta_mode: Literal['iso8601', 'float'] = 'iso8601', bytes_mode: Literal['utf8', 'base64', 'hex'] = 'utf8', inf_nan_mode: Literal['null', 'constants', 'strings'] = 'constants', @@ -424,6 +429,7 @@ def to_json( by_alias: Whether to use the alias names of fields. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. + sort_keys: Whether to sort dictionary keys. If True, all dictionary keys will be sorted alphabetically, including nested dictionaries. timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`. bytes_mode: How to serialize `bytes` objects, either `'utf8'`, `'base64'`, or `'hex'`. inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'`, `'constants'`, or `'strings'`. @@ -482,6 +488,7 @@ def to_jsonable_python( by_alias: bool = True, exclude_none: bool = False, round_trip: bool = False, + sort_keys: bool = False, timedelta_mode: Literal['iso8601', 'float'] = 'iso8601', bytes_mode: Literal['utf8', 'base64', 'hex'] = 'utf8', inf_nan_mode: Literal['null', 'constants', 'strings'] = 'constants', @@ -503,6 +510,7 @@ def to_jsonable_python( by_alias: Whether to use the alias names of fields. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. + sort_keys: Whether to sort dictionary keys. If True, all dictionary keys will be sorted alphabetically, including nested dictionaries. timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`. 
bytes_mode: How to serialize `bytes` objects, either `'utf8'`, `'base64'`, or `'hex'`. inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'`, `'constants'`, or `'strings'`. diff --git a/python/pydantic_core/core_schema.py b/python/pydantic_core/core_schema.py index 6d265d48e..f9323c53c 100644 --- a/python/pydantic_core/core_schema.py +++ b/python/pydantic_core/core_schema.py @@ -154,6 +154,9 @@ def serialize_as_any(self) -> bool: ... @property def round_trip(self) -> bool: ... + @property + def sort_keys(self) -> bool: ... + def mode_is_json(self) -> bool: ... def __str__(self) -> str: ... diff --git a/src/errors/validation_exception.rs b/src/errors/validation_exception.rs index 5d36851c2..d807be002 100644 --- a/src/errors/validation_exception.rs +++ b/src/errors/validation_exception.rs @@ -341,7 +341,7 @@ impl ValidationError { include_input: bool, ) -> PyResult> { let state = SerializationState::new("iso8601", "utf8", "constants")?; - let extra = state.extra(py, &SerMode::Json, None, false, false, true, None, false, None); + let extra = state.extra(py, &SerMode::Json, None, false, false, false, true, None, false, None); let serializer = ValidationErrorSerializer { py, line_errors: &self.line_errors, diff --git a/src/serializers/computed_fields.rs b/src/serializers/computed_fields.rs index 7a574093c..f5fc3d398 100644 --- a/src/serializers/computed_fields.rs +++ b/src/serializers/computed_fields.rs @@ -130,36 +130,73 @@ impl ComputedFields { // Do not serialize computed fields return Ok(()); } + if extra.sort_keys { + let mut sorted_fields: Vec<&ComputedField> = self.0.iter().collect(); + sorted_fields.sort_by_cached_key(|field| match extra.serialize_by_alias_or(field.serialize_by_alias) { + true => field.alias.as_str(), + false => field.property_name.as_str(), + }); + for computed_field in sorted_fields { + let property_name_py = computed_field.property_name_py.bind(model.py()); + let (next_include, next_exclude) = match 
filter.key_filter(property_name_py, include, exclude) { + Ok(Some((next_include, next_exclude))) => (next_include, next_exclude), + Ok(None) => continue, + Err(e) => return Err(convert_error(e)), + }; - for computed_field in &self.0 { - let property_name_py = computed_field.property_name_py.bind(model.py()); - let (next_include, next_exclude) = match filter.key_filter(property_name_py, include, exclude) { - Ok(Some((next_include, next_exclude))) => (next_include, next_exclude), - Ok(None) => continue, - Err(e) => return Err(convert_error(e)), - }; - - let value = match model.getattr(property_name_py) { - Ok(field_value) => field_value, - Err(e) => { - return Err(convert_error(e)); + let value = match model.getattr(property_name_py) { + Ok(field_value) => field_value, + Err(e) => { + return Err(convert_error(e)); + } + }; + if extra.exclude_none && value.is_none() { + continue; } - }; - if extra.exclude_none && value.is_none() { - continue; + + let field_extra = Extra { + field_name: Some(&computed_field.property_name), + ..*extra + }; + serialize(ComputedFieldToSerialize { + computed_field, + value, + include: next_include, + exclude: next_exclude, + field_extra, + })?; } + } else { + for computed_field in &self.0 { + let property_name_py = computed_field.property_name_py.bind(model.py()); + let (next_include, next_exclude) = match filter.key_filter(property_name_py, include, exclude) { + Ok(Some((next_include, next_exclude))) => (next_include, next_exclude), + Ok(None) => continue, + Err(e) => return Err(convert_error(e)), + }; - let field_extra = Extra { - field_name: Some(&computed_field.property_name), - ..*extra - }; - serialize(ComputedFieldToSerialize { - computed_field, - value, - include: next_include, - exclude: next_exclude, - field_extra, - })?; + let value = match model.getattr(property_name_py) { + Ok(field_value) => field_value, + Err(e) => { + return Err(convert_error(e)); + } + }; + if extra.exclude_none && value.is_none() { + continue; + } + + let 
field_extra = Extra { + field_name: Some(&computed_field.property_name), + ..*extra + }; + serialize(ComputedFieldToSerialize { + computed_field, + value, + include: next_include, + exclude: next_exclude, + field_extra, + })?; + } } Ok(()) } diff --git a/src/serializers/extra.rs b/src/serializers/extra.rs index e919beb8c..3bfd92723 100644 --- a/src/serializers/extra.rs +++ b/src/serializers/extra.rs @@ -47,6 +47,7 @@ impl SerializationState { by_alias: Option, exclude_none: bool, round_trip: bool, + sort_keys: bool, serialize_unknown: bool, fallback: Option<&'py Bound<'_, PyAny>>, serialize_as_any: bool, @@ -61,6 +62,7 @@ impl SerializationState { false, exclude_none, round_trip, + sort_keys, &self.config, &self.rec_guard, serialize_unknown, @@ -87,6 +89,7 @@ pub(crate) struct Extra<'a> { pub exclude_defaults: bool, pub exclude_none: bool, pub round_trip: bool, + pub sort_keys: bool, pub config: &'a SerializationConfig, pub rec_guard: &'a SerRecursionState, // the next two are used for union logic @@ -113,6 +116,7 @@ impl<'a> Extra<'a> { exclude_defaults: bool, exclude_none: bool, round_trip: bool, + sort_keys: bool, config: &'a SerializationConfig, rec_guard: &'a SerRecursionState, serialize_unknown: bool, @@ -129,6 +133,7 @@ impl<'a> Extra<'a> { exclude_defaults, exclude_none, round_trip, + sort_keys, config, rec_guard, check: SerCheck::None, @@ -197,6 +202,7 @@ pub(crate) struct ExtraOwned { exclude_defaults: bool, exclude_none: bool, round_trip: bool, + sort_keys: bool, config: SerializationConfig, rec_guard: SerRecursionState, check: SerCheck, @@ -218,6 +224,7 @@ impl ExtraOwned { exclude_defaults: extra.exclude_defaults, exclude_none: extra.exclude_none, round_trip: extra.round_trip, + sort_keys: extra.sort_keys, config: extra.config.clone(), rec_guard: extra.rec_guard.clone(), check: extra.check, @@ -240,6 +247,7 @@ impl ExtraOwned { exclude_defaults: self.exclude_defaults, exclude_none: self.exclude_none, round_trip: self.round_trip, + sort_keys: 
self.sort_keys, config: &self.config, rec_guard: &self.rec_guard, check: self.check, diff --git a/src/serializers/fields.rs b/src/serializers/fields.rs index a5c5bc6b3..6dd1a0ba5 100644 --- a/src/serializers/fields.rs +++ b/src/serializers/fields.rs @@ -155,52 +155,42 @@ impl GeneralFieldsSerializer { let output_dict = PyDict::new(py); let mut used_req_fields: usize = 0; - // NOTE! we maintain the order of the input dict assuming that's right - for result in main_iter { - let (key, value) = result?; - let key_str = key_str(&key)?; - let op_field = self.fields.get(key_str); - if extra.exclude_none && value.is_none() { - continue; + if extra.sort_keys { + let mut items = main_iter + .map(|r| -> PyResult<_> { + let (k, v) = r?; + let k_str = key_str(&k)?.to_owned(); + Ok((k_str, k, v)) + }) + .collect::>>()?; + items.sort_by(|(a, _, _), (b, _, _)| a.cmp(b)); + + for (key_str, key, value) in items { + self.process_field( + &key_str, + &key, + value, + &output_dict, + include, + exclude, + &extra, + &mut used_req_fields, + )?; } - let field_extra = Extra { - field_name: Some(key_str), - ..extra - }; - if let Some((next_include, next_exclude)) = self.filter.key_filter(&key, include, exclude)? { - if let Some(field) = op_field { - if let Some(ref serializer) = field.serializer { - if !exclude_default(&value, &field_extra, serializer)? { - let value = serializer.to_python( - &value, - next_include.as_ref(), - next_exclude.as_ref(), - &field_extra, - )?; - let output_key = field.get_key_py(output_dict.py(), &field_extra); - output_dict.set_item(output_key, value)?; - } - } - - if field.required { - used_req_fields += 1; - } - } else if self.mode == FieldsMode::TypedDictAllow { - let value = match &self.extra_serializer { - Some(serializer) => { - serializer.to_python(&value, next_include.as_ref(), next_exclude.as_ref(), &field_extra)? 
- } - _ => infer_to_python(&value, next_include.as_ref(), next_exclude.as_ref(), &field_extra)?, - }; - output_dict.set_item(key, value)?; - } else if field_extra.check == SerCheck::Strict { - return Err(PydanticSerializationUnexpectedValue::new( - Some(format!("Unexpected field `{key}`")), - field_extra.model_type_name().map(|bound| bound.to_string()), - None, - ) - .to_py_err()); - } + } else { + for result in main_iter { + let (key, value) = result?; + let key_str = key_str(&key)?; + self.process_field( + key_str, + &key, + value, + &output_dict, + include, + exclude, + &extra, + &mut used_req_fields, + )?; } } @@ -223,6 +213,60 @@ impl GeneralFieldsSerializer { } } + #[allow(clippy::too_many_arguments)] + fn process_field<'py>( + &self, + key_str: &str, + key: &Bound<'py, PyAny>, + value: Bound<'py, PyAny>, + output_dict: &Bound<'py, PyDict>, + include: Option<&Bound<'py, PyAny>>, + exclude: Option<&Bound<'py, PyAny>>, + extra: &Extra, + used_req_fields: &mut usize, + ) -> PyResult<()> { + let op_field = self.fields.get(key_str); + if extra.exclude_none && value.is_none() { + return Ok(()); + } + let field_extra = Extra { + field_name: Some(key_str), + ..*extra + }; + if let Some((next_include, next_exclude)) = self.filter.key_filter(key, include, exclude)? { + if let Some(field) = op_field { + if let Some(ref serializer) = field.serializer { + if !exclude_default(&value, &field_extra, serializer)? { + let value = + serializer.to_python(&value, next_include.as_ref(), next_exclude.as_ref(), &field_extra)?; + let output_key = field.get_key_py(output_dict.py(), &field_extra); + output_dict.set_item(output_key, value)?; + } + } + + if field.required { + *used_req_fields += 1; + } + } else if self.mode == FieldsMode::TypedDictAllow { + let value = match &self.extra_serializer { + Some(serializer) => { + serializer.to_python(&value, next_include.as_ref(), next_exclude.as_ref(), &field_extra)? 
+ } + _ => infer_to_python(&value, next_include.as_ref(), next_exclude.as_ref(), &field_extra)?, + }; + output_dict.set_item(key, value)?; + } else if field_extra.check == SerCheck::Strict { + return Err(PydanticSerializationUnexpectedValue::new( + Some(format!("Unexpected field `{key}`")), + field_extra.model_type_name().map(|bound| bound.to_string()), + None, + ) + .to_py_err()); + } + } + Ok(()) + } + pub(crate) fn main_serde_serialize<'py, S: serde::ser::Serializer>( &self, main_iter: impl Iterator, Bound<'py, PyAny>)>>, @@ -232,48 +276,82 @@ impl GeneralFieldsSerializer { exclude: Option<&Bound<'py, PyAny>>, extra: Extra, ) -> Result { - // NOTE! As above, we maintain the order of the input dict assuming that's right // we don't both with `used_fields` here because on unions, `to_python(..., mode='json')` is used let mut map = serializer.serialize_map(Some(expected_len))?; - for result in main_iter { - let (key, value) = result.map_err(py_err_se_err)?; - if extra.exclude_none && value.is_none() { - continue; + if extra.sort_keys { + let mut items = main_iter + .map(|r| -> PyResult<_> { + let (k, v) = r?; + let k_str = key_str(&k)?.to_owned(); + Ok((k_str, k, v)) + }) + .collect::>>() + .map_err(py_err_se_err)?; + items.sort_by(|(a, _, _), (b, _, _)| a.cmp(b)); + for (key_str, key, value) in items { + self.process_serde_field::(&key_str, &key, &value, &mut map, include, exclude, &extra)?; } - let key_str = key_str(&key).map_err(py_err_se_err)?; - let field_extra = Extra { - field_name: Some(key_str), - ..extra - }; - - let filter = self.filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; - if let Some((next_include, next_exclude)) = filter { - if let Some(field) = self.fields.get(key_str) { - if let Some(ref serializer) = field.serializer { - if !exclude_default(&value, &field_extra, serializer).map_err(py_err_se_err)? 
{ - let s = PydanticSerializer::new( - &value, - serializer, - next_include.as_ref(), - next_exclude.as_ref(), - &field_extra, - ); - let output_key = field.get_key_json(key_str, &field_extra); - map.serialize_entry(&output_key, &s)?; - } - } - } else if self.mode == FieldsMode::TypedDictAllow { - let output_key = infer_json_key(&key, &field_extra).map_err(py_err_se_err)?; - let s = SerializeInfer::new(&value, next_include.as_ref(), next_exclude.as_ref(), &field_extra); - map.serialize_entry(&output_key, &s)?; + } else { + for result in main_iter { + let (key, value) = result.map_err(py_err_se_err)?; + if extra.exclude_none && value.is_none() { + continue; } - // no error case here since unions (which need the error case) use `to_python(..., mode='json')` + let key_str = key_str(&key).map_err(py_err_se_err)?; + self.process_serde_field::(key_str, &key, &value, &mut map, include, exclude, &extra)?; } } Ok(map) } + #[allow(clippy::too_many_arguments)] + fn process_serde_field<'py, S: serde::ser::Serializer>( + &self, + key_str: &str, + key: &Bound<'py, PyAny>, + value: &Bound<'py, PyAny>, + map: &mut S::SerializeMap, + include: Option<&Bound<'py, PyAny>>, + exclude: Option<&Bound<'py, PyAny>>, + extra: &Extra, + ) -> Result<(), S::Error> { + if extra.exclude_none && value.is_none() { + return Ok(()); + } + + let field_extra = Extra { + field_name: Some(key_str), + ..*extra + }; + + let filter = self.filter.key_filter(key, include, exclude).map_err(py_err_se_err)?; + if let Some((next_include, next_exclude)) = filter { + if let Some(field) = self.fields.get(key_str) { + if let Some(ref serializer) = field.serializer { + if !exclude_default(value, &field_extra, serializer).map_err(py_err_se_err)? 
{ + // Let nested serializers handle their own sorting + let s = PydanticSerializer::new( + value, + serializer, + next_include.as_ref(), + next_exclude.as_ref(), + &field_extra, + ); + let output_key = field.get_key_json(key_str, &field_extra); + map.serialize_entry(&output_key, &s)?; + } + } + } else if self.mode == FieldsMode::TypedDictAllow { + let output_key = infer_json_key(key, &field_extra).map_err(py_err_se_err)?; + // Let nested serializers handle their own sorting + let s = SerializeInfer::new(value, next_include.as_ref(), next_exclude.as_ref(), &field_extra); + map.serialize_entry(&output_key, &s)?; + } + } + Ok(()) + } + pub(crate) fn add_computed_fields_python( &self, model: Option<&Bound<'_, PyAny>>, @@ -390,7 +468,6 @@ impl TypeSerializer for GeneralFieldsSerializer { FieldsMode::TypedDictAllow => main_dict.len() + self.computed_field_count(), _ => self.fields.len() + option_length!(extra_dict) + self.computed_field_count(), }; - // NOTE! As above, we maintain the order of the input dict assuming that's right // we don't both with `used_fields` here because on unions, `to_python(..., mode='json')` is used let mut map = self.main_serde_serialize( dict_items(&main_dict), @@ -410,6 +487,7 @@ impl TypeSerializer for GeneralFieldsSerializer { let filter = self.filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; if let Some((next_include, next_exclude)) = filter { let output_key = infer_json_key(&key, extra).map_err(py_err_se_err)?; + // Let nested serializers handle their own sorting let s = SerializeInfer::new(&value, next_include.as_ref(), next_exclude.as_ref(), extra); map.serialize_entry(&output_key, &s)?; } diff --git a/src/serializers/infer.rs b/src/serializers/infer.rs index eb067b81d..77837d109 100644 --- a/src/serializers/infer.rs +++ b/src/serializers/infer.rs @@ -103,6 +103,7 @@ pub(crate) fn infer_to_python_known( extra.exclude_defaults, extra.exclude_none, extra.round_trip, + extra.sort_keys, extra.rec_guard, 
extra.serialize_unknown, extra.fallback, @@ -503,6 +504,7 @@ pub(crate) fn infer_serialize_known( extra.exclude_defaults, extra.exclude_none, extra.round_trip, + extra.sort_keys, extra.rec_guard, extra.serialize_unknown, extra.fallback, @@ -721,13 +723,27 @@ fn serialize_pairs_python<'py>( let new_dict = PyDict::new(py); let filter = AnyFilter::new(); - for result in pairs_iter { - let (k, v) = result?; - let op_next = filter.key_filter(&k, include, exclude)?; - if let Some((next_include, next_exclude)) = op_next { - let k = key_transform(k)?; - let v = infer_to_python(&v, next_include.as_ref(), next_exclude.as_ref(), extra)?; - new_dict.set_item(k, v)?; + if extra.sort_keys { + let mut pairs: Vec<(Bound<'py, PyAny>, Bound<'py, PyAny>)> = pairs_iter.collect::>>()?; + pairs.sort_by_cached_key(|(k, _)| k.to_string()); + + for (k, v) in pairs { + let op_next = filter.key_filter(&k, include, exclude)?; + if let Some((next_include, next_exclude)) = op_next { + let k = key_transform(k)?; + let v = infer_to_python(&v, next_include.as_ref(), next_exclude.as_ref(), extra)?; + new_dict.set_item(k, v)?; + } + } + } else { + for result in pairs_iter { + let (k, v) = result?; + let op_next = filter.key_filter(&k, include, exclude)?; + if let Some((next_include, next_exclude)) = op_next { + let k = key_transform(k)?; + let v = infer_to_python(&v, next_include.as_ref(), next_exclude.as_ref(), extra)?; + new_dict.set_item(k, v)?; + } } } Ok(new_dict.into()) @@ -744,14 +760,29 @@ fn serialize_pairs_json<'py, S: Serializer>( let mut map = serializer.serialize_map(Some(iter_size))?; let filter = AnyFilter::new(); - for result in pairs_iter { - let (key, value) = result.map_err(py_err_se_err)?; + if extra.sort_keys { + let mut pairs: Vec<(Bound<'py, PyAny>, Bound<'py, PyAny>)> = + pairs_iter.collect::>>().map_err(py_err_se_err)?; + pairs.sort_by_cached_key(|(k, _)| k.to_string()); - let op_next = filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; - if let 
Some((next_include, next_exclude)) = op_next { - let key = infer_json_key(&key, extra).map_err(py_err_se_err)?; - let value_serializer = SerializeInfer::new(&value, next_include.as_ref(), next_exclude.as_ref(), extra); - map.serialize_entry(&key, &value_serializer)?; + for (key, value) in pairs { + let op_next = filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; + if let Some((next_include, next_exclude)) = op_next { + let key = infer_json_key(&key, extra).map_err(py_err_se_err)?; + let value_serializer = SerializeInfer::new(&value, next_include.as_ref(), next_exclude.as_ref(), extra); + map.serialize_entry(&key, &value_serializer)?; + } + } + } else { + for result in pairs_iter { + let (key, value) = result.map_err(py_err_se_err)?; + + let op_next = filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; + if let Some((next_include, next_exclude)) = op_next { + let key = infer_json_key(&key, extra).map_err(py_err_se_err)?; + let value_serializer = SerializeInfer::new(&value, next_include.as_ref(), next_exclude.as_ref(), extra); + map.serialize_entry(&key, &value_serializer)?; + } } } map.end() diff --git a/src/serializers/mod.rs b/src/serializers/mod.rs index f9b51496f..ab3e32b17 100644 --- a/src/serializers/mod.rs +++ b/src/serializers/mod.rs @@ -61,6 +61,7 @@ impl SchemaSerializer { exclude_defaults: bool, exclude_none: bool, round_trip: bool, + sort_keys: bool, rec_guard: &'a SerRecursionState, serialize_unknown: bool, fallback: Option<&'a Bound<'a, PyAny>>, @@ -76,6 +77,7 @@ impl SchemaSerializer { exclude_defaults, exclude_none, round_trip, + sort_keys, &self.config, rec_guard, serialize_unknown, @@ -108,8 +110,8 @@ impl SchemaSerializer { #[allow(clippy::too_many_arguments)] #[pyo3(signature = (value, *, mode = None, include = None, exclude = None, by_alias = None, - exclude_unset = false, exclude_defaults = false, exclude_none = false, round_trip = false, warnings = WarningsArg::Bool(true), - fallback = None, serialize_as_any = 
false, context = None))] + exclude_unset = false, exclude_defaults = false, exclude_none = false, round_trip = false, sort_keys = false, + warnings = WarningsArg::Bool(true), fallback = None, serialize_as_any = false, context = None))] pub fn to_python( &self, py: Python, @@ -122,6 +124,7 @@ impl SchemaSerializer { exclude_defaults: bool, exclude_none: bool, round_trip: bool, + sort_keys: bool, warnings: WarningsArg, fallback: Option<&Bound<'_, PyAny>>, serialize_as_any: bool, @@ -143,6 +146,7 @@ impl SchemaSerializer { exclude_defaults, exclude_none, round_trip, + sort_keys, &rec_guard, false, fallback, @@ -156,7 +160,8 @@ impl SchemaSerializer { #[allow(clippy::too_many_arguments)] #[pyo3(signature = (value, *, indent = None, ensure_ascii = false, include = None, exclude = None, by_alias = None, - exclude_unset = false, exclude_defaults = false, exclude_none = false, round_trip = false, warnings = WarningsArg::Bool(true), + exclude_unset = false, exclude_defaults = false, exclude_none = false, round_trip = false, + sort_keys = false, warnings = WarningsArg::Bool(true), fallback = None, serialize_as_any = false, context = None))] pub fn to_json( &self, py: Python, @@ -171,6 +176,7 @@ impl SchemaSerializer { exclude_defaults: bool, exclude_none: bool, round_trip: bool, + sort_keys: bool, warnings: WarningsArg, fallback: Option<&Bound<'_, PyAny>>, serialize_as_any: bool, @@ -191,6 +197,7 @@ impl SchemaSerializer { exclude_defaults, exclude_none, round_trip, + sort_keys, &rec_guard, false, fallback, @@ -241,7 +248,7 @@ impl SchemaSerializer { #[allow(clippy::too_many_arguments)] #[pyfunction] #[pyo3(signature = (value, *, indent = None, ensure_ascii = false, include = None, exclude = None, by_alias = true, - exclude_none = false, round_trip = false, timedelta_mode = "iso8601", bytes_mode = "utf8", + exclude_none = false, round_trip = false, sort_keys = false, timedelta_mode = "iso8601", bytes_mode = "utf8", inf_nan_mode = "constants", serialize_unknown = false, fallback = None, 
serialize_as_any = false, context = None))] pub fn to_json( @@ -254,6 +261,7 @@ pub fn to_json( by_alias: bool, exclude_none: bool, round_trip: bool, + sort_keys: bool, timedelta_mode: &str, bytes_mode: &str, inf_nan_mode: &str, @@ -269,6 +277,7 @@ pub fn to_json( Some(by_alias), exclude_none, round_trip, + sort_keys, serialize_unknown, fallback, serialize_as_any, @@ -292,7 +301,7 @@ pub fn to_json( #[allow(clippy::too_many_arguments)] #[pyfunction] #[pyo3(signature = (value, *, include = None, exclude = None, by_alias = true, exclude_none = false, round_trip = false, - timedelta_mode = "iso8601", bytes_mode = "utf8", inf_nan_mode = "constants", serialize_unknown = false, fallback = None, + sort_keys = false, timedelta_mode = "iso8601", bytes_mode = "utf8", inf_nan_mode = "constants", serialize_unknown = false, fallback = None, serialize_as_any = false, context = None))] pub fn to_jsonable_python( py: Python, @@ -302,6 +311,7 @@ pub fn to_jsonable_python( by_alias: bool, exclude_none: bool, round_trip: bool, + sort_keys: bool, timedelta_mode: &str, bytes_mode: &str, inf_nan_mode: &str, @@ -317,6 +327,7 @@ pub fn to_jsonable_python( Some(by_alias), exclude_none, round_trip, + sort_keys, serialize_unknown, fallback, serialize_as_any, diff --git a/src/serializers/type_serializers/dict.rs b/src/serializers/type_serializers/dict.rs index feb45090c..a4b7b38be 100644 --- a/src/serializers/type_serializers/dict.rs +++ b/src/serializers/type_serializers/dict.rs @@ -83,18 +83,45 @@ impl TypeSerializer for DictSerializer { match value.downcast::() { Ok(py_dict) => { let value_serializer = self.value_serializer.as_ref(); - let new_dict = PyDict::new(py); - for (key, value) in py_dict.iter() { - let op_next = self.filter.key_filter(&key, include, exclude)?; - if let Some((next_include, next_exclude)) = op_next { - let key = match extra.mode { - SerMode::Json => self.key_serializer.json_key(&key, extra)?.into_py_any(py)?, - _ => self.key_serializer.to_python(&key, None, None, 
extra)?, - }; - let value = - value_serializer.to_python(&value, next_include.as_ref(), next_exclude.as_ref(), extra)?; - new_dict.set_item(key, value)?; + + if extra.sort_keys { + let mut items: Vec<(Bound<'_, PyAny>, Bound<'_, PyAny>)> = py_dict.iter().collect(); + items.sort_by_cached_key(|(key, _)| key.to_string()); + + for (key, value) in items { + let op_next = self.filter.key_filter(&key, include, exclude)?; + if let Some((next_include, next_exclude)) = op_next { + let key = match extra.mode { + SerMode::Json => self.key_serializer.json_key(&key, extra)?.into_py_any(py)?, + _ => self.key_serializer.to_python(&key, None, None, extra)?, + }; + // Let nested serializers handle their own sorting + let value = value_serializer.to_python( + &value, + next_include.as_ref(), + next_exclude.as_ref(), + extra, + )?; + new_dict.set_item(key, value)?; + } + } + } else { + for (key, value) in py_dict.iter() { + let op_next = self.filter.key_filter(&key, include, exclude)?; + if let Some((next_include, next_exclude)) = op_next { + let key = match extra.mode { + SerMode::Json => self.key_serializer.json_key(&key, extra)?.into_py_any(py)?, + _ => self.key_serializer.to_python(&key, None, None, extra)?, + }; + let value = value_serializer.to_python( + &value, + next_include.as_ref(), + next_exclude.as_ref(), + extra, + )?; + new_dict.set_item(key, value)?; + } } } Ok(new_dict.into()) @@ -124,18 +151,39 @@ impl TypeSerializer for DictSerializer { let key_serializer = self.key_serializer.as_ref(); let value_serializer = self.value_serializer.as_ref(); - for (key, value) in py_dict.iter() { - let op_next = self.filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; - if let Some((next_include, next_exclude)) = op_next { - let key = key_serializer.json_key(&key, extra).map_err(py_err_se_err)?; - let value_serialize = PydanticSerializer::new( - &value, - value_serializer, - next_include.as_ref(), - next_exclude.as_ref(), - extra, - ); - map.serialize_entry(&key, 
&value_serialize)?; + if extra.sort_keys { + let mut items: Vec<(Bound<'_, PyAny>, Bound<'_, PyAny>)> = py_dict.iter().collect(); + items.sort_by_cached_key(|(key, _)| key.to_string()); + + for (key, value) in items { + let op_next = self.filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; + if let Some((next_include, next_exclude)) = op_next { + let key = key_serializer.json_key(&key, extra).map_err(py_err_se_err)?; + // Let nested serializers handle their own sorting + let s = PydanticSerializer::new( + &value, + value_serializer, + next_include.as_ref(), + next_exclude.as_ref(), + extra, + ); + map.serialize_entry(&key, &s)?; + } + } + } else { + for (key, value) in py_dict.iter() { + let op_next = self.filter.key_filter(&key, include, exclude).map_err(py_err_se_err)?; + if let Some((next_include, next_exclude)) = op_next { + let key = key_serializer.json_key(&key, extra).map_err(py_err_se_err)?; + let s = PydanticSerializer::new( + &value, + value_serializer, + next_include.as_ref(), + next_exclude.as_ref(), + extra, + ); + map.serialize_entry(&key, &s)?; + } } } map.end() diff --git a/tests/serializers/test_model.py b/tests/serializers/test_model.py index 65871e050..987aba5ec 100644 --- a/tests/serializers/test_model.py +++ b/tests/serializers/test_model.py @@ -12,6 +12,7 @@ import pytest from dirty_equals import IsJson +from inline_snapshot import snapshot from pydantic_core import ( PydanticSerializationError, @@ -1044,6 +1045,244 @@ class MyModel: assert s.to_json(m, exclude={'field_d': [0]}) == b'{"field_a":"test","field_b":12,"field_c":null,"field_d":[2,3]}' +def test_extra_sort_keys(): + class MyModel: + field_123: str + field_b: int + field_a: str + field_c: dict[str, Any] + + schema = core_schema.model_schema( + MyModel, + core_schema.model_fields_schema( + { + 'field_123': core_schema.model_field(core_schema.bytes_schema()), + 'field_b': core_schema.model_field(core_schema.int_schema()), + 'field_a': 
core_schema.model_field(core_schema.bytes_schema()), + 'field_c': core_schema.model_field(core_schema.dict_schema(core_schema.any_schema())), + 'field_n': core_schema.model_field(core_schema.list_schema(core_schema.any_schema())), + }, + extra_behavior='allow', + ), + extra_behavior='allow', + ) + v = SchemaValidator(schema) + m = v.validate_python( + { + 'field_123': b'test_123', + 'field_b': 12, + 'field_a': b'test', + 'field_c': {'mango': 2, 'banana': 3, 'apple': 1}, + 'field_n': [{'mango': 3, 'banana': 2, 'apple': 1}, 2, 3], + } + ) + s = SchemaSerializer(schema) + assert 'mode:ModelExtra' in plain_repr(s) + assert 'has_extra:true' in plain_repr(s) + assert s.to_python(m, mode='json') == snapshot( + { + 'field_123': 'test_123', + 'field_b': 12, + 'field_a': 'test', + 'field_c': {'mango': 2, 'banana': 3, 'apple': 1}, + 'field_n': [{'mango': 3, 'banana': 2, 'apple': 1}, 2, 3], + } + ) + assert s.to_python(m, mode='json', sort_keys=True) == snapshot( + { + 'field_123': 'test_123', + 'field_a': 'test', + 'field_b': 12, + 'field_c': {'apple': 1, 'banana': 3, 'mango': 2}, + 'field_n': [{'apple': 1, 'banana': 2, 'mango': 3}, 2, 3], + } + ) + assert s.to_json(m) == snapshot( + b'{"field_123":"test_123","field_b":12,"field_a":"test","field_c":{"mango":2,"banana":3,"apple":1},"field_n":[{"mango":3,"banana":2,"apple":1},2,3]}' + ) + assert s.to_json(m, sort_keys=True) == snapshot( + b'{"field_123":"test_123","field_a":"test","field_b":12,"field_c":{"apple":1,"banana":3,"mango":2},"field_n":[{"apple":1,"banana":2,"mango":3},2,3]}' + ) + + # test filterings + m = v.validate_python( + { + 'field_123': b'test_123', + 'field_b': 12, + 'field_a': b'test', + 'field_c': {'mango': 2, 'banana': 3, 'apple': 1}, + 'field_n': [ + {'mango': 3, 'banana': 2, 'apple': 1}, + [{'mango': 3, 'banana': 2, 'apple': 1}, {'d': 3, 'b': 2, 'a': 1}], + 3, + ], + 'field_d': [ + [[{'mango': 3, 'banana': 2, 'apple': 1}], {'d': 3, 'b': 2, 'a': 1}], + 3, + ], + 'field_e': {'c': 1, 'b': {'c': 2, 'd': 
{'mango': 3, 'banana': 2, 'apple': 1}}, 'a': 3}, + 'field_none': None, + } + ) + assert s.to_python(m, sort_keys=True) == snapshot( + { + 'field_123': b'test_123', + 'field_a': b'test', + 'field_b': 12, + 'field_c': {'apple': 1, 'banana': 3, 'mango': 2}, + 'field_n': [ + {'apple': 1, 'banana': 2, 'mango': 3}, + [{'apple': 1, 'banana': 2, 'mango': 3}, {'a': 1, 'b': 2, 'd': 3}], + 3, + ], + 'field_d': [ + [[{'apple': 1, 'banana': 2, 'mango': 3}], {'a': 1, 'b': 2, 'd': 3}], + 3, + ], + 'field_e': {'a': 3, 'b': {'c': 2, 'd': {'apple': 1, 'banana': 2, 'mango': 3}}, 'c': 1}, + 'field_none': None, + } + ) + assert s.to_python(m, exclude_none=True) == snapshot( + { + 'field_123': b'test_123', + 'field_a': b'test', + 'field_b': 12, + 'field_c': {'mango': 2, 'banana': 3, 'apple': 1}, + 'field_n': [ + {'mango': 3, 'banana': 2, 'apple': 1}, + [{'mango': 3, 'banana': 2, 'apple': 1}, {'d': 3, 'b': 2, 'a': 1}], + 3, + ], + 'field_d': [ + [[{'mango': 3, 'banana': 2, 'apple': 1}], {'d': 3, 'b': 2, 'a': 1}], + 3, + ], + 'field_e': {'c': 1, 'b': {'c': 2, 'd': {'mango': 3, 'banana': 2, 'apple': 1}}, 'a': 3}, + } + ) + assert s.to_python(m, exclude_none=True, sort_keys=True) == snapshot( + { + 'field_123': b'test_123', + 'field_a': b'test', + 'field_b': 12, + 'field_c': {'apple': 1, 'banana': 3, 'mango': 2}, + 'field_n': [ + {'apple': 1, 'banana': 2, 'mango': 3}, + [{'apple': 1, 'banana': 2, 'mango': 3}, {'a': 1, 'b': 2, 'd': 3}], + 3, + ], + 'field_d': [ + [[{'apple': 1, 'banana': 2, 'mango': 3}], {'a': 1, 'b': 2, 'd': 3}], + 3, + ], + 'field_e': {'a': 3, 'b': {'c': 2, 'd': {'apple': 1, 'banana': 2, 'mango': 3}}, 'c': 1}, + } + ) + assert s.to_json(m, exclude_none=True) == snapshot( + 
b'{"field_123":"test_123","field_b":12,"field_a":"test","field_c":{"mango":2,"banana":3,"apple":1},"field_n":[{"mango":3,"banana":2,"apple":1},[{"mango":3,"banana":2,"apple":1},{"d":3,"b":2,"a":1}],3],"field_d":[[[{"mango":3,"banana":2,"apple":1}],{"d":3,"b":2,"a":1}],3],"field_e":{"c":1,"b":{"c":2,"d":{"mango":3,"banana":2,"apple":1}},"a":3}}' + ) + assert s.to_json(m, exclude_none=True, sort_keys=True) == snapshot( + b'{"field_123":"test_123","field_a":"test","field_b":12,"field_c":{"apple":1,"banana":3,"mango":2},"field_n":[{"apple":1,"banana":2,"mango":3},[{"apple":1,"banana":2,"mango":3},{"a":1,"b":2,"d":3}],3],"field_d":[[[{"apple":1,"banana":2,"mango":3}],{"a":1,"b":2,"d":3}],3],"field_e":{"a":3,"b":{"c":2,"d":{"apple":1,"banana":2,"mango":3}},"c":1}}' + ) + assert s.to_python(m, exclude={'field_c'}) == snapshot( + { + 'field_123': b'test_123', + 'field_a': b'test', + 'field_b': 12, + 'field_n': [ + {'mango': 3, 'banana': 2, 'apple': 1}, + [{'mango': 3, 'banana': 2, 'apple': 1}, {'d': 3, 'b': 2, 'a': 1}], + 3, + ], + 'field_d': [ + [[{'mango': 3, 'banana': 2, 'apple': 1}], {'d': 3, 'b': 2, 'a': 1}], + 3, + ], + 'field_e': {'c': 1, 'b': {'c': 2, 'd': {'mango': 3, 'banana': 2, 'apple': 1}}, 'a': 3}, + 'field_none': None, + } + ) + assert s.to_python(m, exclude={'field_c'}, sort_keys=True) == snapshot( + { + 'field_123': b'test_123', + 'field_a': b'test', + 'field_b': 12, + 'field_n': [ + {'apple': 1, 'banana': 2, 'mango': 3}, + [{'apple': 1, 'banana': 2, 'mango': 3}, {'a': 1, 'b': 2, 'd': 3}], + 3, + ], + 'field_d': [ + [[{'apple': 1, 'banana': 2, 'mango': 3}], {'a': 1, 'b': 2, 'd': 3}], + 3, + ], + 'field_e': {'a': 3, 'b': {'c': 2, 'd': {'apple': 1, 'banana': 2, 'mango': 3}}, 'c': 1}, + 'field_none': None, + } + ) + assert s.to_json(m, exclude={'field_c'}) == snapshot( + 
b'{"field_123":"test_123","field_b":12,"field_a":"test","field_n":[{"mango":3,"banana":2,"apple":1},[{"mango":3,"banana":2,"apple":1},{"d":3,"b":2,"a":1}],3],"field_d":[[[{"mango":3,"banana":2,"apple":1}],{"d":3,"b":2,"a":1}],3],"field_e":{"c":1,"b":{"c":2,"d":{"mango":3,"banana":2,"apple":1}},"a":3},"field_none":null}' + ) + assert s.to_json(m, exclude={'field_c'}, sort_keys=True) == snapshot( + b'{"field_123":"test_123","field_a":"test","field_b":12,"field_n":[{"apple":1,"banana":2,"mango":3},[{"apple":1,"banana":2,"mango":3},{"a":1,"b":2,"d":3}],3],"field_d":[[[{"apple":1,"banana":2,"mango":3}],{"a":1,"b":2,"d":3}],3],"field_e":{"a":3,"b":{"c":2,"d":{"apple":1,"banana":2,"mango":3}},"c":1},"field_none":null}' + ) + assert s.to_python(m, exclude={'field_d': [0]}) == snapshot( + { + 'field_123': b'test_123', + 'field_a': b'test', + 'field_b': 12, + 'field_c': {'mango': 2, 'banana': 3, 'apple': 1}, + 'field_n': [ + {'mango': 3, 'banana': 2, 'apple': 1}, + [{'mango': 3, 'banana': 2, 'apple': 1}, {'d': 3, 'b': 2, 'a': 1}], + 3, + ], + 'field_d': [3], + 'field_e': {'c': 1, 'b': {'c': 2, 'd': {'mango': 3, 'banana': 2, 'apple': 1}}, 'a': 3}, + 'field_none': None, + } + ) + assert s.to_python(m, exclude={'field_d': [0]}, sort_keys=True) == snapshot( + { + 'field_123': b'test_123', + 'field_a': b'test', + 'field_b': 12, + 'field_c': {'apple': 1, 'banana': 3, 'mango': 2}, + 'field_n': [ + {'apple': 1, 'banana': 2, 'mango': 3}, + [{'apple': 1, 'banana': 2, 'mango': 3}, {'a': 1, 'b': 2, 'd': 3}], + 3, + ], + 'field_d': [3], + 'field_e': {'a': 3, 'b': {'c': 2, 'd': {'apple': 1, 'banana': 2, 'mango': 3}}, 'c': 1}, + 'field_none': None, + } + ) + assert s.to_python(m, exclude={'field_d': [0]}, sort_keys=True, mode='json') == snapshot( + { + 'field_123': 'test_123', + 'field_a': 'test', + 'field_b': 12, + 'field_c': {'apple': 1, 'banana': 3, 'mango': 2}, + 'field_n': [ + {'apple': 1, 'banana': 2, 'mango': 3}, + [{'apple': 1, 'banana': 2, 'mango': 3}, {'a': 1, 'b': 2, 'd': 3}], + 
3, + ], + 'field_d': [3], + 'field_e': {'a': 3, 'b': {'c': 2, 'd': {'apple': 1, 'banana': 2, 'mango': 3}}, 'c': 1}, + 'field_none': None, + } + ) + assert s.to_json(m, exclude={'field_d': [0]}) == snapshot( + b'{"field_123":"test_123","field_b":12,"field_a":"test","field_c":{"mango":2,"banana":3,"apple":1},"field_n":[{"mango":3,"banana":2,"apple":1},[{"mango":3,"banana":2,"apple":1},{"d":3,"b":2,"a":1}],3],"field_d":[3],"field_e":{"c":1,"b":{"c":2,"d":{"mango":3,"banana":2,"apple":1}},"a":3},"field_none":null}' + ) + assert s.to_json(m, exclude={'field_d': [0]}, sort_keys=True) == snapshot( + b'{"field_123":"test_123","field_a":"test","field_b":12,"field_c":{"apple":1,"banana":3,"mango":2},"field_n":[{"apple":1,"banana":2,"mango":3},[{"apple":1,"banana":2,"mango":3},{"a":1,"b":2,"d":3}],3],"field_d":[3],"field_e":{"a":3,"b":{"c":2,"d":{"apple":1,"banana":2,"mango":3}},"c":1},"field_none":null}' + ) + + def test_extra_config(): class MyModel: # this is not required, but it avoids `__pydantic_fields_set__` being included in `__dict__` @@ -1224,3 +1463,127 @@ def __init__(self, my_field: int) -> None: ) s = SchemaSerializer(schema) assert s.to_python(Model(1), by_alias=runtime) == expected + + +def test_computed_fields_sort_keys(): + @dataclasses.dataclass + class Model: + width: int + height: int + + @property + def zebra_property(self) -> str: + return f'zebra_{self.width}' + + @property + def alpha_property(self) -> str: + return f'alpha_{self.height}' + + @property + def beta_property(self) -> str: + return f'beta_{self.width + self.height}' + + s = SchemaSerializer( + core_schema.model_schema( + Model, + core_schema.model_fields_schema( + { + 'width': core_schema.model_field(core_schema.int_schema()), + 'height': core_schema.model_field(core_schema.int_schema()), + }, + computed_fields=[ + core_schema.computed_field('zebra_property', core_schema.str_schema()), + core_schema.computed_field('alpha_property', core_schema.str_schema()), + 
core_schema.computed_field('beta_property', core_schema.str_schema()), + ], + ), + ) + ) + + model = Model(width=3, height=4) + + assert s.to_python(model) == { + 'width': 3, + 'height': 4, + 'zebra_property': 'zebra_3', + 'alpha_property': 'alpha_4', + 'beta_property': 'beta_7', + } + + assert s.to_python(model, sort_keys=True) == snapshot( + {'height': 4, 'width': 3, 'alpha_property': 'alpha_4', 'beta_property': 'beta_7', 'zebra_property': 'zebra_3'} + ) + + assert s.to_json(model, sort_keys=False) == snapshot( + b'{"width":3,"height":4,"zebra_property":"zebra_3","alpha_property":"alpha_4","beta_property":"beta_7"}' + ) + + assert s.to_json(model, sort_keys=True) == snapshot( + b'{"height":4,"width":3,"alpha_property":"alpha_4","beta_property":"beta_7","zebra_property":"zebra_3"}' + ) + + s = SchemaSerializer( + core_schema.model_schema( + Model, + core_schema.model_fields_schema( + { + 'width': core_schema.model_field(core_schema.int_schema()), + 'height': core_schema.model_field(core_schema.int_schema()), + }, + computed_fields=[ + core_schema.computed_field('zebra_property', core_schema.str_schema(), alias='z_zebra_alias'), + core_schema.computed_field('alpha_property', core_schema.str_schema(), alias='a_alpha_alias'), + core_schema.computed_field('beta_property', core_schema.str_schema(), alias='b_beta_alias'), + ], + ), + ) + ) + + assert s.to_python(model, sort_keys=True, by_alias=True) == snapshot( + {'height': 4, 'width': 3, 'a_alpha_alias': 'alpha_4', 'b_beta_alias': 'beta_7', 'z_zebra_alias': 'zebra_3'} + ) + + assert s.to_python(model, sort_keys=True, by_alias=False) == snapshot( + {'height': 4, 'width': 3, 'alpha_property': 'alpha_4', 'beta_property': 'beta_7', 'zebra_property': 'zebra_3'} + ) + + assert s.to_json(model, sort_keys=True, by_alias=True) == snapshot( + b'{"height":4,"width":3,"a_alpha_alias":"alpha_4","b_beta_alias":"beta_7","z_zebra_alias":"zebra_3"}' + ) + + assert s.to_json(model, sort_keys=True, by_alias=False) == snapshot( + 
b'{"height":4,"width":3,"alpha_property":"alpha_4","beta_property":"beta_7","zebra_property":"zebra_3"}' + ) + + s = SchemaSerializer( + core_schema.model_schema( + Model, + core_schema.model_fields_schema( + { + 'width': core_schema.model_field(core_schema.int_schema()), + 'height': core_schema.model_field(core_schema.int_schema()), + }, + computed_fields=[ + core_schema.computed_field('zebra_property', core_schema.str_schema()), + core_schema.computed_field('alpha_property', core_schema.nullable_schema(core_schema.str_schema())), + core_schema.computed_field('beta_property', core_schema.str_schema()), + ], + ), + ) + ) + + assert s.to_python(model, sort_keys=True, exclude_none=True) == snapshot( + {'height': 4, 'width': 3, 'alpha_property': 'alpha_4', 'beta_property': 'beta_7', 'zebra_property': 'zebra_3'} + ) + + assert s.to_python(model, sort_keys=True, exclude_none=False) == snapshot( + {'height': 4, 'width': 3, 'alpha_property': 'alpha_4', 'beta_property': 'beta_7', 'zebra_property': 'zebra_3'} + ) + + assert s.to_json(model, sort_keys=True, exclude_none=True) == snapshot( + b'{"height":4,"width":3,"alpha_property":"alpha_4","beta_property":"beta_7","zebra_property":"zebra_3"}' + ) + + assert s.to_json(model, sort_keys=True, exclude_none=False) == snapshot( + b'{"height":4,"width":3,"alpha_property":"alpha_4","beta_property":"beta_7","zebra_property":"zebra_3"}' + ) diff --git a/tests/test.rs b/tests/test.rs index 6ca066c91..6c0eeb2b8 100644 --- a/tests/test.rs +++ b/tests/test.rs @@ -101,6 +101,7 @@ a = A() false, false, false, + false, WarningsArg::Bool(true), None, false, @@ -210,6 +211,7 @@ dump_json_input_2 = {'a': 'something'} false, false, false, + false, WarningsArg::Bool(false), None, false, @@ -232,6 +234,7 @@ dump_json_input_2 = {'a': 'something'} false, false, false, + false, WarningsArg::Bool(false), None, false,