hat.json
JSON Data library
"""JSON Data library"""

# Re-exported public API: basic data types and helpers
from hat.json.data import (Array,
                           Object,
                           Data,
                           equals,
                           clone,
                           flatten)
# path based access to nested data
from hat.json.path import (Path,
                           get,
                           set_,
                           remove,
                           Storage)
# serialization (JSON / YAML / TOML)
from hat.json.encoder import (Format,
                              encode,
                              decode,
                              get_file_format,
                              encode_file,
                              decode_file,
                              encode_stream,
                              decode_stream,
                              read_conf)
# JSON Patch support
from hat.json.patch import (diff,
                            patch)
# JSON Schema repositories and validators
from hat.json.schema import (SchemaId,
                             Schema,
                             SchemaRepository,
                             create_schema_repository,
                             merge_schema_repositories,
                             SchemaValidator,
                             PySchemaValidator,
                             RsSchemaValidator,
                             DefaultSchemaValidator,
                             json_schema_repo)
from hat.json import vt


__all__ = ['Array',
           'Object',
           'Data',
           'equals',
           'clone',
           'flatten',
           'Path',
           'get',
           'set_',
           'remove',
           'Storage',
           'Format',
           'encode',
           'decode',
           'get_file_format',
           'encode_file',
           'decode_file',
           'encode_stream',
           'decode_stream',
           'read_conf',
           'diff',
           'patch',
           'SchemaId',
           'Schema',
           'SchemaRepository',
           'create_schema_repository',
           'merge_schema_repositories',
           'SchemaValidator',
           'PySchemaValidator',
           'RsSchemaValidator',
           'DefaultSchemaValidator',
           'json_schema_repo',
           'vt']
def equals(a: Data,
           b: Data
           ) -> bool:
    """Equality comparison of json serializable data.

    Tests for equality of data according to JSON format. Notably, ``bool``
    values are not considered equal to numeric values in any case. This is
    different from default equality comparison, which considers `False`
    equal to `0` and `0.0`; and `True` equal to `1` and `1.0`.

    Example::

        assert equals(0, 0.0) is True
        assert equals({'a': 1, 'b': 2}, {'b': 2, 'a': 1}) is True
        assert equals(1, True) is False

    """
    if a is None:
        return b is None

    # bool is checked before int/float because bool is a subclass of int
    if isinstance(a, bool):
        return isinstance(b, bool) and a == b

    if isinstance(a, (int, float)):
        return (isinstance(b, (int, float)) and
                not isinstance(b, bool) and
                a == b)

    if isinstance(a, str):
        return isinstance(b, str) and a == b

    if isinstance(a, list):
        return (isinstance(b, list) and
                len(a) == len(b) and
                all(equals(i, j) for i, j in zip(a, b)))

    if isinstance(a, dict):
        # guard `key in b`: dicts of equal size but different keys must
        # compare as not-equal instead of raising KeyError on b[key]
        return (isinstance(b, dict) and
                len(a) == len(b) and
                all(key in b and equals(a[key], b[key])
                    for key in a.keys()))

    raise TypeError('invalid json type')
Equality comparison of json serializable data.
Tests for equality of data according to JSON format. Notably, `bool` values are not considered equal to numeric values in any case. This is different from default equality comparison, which considers `False` equal to `0` and `0.0`; and `True` equal to `1` and `1.0`.
Example::
assert equals(0, 0.0) is True
assert equals({'a': 1, 'b': 2}, {'b': 2, 'a': 1}) is True
assert equals(1, True) is False
def clone(data: Data) -> Data:
    """Deep clone data

    Recursively builds fresh list and dict instances mirroring the input,
    so the result is JSON-equal to the input but shares no mutable
    containers with it.

    Example::

        x = {'a': [1, 2, 3]}
        y = clone(x)
        assert x is not y
        assert x['a'] is not y['a']
        assert equals(x, y)

    """
    if isinstance(data, dict):
        return {key: clone(value) for key, value in data.items()}

    if isinstance(data, list):
        return [clone(item) for item in data]

    # scalars (None, bool, int, float, str) are immutable - return as is
    return data
Deep clone data
This function recursively creates new instances of array and object data based on input data. Resulting json data is equal to provided data.
Example::
x = {'a': [1, 2, 3]}
y = clone(x)
assert x is not y
assert x['a'] is not y['a']
assert equals(x, y)
def flatten(data: Data) -> Iterable[Data]:
    """Flatten JSON data

    If `data` is array, this generator recursively yields result of
    `flatten` call with each element of input list. For other `Data`
    types, input data is yielded unchanged.

    Example::

        data = [1, [], [2], {'a': [3]}]
        result = [1, 2, {'a': [3]}]
        assert list(flatten(data)) == result

    """
    # non-list values (including dicts) are yielded as single items
    if not isinstance(data, list):
        yield data
        return

    for item in data:
        yield from flatten(item)
def get(data: Data,
        path: Path,
        default: Data | None = None
        ) -> Data:
    """Get data element referenced by path

    Path segments are flattened first; string segments index into
    objects, non-bool integer segments index into arrays. When the
    path cannot be resolved, `default` is returned.

    Example::

        data = {'a': [1, 2, [3, 4]]}
        path = ['a', 2, 0]
        assert get(data, path) == 3

        data = [1, 2, 3]
        assert get(data, 0) == 1
        assert get(data, 5) is None
        assert get(data, 5, default=123) == 123

    """
    current = data

    for segment in flatten(path):
        if isinstance(segment, str):
            if not isinstance(current, dict) or segment not in current:
                return default
            current = current[segment]

        elif isinstance(segment, int) and not isinstance(segment, bool):
            if not isinstance(current, list):
                return default
            try:
                current = current[segment]
            except IndexError:
                return default

        else:
            # bool and any other type is not a valid path segment
            raise ValueError('invalid path')

    return current
Get data element referenced by path
Example::
data = {'a': [1, 2, [3, 4]]}
path = ['a', 2, 0]
assert get(data, path) == 3
data = [1, 2, 3]
assert get(data, 0) == 1
assert get(data, 5) is None
assert get(data, 5, default=123) == 123
def set_(data: Data,
         path: Path,
         value: Data
         ) -> Data:
    """Create new data by setting data path element value

    Example::

        data = [1, {'a': 2, 'b': 3}, 4]
        path = [1, 'b']
        result = set_(data, path, 5)
        assert result == [1, {'a': 2, 'b': 5}, 4]
        assert result is not data

        data = [1, 2, 3]
        result = set_(data, 4, 4)
        assert result == [1, 2, 3, None, 4]

    """
    # descend phase: walk the (flattened) path, remembering each
    # (container, key) pair so the structure can be rebuilt bottom-up
    parents = collections.deque()

    for i in flatten(path):
        parent = data

        if isinstance(i, str):
            # missing keys / wrong container type yield None so the
            # rebuild phase creates the intermediate container
            data = data.get(i) if isinstance(data, dict) else None

        elif isinstance(i, int) and not isinstance(i, bool):
            try:
                data = data[i] if isinstance(data, list) else None
            except IndexError:
                data = None

        else:
            raise ValueError('invalid path')

        parents.append((parent, i))

    # ascend phase: rebuild copies from the innermost container outwards;
    # `value` always holds the most recently rebuilt child
    while parents:
        parent, i = parents.pop()

        if isinstance(i, str):
            parent = dict(parent) if isinstance(parent, dict) else {}
            parent[i] = value

        elif isinstance(i, int) and not isinstance(i, bool):
            if not isinstance(parent, list):
                parent = []

            if i >= len(parent):
                # pad with None up to index i
                parent = [*parent,
                          *itertools.repeat(None, i - len(parent) + 1)]

            elif i < 0 and (-i) > len(parent):
                # negative index beyond start: pad with None at the front
                parent = [*itertools.repeat(None, (-i) - len(parent)),
                          *parent]

            else:
                parent = list(parent)

            parent[i] = value

        else:
            raise ValueError('invalid path')

        value = parent

    return value
Create new data by setting data path element value
Example::
data = [1, {'a': 2, 'b': 3}, 4]
path = [1, 'b']
result = set_(data, path, 5)
assert result == [1, {'a': 2, 'b': 5}, 4]
assert result is not data
data = [1, 2, 3]
result = set_(data, 4, 4)
assert result == [1, 2, 3, None, 4]
def remove(data: Data,
           path: Path
           ) -> Data:
    """Create new data by removing part of data referenced by path

    Example::

        data = [1, {'a': 2, 'b': 3}, 4]
        path = [1, 'b']
        result = remove(data, path)
        assert result == [1, {'a': 2}, 4]
        assert result is not data

        data = [1, 2, 3]
        result = remove(data, 4)
        assert result == [1, 2, 3]

    """
    result = data
    parents = collections.deque()

    # descend phase: follow the (flattened) path; if the referenced
    # element does not exist, the original data is returned unchanged
    for i in flatten(path):
        parent = data

        if isinstance(i, str):
            if not isinstance(data, dict) or i not in data:
                return result
            data = data[i]

        elif isinstance(i, int) and not isinstance(i, bool):
            if not isinstance(data, list):
                return result
            try:
                data = data[i]
            except IndexError:
                return result

        else:
            raise ValueError('invalid path')

        parents.append((parent, i))

    # marker: while result is None, the element itself is deleted;
    # afterwards each rebuilt child is assigned back into a copied parent
    result = None

    while parents:
        parent, i = parents.pop()

        if isinstance(i, str):
            parent = dict(parent)

        elif isinstance(i, int) and not isinstance(i, bool):
            parent = list(parent)

        else:
            raise ValueError('invalid path')

        if result is None:
            del parent[i]

        else:
            parent[i] = result

        result = parent

    return result
Create new data by removing part of data referenced by path
Example::
data = [1, {'a': 2, 'b': 3}, 4]
path = [1, 'b']
result = remove(data, path)
assert result == [1, {'a': 2}, 4]
assert result is not data
data = [1, 2, 3]
result = remove(data, 4)
assert result == [1, 2, 3]
class Storage:
    """JSON data storage

    Observable wrapper around a single JSON data state, manipulated with
    the path based `get`/`set_`/`remove` functions from this package.
    Registered callbacks are notified with the new state after every
    modification.

    """

    def __init__(self, data: Data = None):
        self._data = data
        self._change_cbs = util.CallbackRegistry()

    @property
    def data(self) -> Data:
        """Current data state"""
        return self._data

    def register_change_cb(self,
                           cb: Callable[[Data], None]
                           ) -> util.RegisterCallbackHandle:
        """Register data change callback"""
        return self._change_cbs.register(cb)

    def get(self, path: Path, default: Data | None = None):
        """Get data element referenced by path"""
        return get(self._data, path, default)

    def set(self, path: Path, value: Data):
        """Set data element referenced by path and notify callbacks"""
        new_data = set_(self._data, path, value)
        self._data = new_data
        self._change_cbs.notify(new_data)

    def remove(self, path: Path):
        """Remove data element referenced by path and notify callbacks"""
        new_data = remove(self._data, path)
        self._data = new_data
        self._change_cbs.notify(new_data)
JSON data storage
Helper class representing observable JSON data state manipulated with path based get/set/remove functions.
Data
    def register_change_cb(self,
                           cb: Callable[[Data], None]
                           ) -> util.RegisterCallbackHandle:
        """Register data change callback"""
        # delegates to util.CallbackRegistry; the returned handle
        # presumably cancels the registration - confirm in hat.util docs
        return self._change_cbs.register(cb)
Register data change callback
    def get(self, path: Path, default: Data | None = None):
        """Get data element referenced by path (module-level `get`)"""
        return get(self._data, path, default)
Get data
    def set(self, path: Path, value: Data):
        """Set data"""
        # replace state with a new instance and notify registered callbacks
        self._data = set_(self._data, path, value)
        self._change_cbs.notify(self._data)
Set data
class Format(enum.Enum):
    """Supported serialization formats"""

    JSON = 'json'
    YAML = 'yaml'
    TOML = 'toml'
Encoding format
def encode(data: Data,
           format: Format = Format.JSON,
           indent: int | None = None,
           sort_keys: bool = False
           ) -> str:
    """Encode JSON data.

    In case of TOML format, data must be JSON Object.

    In case of TOML format, `indent` is ignored.

    In case of YAML or TOML format, `sort_keys` is ignored.

    Args:
        data: JSON data
        format: encoding format
        indent: indentation size
        sort_keys: sort object keys

    """
    if format == Format.TOML:
        return tomli_w.dumps(data)

    if format == Format.YAML:
        # prefer the C-accelerated dumper when libyaml is available
        dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper)
        return str(yaml.dump(data, indent=indent, Dumper=dumper))

    if format == Format.JSON:
        return json.dumps(data,
                          indent=indent,
                          sort_keys=sort_keys,
                          allow_nan=False)

    raise ValueError('unsupported format')
Encode JSON data.
In case of TOML format, data must be JSON Object.
In case of TOML format, indent
is ignored.
In case of YAML or TOML format, sort_keys
is ignored.
Arguments:
- data: JSON data
- format: encoding format
- indent: indentation size
- sort_keys: sort object keys
def decode(data_str: str,
           format: Format = Format.JSON
           ) -> Data:
    """Decode JSON data.

    Args:
        data_str: encoded JSON data
        format: encoding format

    """
    if format == Format.JSON:
        return json.loads(data_str)

    if format == Format.YAML:
        # prefer the C-accelerated loader when libyaml is available
        loader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader)
        return yaml.load(io.StringIO(data_str), Loader=loader)

    if format == Format.TOML:
        return toml.loads(data_str)

    raise ValueError('unsupported format')
Decode JSON data.
Arguments:
- data_str: encoded JSON data
- format: encoding format
def get_file_format(path: pathlib.PurePath) -> Format:
    """Detect file format based on path suffix

    Raises `ValueError` when the suffix is not one of
    '.json', '.yaml', '.yml' or '.toml'.

    """
    suffix_formats = {'.json': Format.JSON,
                      '.yaml': Format.YAML,
                      '.yml': Format.YAML,
                      '.toml': Format.TOML}

    try:
        return suffix_formats[path.suffix]

    except KeyError:
        raise ValueError('can not determine format from path suffix') from None
Detect file format based on path suffix
def encode_file(data: Data,
                path: pathlib.PurePath,
                format: Format | None = None,
                indent: int | None = 4,
                sort_keys: bool = False):
    """Encode JSON data to file.

    If `format` is ``None``, encoding format is derived from path suffix.

    In case of TOML format, data must be JSON Object.

    In case of TOML format, `indent` is ignored.

    In case of YAML or TOML format, `sort_keys` is ignored.

    Args:
        data: JSON data
        path: file path
        format: encoding format
        indent: indentation size
        sort_keys: sort object keys

    """
    fmt = format if format is not None else get_file_format(path)

    # TOML writer operates on binary streams; other formats on text
    is_toml = fmt == Format.TOML
    mode = 'wb' if is_toml else 'w'
    encoding = None if is_toml else 'utf-8'

    with open(path, mode, encoding=encoding) as f:
        encode_stream(data=data,
                      stream=f,
                      format=fmt,
                      indent=indent,
                      sort_keys=sort_keys)
Encode JSON data to file.
If `format` is `None`, encoding format is derived from path suffix.
In case of TOML format, data must be JSON Object.
In case of TOML format, indent
is ignored.
In case of YAML or TOML format, sort_keys
is ignored.
Arguments:
- data: JSON data
- path: file path
- format: encoding format
- indent: indentation size
- sort_keys: sort object keys
def decode_file(path: pathlib.PurePath,
                format: Format | None = None
                ) -> Data:
    """Decode JSON data from file.

    If `format` is ``None``, encoding format is derived from path suffix.

    Args:
        path: file path
        format: encoding format

    """
    fmt = format if format is not None else get_file_format(path)

    # TOML reader operates on binary streams; other formats on text
    is_toml = fmt == Format.TOML
    mode = 'rb' if is_toml else 'r'
    encoding = None if is_toml else 'utf-8'

    with open(path, mode, encoding=encoding) as f:
        return decode_stream(f, fmt)
Decode JSON data from file.
If `format` is `None`, encoding format is derived from path suffix.
Arguments:
- path: file path
- format: encoding format
def encode_stream(data: Data,
                  stream: io.TextIOBase | io.RawIOBase,
                  format: Format = Format.JSON,
                  indent: int | None = 4,
                  sort_keys: bool = False):
    """Encode JSON data to stream.

    In case of TOML format, data must be JSON Object.

    In case of TOML format, `stream` should be `io.RawIOBase`. For
    other formats, `io.TextIOBase` is expected.

    In case of TOML format, `indent` is ignored.

    In case of YAML or TOML format, `sort_keys` is ignored.

    Args:
        data: JSON data
        stream: output stream
        format: encoding format
        indent: indentation size
        sort_keys: sort object keys

    """
    if format == Format.JSON:
        json.dump(data, stream,
                  indent=indent,
                  sort_keys=sort_keys,
                  allow_nan=False)
        return

    if format == Format.YAML:
        # prefer the C-accelerated dumper when libyaml is available
        dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper)
        yaml.dump(data, stream,
                  indent=indent,
                  Dumper=dumper,
                  explicit_start=True,
                  explicit_end=True)
        return

    if format == Format.TOML:
        tomli_w.dump(data, stream)
        return

    raise ValueError('unsupported format')
Encode JSON data to stream.
In case of TOML format, data must be JSON Object.
In case of TOML format, stream
should be io.RawIOBase
. For
other formats, io.TextIOBase
is expected.
In case of TOML format, indent
is ignored.
In case of YAML or TOML format, sort_keys
is ignored.
Arguments:
- data: JSON data
- stream: output stream
- format: encoding format
- indent: indentation size
- sort_keys: sort object keys
def decode_stream(stream: io.TextIOBase | io.RawIOBase,
                  format: Format = Format.JSON
                  ) -> Data:
    """Decode JSON data from stream.

    In case of TOML format, `stream` should be `io.RawIOBase`. For
    other formats, `io.TextIOBase` is expected.

    Args:
        stream: input stream
        format: encoding format

    """
    if format == Format.JSON:
        return json.load(stream)

    if format == Format.YAML:
        # prefer the C-accelerated loader when libyaml is available
        loader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader)
        return yaml.load(stream, Loader=loader)

    if format == Format.TOML:
        return toml.load(stream)

    raise ValueError('unsupported format')
Decode JSON data from stream.
In case of TOML format, stream
should be io.RawIOBase
. For
other formats, io.TextIOBase
is expected.
Arguments:
- stream: input stream
- format: encoding format
def read_conf(path: pathlib.Path | None,
              default_path: pathlib.Path | None = None,
              default_suffixes: list[str] = ['.yaml', '.yml', '.toml', '.json'],  # NOQA
              stdio_path: pathlib.Path | None = pathlib.Path('-')
              ) -> Data:
    """Read configuration formated as JSON data

    When `path` equals `stdio_path`, configuration is decoded from
    stdin; otherwise from `path`, or from the first existing
    `default_path` variant with one of `default_suffixes`.

    """
    if stdio_path and path == stdio_path:
        return decode_stream(sys.stdin)

    if path:
        return decode_file(path)

    if not default_path:
        raise Exception('invalid configuration path')

    # probe default path with each suffix; if none exists, the last
    # candidate is still passed to decode_file (which will then fail)
    chosen = path
    for suffix in default_suffixes:
        chosen = default_path.with_suffix(suffix)
        if chosen.exists():
            break

    return decode_file(chosen)
Read configuration formated as JSON data
def diff(src: Data,
         dst: Data
         ) -> Data:
    """Generate JSON Patch diff.

    Returns the list of JSON Patch operations transforming `src`
    into `dst`.

    Example::

        src = [1, {'a': 2}, 3]
        dst = [1, {'a': 4}, 3]
        result = diff(src, dst)
        assert result == [{'op': 'replace', 'path': '/1/a', 'value': 4}]

    """
    json_patch = jsonpatch.JsonPatch.from_diff(src, dst)
    return json_patch.patch
Generate JSON Patch diff.
Example::
src = [1, {'a': 2}, 3]
dst = [1, {'a': 4}, 3]
result = diff(src, dst)
assert result == [{'op': 'replace', 'path': '/1/a', 'value': 4}]
def patch(data: Data,
          diff: Data
          ) -> Data:
    """Apply JSON Patch diff.

    Returns new data with the operations from `diff` applied;
    input data is not modified.

    Example::

        data = [1, {'a': 2}, 3]
        d = [{'op': 'replace', 'path': '/1/a', 'value': 4}]
        result = patch(data, d)
        assert result == [1, {'a': 4}, 3]

    """
    patched = jsonpatch.apply_patch(data, diff)
    return patched
Apply JSON Patch diff.
Example::
data = [1, {'a': 2}, 3]
d = [{'op': 'replace', 'path': '/1/a', 'value': 4}]
result = patch(data, d)
assert result == [1, {'a': 4}, 3]
def create_schema_repository(*args: pathlib.PurePath | Schema
                             ) -> SchemaRepository:
    """Create JSON Schema repository.

    Repository can be initialized with multiple arguments, which can be
    instances of ``pathlib.PurePath`` or ``Schema``.

    If an argument is of type ``pathlib.PurePath``, and path points to file
    with a suffix '.json', '.yml' or '.yaml', json serializable data is decoded
    from the file. Otherwise, it is assumed that path points to a directory,
    which is recursively searched for json and yaml files. All decoded schemas
    are added to the repository. If a schema with the same `id` was previously
    added, an exception is raised.

    If an argument is of type ``Schema``, it should be a json serializable data
    representation of a JSON schema. If a schema with the same `id` was
    previously added, an exception is raised.

    """
    repo = {}

    for arg in args:
        if isinstance(arg, pathlib.PurePath):
            # single schema file vs directory searched recursively
            if arg.suffix in _schema_path_suffixes:
                paths = [arg]

            else:
                paths = itertools.chain.from_iterable(
                    arg.rglob(f'*{i}') for i in _schema_path_suffixes)

            # lazily decode each discovered file
            schemas = (decode_file(path) for path in paths)

        elif isinstance(arg, dict):
            schemas = [arg]

        else:
            raise TypeError('invalid argument type')

        for schema in schemas:
            if '$schema' in schema:
                # strip URL fragment before comparing meta schema ids
                meta_schema_id = urllib.parse.urldefrag(schema['$schema']).url
                if meta_schema_id not in _meta_schema_ids:
                    raise Exception(
                        f"unsupported meta schema id {meta_schema_id}")

            else:
                # inject the default meta schema when none is declared
                schema = {'$schema': _default_meta_schema_id,
                          **schema}

            # '$id' preferred, legacy 'id' keyword as fallback
            schema_id = schema.get('$id')
            if not schema_id:
                schema_id = schema.get('id')
            if not schema_id:
                raise Exception('invalid schema id')

            # repository keys are schema ids without URL fragment
            sanitized_schema_id = urllib.parse.urldefrag(schema_id).url
            if sanitized_schema_id in repo:
                raise Exception(f"duplicate schema id {sanitized_schema_id}")

            repo[sanitized_schema_id] = schema

    return repo
Create JSON Schema repository.
Repository can be initialized with multiple arguments, which can be
instances of pathlib.PurePath
or Schema
.
If an argument is of type pathlib.PurePath
, and path points to file
with a suffix '.json', '.yml' or '.yaml', json serializable data is decoded
from the file. Otherwise, it is assumed that path points to a directory,
which is recursively searched for json and yaml files. All decoded schemas
are added to the repository. If a schema with the same id
was previously
added, an exception is raised.
If an argument is of type Schema
, it should be a json serializable data
representation of a JSON schema. If a schema with the same id
was
previously added, an exception is raised.
def merge_schema_repositories(*repos: SchemaRepository
                              ) -> SchemaRepository:
    """Merge JSON Schema repositories.

    An exception is raised if multiple repositories contain the same
    schema id associated with different schemas. Identical duplicates
    are allowed.

    """
    result = {}

    for repo in repos:
        for schema_id, schema in repo.items():
            existing = result.get(schema_id)
            if existing is not None and existing != schema:
                raise Exception(f"conflict for schema id {schema_id}")

            result[schema_id] = schema

    return result
Merge JSON Schema repositories.
An exception is raised if multiple repositories contain the same schema id associated with different schemas.
class SchemaValidator(abc.ABC):
    """Abstract JSON Schema validator.

    Concrete implementations are constructed with a repository of JSON
    Schemas and validate data against schemas from that repository.

    Args:
        repo: repository containing JSON Schemas

    """

    @abc.abstractmethod
    def __init__(self, repo: SchemaRepository):
        pass

    @abc.abstractmethod
    def validate(self, schema_id: SchemaId, data: Data):
        """Validate data against the JSON Schema identified by `schema_id`.

        Args:
            schema_id: JSON schema identifier
            data: data to be validated

        Raises:
            Exception

        """
JSON Schema validator interface
Arguments:
- repo: repository containing JSON Schemas
    @abc.abstractmethod
    def validate(self, schema_id: SchemaId, data: Data):
        """Validate data against JSON Schema.

        Args:
            schema_id: JSON schema identifier
            data: data to be validated

        Raises:
            Exception

        """
        # NOTE(review): implementations appear to raise on validation
        # failure and return None on success - confirm against callers
Validate data against JSON Schema.
Arguments:
- schema_id: JSON schema identifier
- data: data to be validated
Raises:
- Exception
class PySchemaValidator(SchemaValidator):
    """Python implementation of SchemaValidator"""

    def __init__(self, repo: SchemaRepository):
        self._repo = repo
        self._registry = referencing.Registry(retrieve=self._retrieve)

    def validate(self, schema_id: SchemaId, data: Data):
        """Validate data against the schema identified by `schema_id`"""
        # wrap the id in a $ref so jsonschema resolves it via the registry
        jsonschema.validate(instance=data,
                            schema={'$ref': schema_id},
                            registry=self._registry)

    def _retrieve(self, uri):
        # registry callback: serve schemas from the local repository
        if uri not in self._repo:
            raise referencing.exceptions.NoSuchResource(uri)

        return referencing.Resource.from_contents(self._repo[uri])
Python implementation of SchemaValidator
    def validate(self, schema_id: SchemaId, data: Data):
        """Validate data against JSON Schema referenced by `schema_id`"""
        # wrap the id in a $ref so jsonschema resolves it via the registry
        jsonschema.validate(instance=data,
                            schema={'$ref': schema_id},
                            registry=self._registry)
Validate data against JSON Schema.
Arguments:
- schema_id: JSON schema identifier
- data: data to be validated
Raises:
- Exception
class RsSchemaValidator(SchemaValidator):
    """Rust implementation of SchemaValidator"""

    def __init__(self, repo: SchemaRepository):
        # module-level jsonschema_rs is falsy when the optional
        # dependency is not installed
        if not jsonschema_rs:
            raise Exception('implementation not available')

        self._repo = repo
        # one $defs entry per repository schema id, each a plain $ref
        self._defs = {schema_id: {'$ref': schema_id}
                      for schema_id in self._repo.keys()}

    def validate(self, schema_id: SchemaId, data: Data):
        """Validate data against the schema identified by `schema_id`"""
        jsonschema_rs.validate(schema={'$ref': schema_id,
                                       '$defs': self._defs},
                               instance=data,
                               retriever=self._retriever)

    def _retriever(self, uri):
        # resolve referenced schemas directly from the repository
        return self._repo[uri]
Rust implementation of SchemaValidatior
    def validate(self, schema_id: SchemaId, data: Data):
        """Validate data against JSON Schema referenced by `schema_id`"""
        # top-level schema is a $ref into $defs (one $ref per repository
        # schema id); retriever serves schema content from the repo
        jsonschema_rs.validate(schema={'$ref': schema_id,
                                       '$defs': self._defs},
                               instance=data,
                               retriever=self._retriever)
Validate data against JSON Schema.
Arguments:
- schema_id: JSON schema identifier
- data: data to be validated
Raises:
- Exception