be/src/util/json/json_parser.cpp
Line | Count | Source |
1 | | // Licensed to the Apache Software Foundation (ASF) under one |
2 | | // or more contributor license agreements. See the NOTICE file |
3 | | // distributed with this work for additional information |
4 | | // regarding copyright ownership. The ASF licenses this file |
5 | | // to you under the Apache License, Version 2.0 (the |
6 | | // "License"); you may not use this file except in compliance |
7 | | // with the License. You may obtain a copy of the License at |
8 | | // |
9 | | // http://www.apache.org/licenses/LICENSE-2.0 |
10 | | // |
11 | | // Unless required by applicable law or agreed to in writing, |
12 | | // software distributed under the License is distributed on an |
13 | | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
14 | | // KIND, either express or implied. See the License for the |
15 | | // specific language governing permissions and limitations |
16 | | // under the License. |
17 | | // This file is copied from |
18 | | // https://github.com/ClickHouse/ClickHouse/blob/master/src/Common/JSONParsers/SimdJSONParser.cpp |
19 | | // and modified by Doris |
20 | | |
21 | | #include "util/json/json_parser.h" |
22 | | |
23 | | #include <fmt/format.h> |
24 | | #include <glog/logging.h> |
25 | | |
26 | | #include <algorithm> |
27 | | #include <cassert> |
28 | | #include <string_view> |
29 | | |
30 | | #include "common/cast_set.h" |
31 | | // IWYU pragma: keep |
32 | | #include "common/status.h" |
33 | | #include "util/json/path_in_data.h" |
34 | | #include "util/json/simd_json_parser.h" |
35 | | |
36 | | namespace doris { |
37 | | #include "common/compile_check_begin.h" |
38 | | |
39 | | template <typename ParserImpl> |
40 | | std::optional<ParseResult> JSONDataParser<ParserImpl>::parse(const char* begin, size_t length, |
41 | 80.6k | const ParseConfig& config) { |
42 | 80.6k | Element document; |
43 | 80.6k | if (!parser.parse(begin, length, document)) { |
44 | 19 | return {}; |
45 | 19 | } |
46 | 80.6k | ParseContext context; |
47 | | // enable_flatten_nested controls nested path traversal |
48 | | // NestedGroup expansion is now handled at storage layer |
49 | 80.6k | context.enable_flatten_nested = config.enable_flatten_nested; |
50 | 80.6k | context.is_top_array = document.isArray(); |
51 | 80.6k | traverse(document, context); |
52 | 80.6k | ParseResult result; |
53 | 80.6k | result.values = std::move(context.values); |
54 | 80.6k | result.paths.reserve(context.paths.size()); |
55 | 1.35M | for (auto&& path : context.paths) { |
56 | 1.35M | result.paths.emplace_back(std::move(path)); |
57 | 1.35M | } |
58 | 80.6k | return result; |
59 | 80.6k | } |
60 | | |
template <typename ParserImpl>
// Recursively walk `element`, recording one (path, value) pair per leaf into
// `ctx`. Objects descend by key, arrays are collected per-path, scalars are
// emitted directly at the current builder path.
void JSONDataParser<ParserImpl>::traverse(const Element& element, ParseContext& ctx) {
    // checkStackSize();
    if (element.isObject()) {
        traverseObject(element.getObject(), ctx);
    } else if (element.isArray()) {
        // allow nested arrays (multi-level) for NestedGroup; deeper levels are
        // handled by VariantNestedBuilder with a max-depth guard.
        has_nested = false;
        check_has_nested_object(element);
        ctx.has_nested_in_flatten = has_nested && ctx.enable_flatten_nested;
        if (has_nested && !ctx.enable_flatten_nested) {
            // Flattening disabled: serialize the whole array (which contains
            // objects) as a single opaque Jsonb value at the current path.
            JsonbWriter writer;
            traverseArrayAsJsonb(element.getArray(), writer);
            ctx.paths.push_back(ctx.builder.get_parts());
            ctx.values.push_back(Field::create_field<TYPE_JSONB>(
                    JsonbField(writer.getOutput()->getBuffer(), writer.getOutput()->getSize())));
        } else {
            traverseArray(element.getArray(), ctx);
        }
        // Reset has_nested_in_flatten once this array is done so the flag does
        // not leak into the traversal of a subsequent sibling array.
        ctx.has_nested_in_flatten = false;
    } else {
        // Scalar leaf: emit the current builder path with the converted value.
        ctx.paths.push_back(ctx.builder.get_parts());
        ctx.values.push_back(getValueAsField(element));
    }
}
89 | | template <typename ParserImpl> |
90 | 144k | void JSONDataParser<ParserImpl>::traverseObject(const JSONObject& object, ParseContext& ctx) { |
91 | 144k | ctx.paths.reserve(ctx.paths.size() + object.size()); |
92 | 144k | ctx.values.reserve(ctx.values.size() + object.size()); |
93 | 1.56M | for (auto it = object.begin(); it != object.end(); ++it) { |
94 | 1.42M | const auto& [key, value] = *it; |
95 | 1.42M | const size_t max_key_length = cast_set<size_t>(config::variant_max_json_key_length); |
96 | 1.42M | if (key.size() > max_key_length) { |
97 | 336 | throw doris::Exception( |
98 | 336 | doris::ErrorCode::INVALID_ARGUMENT, |
99 | 336 | fmt::format("Key length exceeds maximum allowed size of {} bytes.", |
100 | 336 | max_key_length)); |
101 | 336 | } |
102 | 1.42M | ctx.builder.append(key, false); |
103 | 1.42M | traverse(value, ctx); |
104 | 1.42M | ctx.builder.pop_back(); |
105 | 1.42M | } |
106 | 144k | } |
107 | | |
108 | | template <typename ParserImpl> |
109 | 1.12M | void JSONDataParser<ParserImpl>::check_has_nested_object(const Element& element) { |
110 | 1.12M | if (element.isArray()) { |
111 | 105k | const JSONArray& array = element.getArray(); |
112 | 1.17M | for (auto it = array.begin(); it != array.end(); ++it) { |
113 | 1.06M | check_has_nested_object(*it); |
114 | 1.06M | } |
115 | 105k | } |
116 | 1.12M | if (element.isObject()) { |
117 | 23.0k | has_nested = true; |
118 | 23.0k | } |
119 | 1.12M | } |
120 | | |
121 | | template <typename ParserImpl> |
122 | 4.31k | void JSONDataParser<ParserImpl>::traverseAsJsonb(const Element& element, JsonbWriter& writer) { |
123 | 4.31k | if (element.isObject()) { |
124 | 1.49k | traverseObjectAsJsonb(element.getObject(), writer); |
125 | 2.82k | } else if (element.isArray()) { |
126 | 7 | traverseArrayAsJsonb(element.getArray(), writer); |
127 | 2.81k | } else { |
128 | 2.81k | writeValueAsJsonb(element, writer); |
129 | 2.81k | } |
130 | 4.31k | } |
131 | | |
132 | | template <typename ParserImpl> |
133 | | void JSONDataParser<ParserImpl>::traverseObjectAsJsonb(const JSONObject& object, |
134 | 1.49k | JsonbWriter& writer) { |
135 | 1.49k | writer.writeStartObject(); |
136 | 4.93k | for (auto it = object.begin(); it != object.end(); ++it) { |
137 | 3.64k | const auto& [key, value] = *it; |
138 | 3.64k | const size_t max_key_length = cast_set<size_t>(config::variant_max_json_key_length); |
139 | 3.64k | if (key.size() > max_key_length) { |
140 | 201 | throw doris::Exception( |
141 | 201 | doris::ErrorCode::INVALID_ARGUMENT, |
142 | 201 | fmt::format("Key length exceeds maximum allowed size of {} bytes.", |
143 | 201 | max_key_length)); |
144 | 201 | } |
145 | 3.44k | writer.writeKey(key.data(), cast_set<uint8_t>(key.size())); |
146 | 3.44k | traverseAsJsonb(value, writer); |
147 | 3.44k | } |
148 | 1.28k | writer.writeEndObject(); |
149 | 1.28k | } |
150 | | |
151 | | template <typename ParserImpl> |
152 | 216 | void JSONDataParser<ParserImpl>::traverseArrayAsJsonb(const JSONArray& array, JsonbWriter& writer) { |
153 | 216 | writer.writeStartArray(); |
154 | 1.08k | for (auto it = array.begin(); it != array.end(); ++it) { |
155 | 873 | traverseAsJsonb(*it, writer); |
156 | 873 | } |
157 | 216 | writer.writeEndArray(); |
158 | 216 | } |
159 | | |
160 | | // check isPrefix in PathInData::Parts. like : [{"a": {"c": {"b": 1}}}, {"a": {"c": 2.2}}], "a.c" is prefix of "a.c.b" |
161 | | // return true if prefix is a prefix of parts |
162 | 6 | static bool is_prefix(const PathInData::Parts& prefix, const PathInData::Parts& parts) { |
163 | 6 | if (prefix.size() >= parts.size()) { |
164 | 5 | return false; |
165 | 5 | } |
166 | 2 | for (size_t i = 0; i < prefix.size(); ++i) { |
167 | 1 | if (prefix[i].key != parts[i].key) { |
168 | 0 | return false; |
169 | 0 | } |
170 | 1 | } |
171 | 1 | return true; |
172 | 1 | } |
173 | | |
template <typename ParserImpl>
void JSONDataParser<ParserImpl>::traverseArray(const JSONArray& array, ParseContext& ctx) {
    /// Traverse elements of array and collect an array of fields by each path.
    ParseArrayContext array_ctx;
    array_ctx.has_nested_in_flatten = ctx.has_nested_in_flatten;
    array_ctx.is_top_array = ctx.is_top_array;
    array_ctx.total_size = array.size();
    for (auto it = array.begin(); it != array.end(); ++it) {
        traverseArrayElement(*it, array_ctx);
        ++array_ctx.current_size;
    }
    auto&& arrays_by_path = array_ctx.arrays_by_path;
    if (arrays_by_path.empty()) {
        // No element produced any path (e.g. the array is empty): emit a single
        // empty-array value at the current builder path.
        ctx.paths.push_back(ctx.builder.get_parts());
        ctx.values.push_back(Field::create_field<TYPE_ARRAY>(Array()));
    } else {
        ctx.paths.reserve(ctx.paths.size() + arrays_by_path.size());
        ctx.values.reserve(ctx.values.size() + arrays_by_path.size());
        for (auto it = arrays_by_path.begin(); it != arrays_by_path.end(); ++it) {
            auto&& [path, path_array] = it->second;
            /// Merge prefix path and path of array element.
            ctx.paths.push_back(ctx.builder.append(path, true).get_parts());
            ctx.values.push_back(Field::create_field<TYPE_ARRAY>(std::move(path_array)));
            // Undo the append so the builder is restored for the next path.
            ctx.builder.pop_back(path.size());
        }
    }
}
201 | | |
template <typename ParserImpl>
// Traverse a single array element into a fresh context, then merge its
// (path, value) pairs into the per-path arrays of the enclosing array `ctx`,
// keeping every per-path array aligned element-by-element.
void JSONDataParser<ParserImpl>::traverseArrayElement(const Element& element,
                                                      ParseArrayContext& ctx) {
    ParseContext element_ctx;
    element_ctx.has_nested_in_flatten = ctx.has_nested_in_flatten;
    element_ctx.is_top_array = ctx.is_top_array;
    traverse(element, element_ctx);
    // Destructure ParseContext; unused members are bound to `_` / `__`.
    auto& [_, paths, values, flatten_nested, __, is_top_array] = element_ctx;

    if (element_ctx.has_nested_in_flatten && is_top_array) {
        // Reject prefix-overlapping subcolumn paths across array elements.
        checkAmbiguousStructure(ctx, paths);
    }

    size_t size = paths.size();
    size_t keys_to_update = ctx.arrays_by_path.size();

    for (size_t i = 0; i < size; ++i) {
        // Null values are skipped here; fillMissedValuesInArrays below pads the
        // corresponding per-path arrays so alignment is preserved.
        if (values[i].is_null()) {
            continue;
        }

        UInt128 hash = PathInData::get_parts_hash(paths[i]);
        auto found = ctx.arrays_by_path.find(hash);

        if (found != ctx.arrays_by_path.end()) {
            handleExistingPath(found->second, paths[i], values[i], ctx, keys_to_update);
        } else {
            handleNewPath(hash, paths[i], values[i], ctx);
        }
    }

    // always fill missed values to keep element-level association between keys.
    if (keys_to_update) {
        fillMissedValuesInArrays(ctx);
    }
}
238 | | |
239 | | // check if the structure of top_array is ambiguous like: |
240 | | // [{"a": {"b": {"c": 1}}}, {"a": {"b": 1}}] a.b is ambiguous |
241 | | template <typename ParserImpl> |
242 | | void JSONDataParser<ParserImpl>::checkAmbiguousStructure( |
243 | 6 | const ParseArrayContext& ctx, const std::vector<PathInData::Parts>& paths) { |
244 | 6 | for (auto&& current_path : paths) { |
245 | 8 | for (auto it = ctx.arrays_by_path.begin(); it != ctx.arrays_by_path.end(); ++it) { |
246 | 3 | auto&& [p, _] = it->second; |
247 | 3 | if (is_prefix(p, current_path) || is_prefix(current_path, p)) { |
248 | 1 | throw doris::Exception(doris::ErrorCode::INVALID_ARGUMENT, |
249 | 1 | "Ambiguous structure of top_array nested subcolumns: {}, {}", |
250 | 1 | PathInData(p).to_jsonpath(), |
251 | 1 | PathInData(current_path).to_jsonpath()); |
252 | 1 | } |
253 | 3 | } |
254 | 6 | } |
255 | 6 | } |
256 | | |
template <typename ParserImpl>
// Append `value` to the per-path array that already exists for `path`, and
// validate Nested array-size consistency for the current element.
void JSONDataParser<ParserImpl>::handleExistingPath(std::pair<PathInData::Parts, Array>& path_data,
                                                    const PathInData::Parts& path, Field& value,
                                                    ParseArrayContext& ctx,
                                                    size_t& keys_to_update) {
    auto& path_array = path_data.second;
    // keep arrays aligned for all keys (including top-level arrays).
    assert(path_array.size() == ctx.current_size);
    // If current element of array is part of Nested,
    // collect its size or check it if the size of
    // the Nested has been already collected.
    auto nested_key = getNameOfNested(path, value);
    if (!nested_key.empty()) {
        size_t array_size = value.get<TYPE_ARRAY>().size();
        auto& current_nested_sizes = ctx.nested_sizes_by_key[nested_key];
        if (current_nested_sizes.size() == ctx.current_size) {
            // First key of this Nested seen for the current element.
            current_nested_sizes.push_back(array_size);
        } else if (array_size != current_nested_sizes.back()) {
            // All arrays of the same Nested must have equal sizes per element.
            throw doris::Exception(doris::ErrorCode::INTERNAL_ERROR,
                                   "Array sizes mismatched ({} and {})", array_size,
                                   current_nested_sizes.back());
        }
    }

    path_array.push_back(std::move(value));
    // One fewer pre-existing key left to fill for this element.
    --keys_to_update;
}
284 | | |
285 | | template <typename ParserImpl> |
286 | | void JSONDataParser<ParserImpl>::handleNewPath(UInt128 hash, const PathInData::Parts& path, |
287 | 58.1k | Field& value, ParseArrayContext& ctx) { |
288 | 58.1k | Array path_array; |
289 | 58.1k | path_array.reserve(ctx.total_size); |
290 | | |
291 | | // always resize to keep alignment. |
292 | 58.1k | path_array.resize(ctx.current_size); |
293 | | |
294 | 58.1k | auto nested_key = getNameOfNested(path, value); |
295 | 58.1k | if (!nested_key.empty()) { |
296 | 3 | size_t array_size = value.get<TYPE_ARRAY>().size(); |
297 | 3 | auto& current_nested_sizes = ctx.nested_sizes_by_key[nested_key]; |
298 | 3 | if (current_nested_sizes.empty()) { |
299 | 2 | current_nested_sizes.resize(ctx.current_size); |
300 | 2 | } else { |
301 | | // If newly added element is part of the Nested then |
302 | | // resize its elements to keep correct sizes of Nested arrays. |
303 | 3 | for (size_t j = 0; j < ctx.current_size; ++j) { |
304 | 2 | path_array[j] = Field::create_field<TYPE_ARRAY>(Array(current_nested_sizes[j])); |
305 | 2 | } |
306 | 1 | } |
307 | 3 | if (current_nested_sizes.size() == ctx.current_size) { |
308 | 2 | current_nested_sizes.push_back(array_size); |
309 | 2 | } else if (array_size != current_nested_sizes.back()) { |
310 | 0 | throw doris::Exception(doris::ErrorCode::INTERNAL_ERROR, |
311 | 0 | "Array sizes mismatched ({} and {})", array_size, |
312 | 0 | current_nested_sizes.back()); |
313 | 0 | } |
314 | 3 | } |
315 | | |
316 | 58.1k | path_array.push_back(std::move(value)); |
317 | 58.1k | auto& elem = ctx.arrays_by_path[hash]; |
318 | 58.1k | elem.first = std::move(path); |
319 | 58.1k | elem.second = std::move(path_array); |
320 | 58.1k | } |
321 | | |
322 | | template <typename ParserImpl> |
323 | 10.2k | void JSONDataParser<ParserImpl>::fillMissedValuesInArrays(ParseArrayContext& ctx) { |
324 | 20.5k | for (auto it = ctx.arrays_by_path.begin(); it != ctx.arrays_by_path.end(); ++it) { |
325 | 10.2k | auto& [path, path_array] = it->second; |
326 | 10.2k | assert(path_array.size() == ctx.current_size || path_array.size() == ctx.current_size + 1); |
327 | 10.2k | if (path_array.size() == ctx.current_size) { |
328 | 10.2k | bool inserted = tryInsertDefaultFromNested(ctx, path, path_array); |
329 | 10.2k | if (!inserted) { |
330 | 10.2k | path_array.emplace_back(); |
331 | 10.2k | } |
332 | 10.2k | } |
333 | 10.2k | } |
334 | 10.2k | } |
335 | | |
template <typename ParserImpl>
// Returns true if a Nested-aware default (an empty array of the size recorded
// for this element's Nested group) was appended to `array`; false means the
// caller should append a plain default instead.
bool JSONDataParser<ParserImpl>::tryInsertDefaultFromNested(ParseArrayContext& ctx,
                                                            const PathInData::Parts& path,
                                                            Array& array) {
    /// If there is a collected size of current Nested
    /// then insert array of this size as a default value.
    if (path.empty() || array.empty()) {
        return false;
    }
    /// Last element is not Null, because otherwise this path wouldn't exist.
    auto nested_key = getNameOfNested(path, array.back());
    if (nested_key.empty()) {
        // Not part of a Nested column.
        return false;
    }
    auto mapped = ctx.nested_sizes_by_key.find(nested_key);
    if (mapped == ctx.nested_sizes_by_key.end()) {
        // No sizes collected for this Nested group yet.
        return false;
    }
    auto& current_nested_sizes = mapped->second;
    assert(current_nested_sizes.size() == ctx.current_size ||
           current_nested_sizes.size() == ctx.current_size + 1);
    /// If all keys of Nested were missed then add a zero length.
    if (current_nested_sizes.size() == ctx.current_size) {
        current_nested_sizes.push_back(0);
    }
    size_t array_size = current_nested_sizes.back();
    array.push_back(Field::create_field<TYPE_ARRAY>(Array(array_size)));
    return true;
}
365 | | |
366 | | template <typename ParserImpl> |
367 | | StringRef JSONDataParser<ParserImpl>::getNameOfNested(const PathInData::Parts& path, |
368 | 569k | const Field& value) { |
369 | 569k | if (value.get_type() != PrimitiveType::TYPE_ARRAY || path.empty()) { |
370 | 569k | return {}; |
371 | 569k | } |
372 | | /// Find first key that is marked as nested, |
373 | | /// because we may have tuple of Nested and there could be |
374 | | /// several arrays with the same prefix, but with independent sizes. |
375 | | /// Consider we have array element with type `k2 Tuple(k3 Nested(...), k5 Nested(...))` |
376 | | /// Then subcolumns `k2.k3` and `k2.k5` may have indepented sizes and we should extract |
377 | | /// `k3` and `k5` keys instead of `k2`. |
378 | 3 | for (const auto& part : path) { |
379 | 3 | if (part.is_nested) { |
380 | 3 | return {part.key.data(), part.key.size()}; |
381 | 3 | } |
382 | 3 | } |
383 | 0 | return {}; |
384 | 3 | } |
385 | | |
386 | | #include "common/compile_check_end.h" |
387 | | |
388 | | template class JSONDataParser<SimdJSONParser>; |
389 | | } // namespace doris |