Coverage Report

Created: 2026-05-13 01:09

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
be/src/format/json/new_json_reader.cpp
Line
Count
Source
1
// Licensed to the Apache Software Foundation (ASF) under one
2
// or more contributor license agreements.  See the NOTICE file
3
// distributed with this work for additional information
4
// regarding copyright ownership.  The ASF licenses this file
5
// to you under the Apache License, Version 2.0 (the
6
// "License"); you may not use this file except in compliance
7
// with the License.  You may obtain a copy of the License at
8
//
9
//   http://www.apache.org/licenses/LICENSE-2.0
10
//
11
// Unless required by applicable law or agreed to in writing,
12
// software distributed under the License is distributed on an
13
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
// KIND, either express or implied.  See the License for the
15
// specific language governing permissions and limitations
16
// under the License.
17
18
#include "format/json/new_json_reader.h"
19
20
#include <fmt/format.h>
21
#include <gen_cpp/Metrics_types.h>
22
#include <gen_cpp/PlanNodes_types.h>
23
#include <gen_cpp/Types_types.h>
24
#include <glog/logging.h>
25
#include <rapidjson/error/en.h>
26
#include <rapidjson/reader.h>
27
#include <rapidjson/stringbuffer.h>
28
#include <rapidjson/writer.h>
29
#include <simdjson/simdjson.h> // IWYU pragma: keep
30
31
#include <algorithm>
32
#include <cinttypes>
33
#include <cstdio>
34
#include <cstring>
35
#include <map>
36
#include <memory>
37
#include <string_view>
38
#include <utility>
39
40
#include "common/compiler_util.h" // IWYU pragma: keep
41
#include "common/config.h"
42
#include "common/status.h"
43
#include "core/assert_cast.h"
44
#include "core/block/column_with_type_and_name.h"
45
#include "core/column/column.h"
46
#include "core/column/column_array.h"
47
#include "core/column/column_map.h"
48
#include "core/column/column_nullable.h"
49
#include "core/column/column_string.h"
50
#include "core/column/column_struct.h"
51
#include "core/custom_allocator.h"
52
#include "core/data_type/data_type_array.h"
53
#include "core/data_type/data_type_factory.hpp"
54
#include "core/data_type/data_type_map.h"
55
#include "core/data_type/data_type_number.h" // IWYU pragma: keep
56
#include "core/data_type/data_type_struct.h"
57
#include "core/data_type/define_primitive_type.h"
58
#include "exec/scan/scanner.h"
59
#include "exprs/json_functions.h"
60
#include "format/file_reader/new_plain_text_line_reader.h"
61
#include "io/file_factory.h"
62
#include "io/fs/buffered_reader.h"
63
#include "io/fs/file_reader.h"
64
#include "io/fs/stream_load_pipe.h"
65
#include "io/fs/tracing_file_reader.h"
66
#include "runtime/descriptors.h"
67
#include "runtime/runtime_state.h"
68
#include "util/slice.h"
69
70
namespace doris::io {
71
struct IOContext;
72
enum class FileCachePolicy : uint8_t;
73
} // namespace doris::io
74
75
namespace doris {
76
using namespace ErrorCode;
77
78
NewJsonReader::NewJsonReader(RuntimeState* state, RuntimeProfile* profile, ScannerCounter* counter,
79
                             const TFileScanRangeParams& params, const TFileRangeDesc& range,
80
                             const std::vector<SlotDescriptor*>& file_slot_descs, bool* scanner_eof,
81
                             size_t batch_size, io::IOContext* io_ctx,
82
                             std::shared_ptr<io::IOContext> io_ctx_holder)
83
0
        : _vhandle_json_callback(nullptr),
84
0
          _state(state),
85
0
          _profile(profile),
86
0
          _counter(counter),
87
0
          _params(params),
88
0
          _range(range),
89
0
          _file_slot_descs(file_slot_descs),
90
0
          _file_reader(nullptr),
91
0
          _line_reader(nullptr),
92
0
          _reader_eof(false),
93
0
          _decompressor(nullptr),
94
0
          _skip_first_line(false),
95
0
          _next_row(0),
96
0
          _total_rows(0),
97
0
          _value_allocator(_value_buffer, sizeof(_value_buffer)),
98
0
          _parse_allocator(_parse_buffer, sizeof(_parse_buffer)),
99
0
          _origin_json_doc(&_value_allocator, sizeof(_parse_buffer), &_parse_allocator),
100
0
          _scanner_eof(scanner_eof),
101
0
          _current_offset(0),
102
0
          _io_ctx(io_ctx),
103
0
          _io_ctx_holder(std::move(io_ctx_holder)),
104
0
          _batch_size(std::max(batch_size, 1UL)) {
105
0
    if (_io_ctx == nullptr && _io_ctx_holder) {
106
0
        _io_ctx = _io_ctx_holder.get();
107
0
    }
108
0
    _read_timer = ADD_TIMER(_profile, "ReadTime");
109
0
    if (_range.__isset.compress_type) {
110
        // for compatibility
111
0
        _file_compress_type = _range.compress_type;
112
0
    } else {
113
0
        _file_compress_type = _params.compress_type;
114
0
    }
115
0
    _init_system_properties();
116
0
    _init_file_description();
117
0
}
118
119
NewJsonReader::NewJsonReader(RuntimeProfile* profile, const TFileScanRangeParams& params,
120
                             const TFileRangeDesc& range,
121
                             const std::vector<SlotDescriptor*>& file_slot_descs, size_t batch_size,
122
                             io::IOContext* io_ctx, std::shared_ptr<io::IOContext> io_ctx_holder)
123
2
        : _vhandle_json_callback(nullptr),
124
2
          _state(nullptr),
125
2
          _profile(profile),
126
2
          _params(params),
127
2
          _range(range),
128
2
          _file_slot_descs(file_slot_descs),
129
2
          _line_reader(nullptr),
130
2
          _reader_eof(false),
131
2
          _decompressor(nullptr),
132
2
          _skip_first_line(false),
133
2
          _next_row(0),
134
2
          _total_rows(0),
135
2
          _value_allocator(_value_buffer, sizeof(_value_buffer)),
136
2
          _parse_allocator(_parse_buffer, sizeof(_parse_buffer)),
137
2
          _origin_json_doc(&_value_allocator, sizeof(_parse_buffer), &_parse_allocator),
138
2
          _io_ctx(io_ctx),
139
2
          _io_ctx_holder(std::move(io_ctx_holder)),
140
2
          _batch_size(std::max(batch_size, 1UL)) {
141
2
    if (_io_ctx == nullptr && _io_ctx_holder) {
142
0
        _io_ctx = _io_ctx_holder.get();
143
0
    }
144
2
    if (_range.__isset.compress_type) {
145
        // for compatibility
146
0
        _file_compress_type = _range.compress_type;
147
2
    } else {
148
2
        _file_compress_type = _params.compress_type;
149
2
    }
150
2
    _init_system_properties();
151
2
    _init_file_description();
152
2
}
153
154
2
void NewJsonReader::_init_system_properties() {
155
2
    if (_range.__isset.file_type) {
156
        // for compatibility
157
0
        _system_properties.system_type = _range.file_type;
158
2
    } else {
159
2
        _system_properties.system_type = _params.file_type;
160
2
    }
161
2
    _system_properties.properties = _params.properties;
162
2
    _system_properties.hdfs_params = _params.hdfs_params;
163
2
    if (_params.__isset.broker_addresses) {
164
0
        _system_properties.broker_addresses.assign(_params.broker_addresses.begin(),
165
0
                                                   _params.broker_addresses.end());
166
0
    }
167
2
}
168
169
2
void NewJsonReader::_init_file_description() {
170
2
    _file_description.path = _range.path;
171
2
    _file_description.file_size = _range.__isset.file_size ? _range.file_size : -1;
172
173
2
    if (_range.__isset.fs_name) {
174
0
        _file_description.fs_name = _range.fs_name;
175
0
    }
176
2
    if (_range.__isset.file_cache_admission) {
177
0
        _file_description.file_cache_admission = _range.file_cache_admission;
178
0
    }
179
2
}
180
181
Status NewJsonReader::init_reader(
182
        const std::unordered_map<std::string, VExprContextSPtr>& col_default_value_ctx,
183
0
        bool is_load) {
184
0
    _is_load = is_load;
185
186
    // generate _col_default_value_map
187
0
    RETURN_IF_ERROR(_get_column_default_value(_file_slot_descs, col_default_value_ctx));
188
189
    //use serde insert data to column.
190
0
    for (auto* slot_desc : _file_slot_descs) {
191
0
        _serdes.emplace_back(slot_desc->get_data_type_ptr()->get_serde());
192
0
    }
193
194
    // create decompressor.
195
    // _decompressor may be nullptr if this is not a compressed file
196
0
    RETURN_IF_ERROR(Decompressor::create_decompressor(_file_compress_type, &_decompressor));
197
198
0
    RETURN_IF_ERROR(_simdjson_init_reader());
199
0
    return Status::OK();
200
0
}
201
202
// ---- Unified init_reader(ReaderInitContext*) overrides ----
203
204
0
Status NewJsonReader::_open_file_reader(ReaderInitContext* /*ctx*/) {
205
0
    RETURN_IF_ERROR(_get_range_params());
206
0
    RETURN_IF_ERROR(_open_file_reader(false));
207
0
    return Status::OK();
208
0
}
209
210
0
Status NewJsonReader::_do_init_reader(ReaderInitContext* base_ctx) {
211
0
    auto* ctx = checked_context_cast<JsonInitContext>(base_ctx);
212
0
    _is_load = ctx->is_load;
213
214
0
    RETURN_IF_ERROR(_get_column_default_value(_file_slot_descs, *ctx->col_default_value_ctx));
215
0
    for (auto* slot_desc : _file_slot_descs) {
216
0
        _serdes.emplace_back(slot_desc->get_data_type_ptr()->get_serde());
217
0
    }
218
219
    // Create decompressor (needed by line reader below)
220
0
    RETURN_IF_ERROR(Decompressor::create_decompressor(_file_compress_type, &_decompressor));
221
222
0
    if (LIKELY(_read_json_by_line)) {
223
0
        RETURN_IF_ERROR(_open_line_reader());
224
0
    }
225
0
    RETURN_IF_ERROR(_parse_jsonpath_and_json_root());
226
227
0
    if (_parsed_jsonpaths.empty()) {
228
0
        _vhandle_json_callback = &NewJsonReader::_simdjson_handle_simple_json;
229
0
    } else {
230
0
        if (_strip_outer_array) {
231
0
            _vhandle_json_callback = &NewJsonReader::_simdjson_handle_flat_array_complex_json;
232
0
        } else {
233
0
            _vhandle_json_callback = &NewJsonReader::_simdjson_handle_nested_complex_json;
234
0
        }
235
0
    }
236
0
    _ondemand_json_parser = std::make_unique<simdjson::ondemand::parser>();
237
0
    for (int i = 0; i < _file_slot_descs.size(); ++i) {
238
0
        _slot_desc_index[StringRef {_file_slot_descs[i]->col_name()}] = i;
239
0
        if (_file_slot_descs[i]->is_skip_bitmap_col()) {
240
0
            skip_bitmap_col_idx = i;
241
0
        }
242
0
    }
243
0
    _simdjson_ondemand_padding_buffer.resize(_padded_size);
244
0
    _simdjson_ondemand_unscape_padding_buffer.resize(_padded_size);
245
0
    return Status::OK();
246
0
}
247
248
5
void NewJsonReader::set_batch_size(size_t batch_size) {
249
    // 0 means "not set" / "use default" for the row-based readers; we must
250
    // never let _batch_size be 0 because _do_get_next_block uses it as the
251
    // upper bound of a `while (block->rows() < batch_size)` loop and a 0
252
    // would make the reader return without setting eof, causing the scanner
253
    // to spin on empty blocks.
254
5
    _batch_size = std::max(batch_size, 1UL);
255
5
}
256
257
0
Status NewJsonReader::_do_get_next_block(Block* block, size_t* read_rows, bool* eof) {
258
0
    if (_reader_eof) {
259
0
        *eof = true;
260
0
        return Status::OK();
261
0
    }
262
263
0
    const auto batch_size = _batch_size;
264
0
    const auto max_block_bytes = _state->preferred_block_size_bytes();
265
266
0
    while (block->rows() < batch_size && !_reader_eof && (block->bytes() < max_block_bytes)) {
267
0
        if (UNLIKELY(_read_json_by_line && _skip_first_line)) {
268
0
            size_t size = 0;
269
0
            const uint8_t* line_ptr = nullptr;
270
0
            RETURN_IF_ERROR(_line_reader->read_line(&line_ptr, &size, &_reader_eof, _io_ctx));
271
0
            _skip_first_line = false;
272
0
            continue;
273
0
        }
274
275
0
        bool is_empty_row = false;
276
277
0
        RETURN_IF_ERROR(
278
0
                _read_json_column(_state, *block, _file_slot_descs, &is_empty_row, &_reader_eof));
279
0
        if (is_empty_row) {
280
            // Read empty row, just continue
281
0
            continue;
282
0
        }
283
0
        ++(*read_rows);
284
0
    }
285
286
0
    return Status::OK();
287
0
}
288
289
Status NewJsonReader::_get_columns_impl(
290
0
        std::unordered_map<std::string, DataTypePtr>* name_to_type) {
291
0
    for (const auto& slot : _file_slot_descs) {
292
0
        name_to_type->emplace(slot->col_name(), slot->type());
293
0
    }
294
0
    return Status::OK();
295
0
}
296
297
// init decompressor, file reader and line reader for parsing schema
298
0
Status NewJsonReader::init_schema_reader() {
299
0
    RETURN_IF_ERROR(_get_range_params());
300
    // create decompressor.
301
    // _decompressor may be nullptr if this is not a compressed file
302
0
    RETURN_IF_ERROR(Decompressor::create_decompressor(_file_compress_type, &_decompressor));
303
0
    RETURN_IF_ERROR(_open_file_reader(true));
304
0
    if (_read_json_by_line) {
305
0
        RETURN_IF_ERROR(_open_line_reader());
306
0
    }
307
    // generate _parsed_jsonpaths and _parsed_json_root
308
0
    RETURN_IF_ERROR(_parse_jsonpath_and_json_root());
309
0
    return Status::OK();
310
0
}
311
312
Status NewJsonReader::get_parsed_schema(std::vector<std::string>* col_names,
313
0
                                        std::vector<DataTypePtr>* col_types) {
314
0
    bool eof = false;
315
0
    const uint8_t* json_str = nullptr;
316
0
    DorisUniqueBufferPtr<uint8_t> json_str_ptr;
317
0
    size_t size = 0;
318
0
    if (_line_reader != nullptr) {
319
0
        RETURN_IF_ERROR(_line_reader->read_line(&json_str, &size, &eof, _io_ctx));
320
0
    } else {
321
0
        size_t read_size = 0;
322
0
        RETURN_IF_ERROR(_read_one_message(&json_str_ptr, &read_size));
323
0
        json_str = json_str_ptr.get();
324
0
        size = read_size;
325
0
        if (read_size == 0) {
326
0
            eof = true;
327
0
        }
328
0
    }
329
330
0
    if (size == 0 || eof) {
331
0
        return Status::EndOfFile("Empty file.");
332
0
    }
333
334
    // clear memory here.
335
0
    _value_allocator.Clear();
336
0
    _parse_allocator.Clear();
337
0
    bool has_parse_error = false;
338
339
    // parse jsondata to JsonDoc
340
    // As the issue: https://github.com/Tencent/rapidjson/issues/1458
341
    // Now, rapidjson only support uint64_t, So lagreint load cause bug. We use kParseNumbersAsStringsFlag.
342
0
    if (_num_as_string) {
343
0
        has_parse_error =
344
0
                _origin_json_doc.Parse<rapidjson::kParseNumbersAsStringsFlag>((char*)json_str, size)
345
0
                        .HasParseError();
346
0
    } else {
347
0
        has_parse_error = _origin_json_doc.Parse((char*)json_str, size).HasParseError();
348
0
    }
349
350
0
    if (has_parse_error) {
351
0
        return Status::DataQualityError(
352
0
                "Parse json data for JsonDoc failed. code: {}, error info: {}",
353
0
                _origin_json_doc.GetParseError(),
354
0
                rapidjson::GetParseError_En(_origin_json_doc.GetParseError()));
355
0
    }
356
357
    // set json root
358
0
    if (!_parsed_json_root.empty()) {
359
0
        _json_doc = JsonFunctions::get_json_object_from_parsed_json(
360
0
                _parsed_json_root, &_origin_json_doc, _origin_json_doc.GetAllocator());
361
0
        if (_json_doc == nullptr) {
362
0
            return Status::DataQualityError("JSON Root not found.");
363
0
        }
364
0
    } else {
365
0
        _json_doc = &_origin_json_doc;
366
0
    }
367
368
0
    if (_json_doc->IsArray() && !_strip_outer_array) {
369
0
        return Status::DataQualityError(
370
0
                "JSON data is array-object, `strip_outer_array` must be TRUE.");
371
0
    }
372
0
    if (!_json_doc->IsArray() && _strip_outer_array) {
373
0
        return Status::DataQualityError(
374
0
                "JSON data is not an array-object, `strip_outer_array` must be FALSE.");
375
0
    }
376
377
0
    rapidjson::Value* objectValue = nullptr;
378
0
    if (_json_doc->IsArray()) {
379
0
        if (_json_doc->Size() == 0) {
380
            // may be passing an empty json, such as "[]"
381
0
            return Status::InternalError<false>("Empty first json line");
382
0
        }
383
0
        objectValue = &(*_json_doc)[0];
384
0
    } else {
385
0
        objectValue = _json_doc;
386
0
    }
387
388
0
    if (!objectValue->IsObject()) {
389
0
        return Status::DataQualityError("JSON data is not an object. but: {}",
390
0
                                        objectValue->GetType());
391
0
    }
392
393
    // use jsonpaths to col_names
394
0
    if (!_parsed_jsonpaths.empty()) {
395
0
        for (auto& _parsed_jsonpath : _parsed_jsonpaths) {
396
0
            size_t len = _parsed_jsonpath.size();
397
0
            if (len == 0) {
398
0
                return Status::InvalidArgument("It's invalid jsonpaths.");
399
0
            }
400
0
            std::string key = _parsed_jsonpath[len - 1].key;
401
0
            col_names->emplace_back(key);
402
0
            col_types->emplace_back(
403
0
                    DataTypeFactory::instance().create_data_type(PrimitiveType::TYPE_STRING, true));
404
0
        }
405
0
        return Status::OK();
406
0
    }
407
408
0
    for (int i = 0; i < objectValue->MemberCount(); ++i) {
409
0
        auto it = objectValue->MemberBegin() + i;
410
0
        col_names->emplace_back(it->name.GetString());
411
0
        col_types->emplace_back(make_nullable(std::make_shared<DataTypeString>()));
412
0
    }
413
0
    return Status::OK();
414
0
}
415
416
0
Status NewJsonReader::_get_range_params() {
417
0
    if (!_params.__isset.file_attributes) {
418
0
        return Status::InternalError<false>("BE cat get file_attributes");
419
0
    }
420
421
    // get line_delimiter
422
0
    if (_params.file_attributes.__isset.text_params &&
423
0
        _params.file_attributes.text_params.__isset.line_delimiter) {
424
0
        _line_delimiter = _params.file_attributes.text_params.line_delimiter;
425
0
        _line_delimiter_length = _line_delimiter.size();
426
0
    }
427
428
0
    if (_params.file_attributes.__isset.jsonpaths) {
429
0
        _jsonpaths = _params.file_attributes.jsonpaths;
430
0
    }
431
0
    if (_params.file_attributes.__isset.json_root) {
432
0
        _json_root = _params.file_attributes.json_root;
433
0
    }
434
0
    if (_params.file_attributes.__isset.read_json_by_line) {
435
0
        _read_json_by_line = _params.file_attributes.read_json_by_line;
436
0
    }
437
0
    if (_params.file_attributes.__isset.strip_outer_array) {
438
0
        _strip_outer_array = _params.file_attributes.strip_outer_array;
439
0
    }
440
0
    if (_params.file_attributes.__isset.num_as_string) {
441
0
        _num_as_string = _params.file_attributes.num_as_string;
442
0
    }
443
0
    if (_params.file_attributes.__isset.fuzzy_parse) {
444
0
        _fuzzy_parse = _params.file_attributes.fuzzy_parse;
445
0
    }
446
0
    if (_range.table_format_params.table_format_type == "hive") {
447
0
        _is_hive_table = true;
448
0
    }
449
0
    if (_params.file_attributes.__isset.openx_json_ignore_malformed) {
450
0
        _openx_json_ignore_malformed = _params.file_attributes.openx_json_ignore_malformed;
451
0
    }
452
0
    return Status::OK();
453
0
}
454
455
0
static Status ignore_malformed_json_append_null(Block& block) {
456
0
    for (auto& column : block.get_columns()) {
457
0
        if (!column->is_nullable()) [[unlikely]] {
458
0
            return Status::DataQualityError("malformed json, but the column `{}` is not nullable.",
459
0
                                            column->get_name());
460
0
        }
461
0
        static_cast<ColumnNullable*>(column->assume_mutable().get())->insert_default();
462
0
    }
463
0
    return Status::OK();
464
0
}
465
466
0
Status NewJsonReader::_open_file_reader(bool need_schema) {
467
0
    int64_t start_offset = _range.start_offset;
468
0
    if (start_offset != 0) {
469
0
        start_offset -= 1;
470
0
    }
471
472
0
    _current_offset = start_offset;
473
474
0
    if (_params.file_type == TFileType::FILE_STREAM) {
475
        // Due to http_stream needs to pre read a portion of the data to parse column information, so it is set to true here
476
0
        RETURN_IF_ERROR(FileFactory::create_pipe_reader(_range.load_id, &_file_reader, _state,
477
0
                                                        need_schema));
478
0
    } else {
479
0
        _file_description.mtime = _range.__isset.modification_time ? _range.modification_time : 0;
480
0
        io::FileReaderOptions reader_options =
481
0
                FileFactory::get_reader_options(_state, _file_description);
482
0
        io::FileReaderSPtr file_reader;
483
0
        if (_io_ctx_holder) {
484
0
            file_reader = DORIS_TRY(io::DelegateReader::create_file_reader(
485
0
                    _profile, _system_properties, _file_description, reader_options,
486
0
                    io::DelegateReader::AccessMode::SEQUENTIAL,
487
0
                    std::static_pointer_cast<const io::IOContext>(_io_ctx_holder),
488
0
                    io::PrefetchRange(_range.start_offset, _range.size)));
489
0
        } else {
490
0
            file_reader = DORIS_TRY(io::DelegateReader::create_file_reader(
491
0
                    _profile, _system_properties, _file_description, reader_options,
492
0
                    io::DelegateReader::AccessMode::SEQUENTIAL, _io_ctx,
493
0
                    io::PrefetchRange(_range.start_offset, _range.size)));
494
0
        }
495
0
        _file_reader = _io_ctx ? std::make_shared<io::TracingFileReader>(std::move(file_reader),
496
0
                                                                         _io_ctx->file_reader_stats)
497
0
                               : file_reader;
498
0
    }
499
0
    return Status::OK();
500
0
}
501
502
0
Status NewJsonReader::_open_line_reader() {
503
0
    int64_t size = _range.size;
504
0
    if (_range.start_offset != 0) {
505
        // When we fetch range doesn't start from 0, size will += 1.
506
0
        size += 1;
507
0
        _skip_first_line = true;
508
0
    } else {
509
0
        _skip_first_line = false;
510
0
    }
511
0
    _line_reader = NewPlainTextLineReader::create_unique(
512
0
            _profile, _file_reader, _decompressor.get(),
513
0
            std::make_shared<PlainTextLineReaderCtx>(_line_delimiter, _line_delimiter_length,
514
0
                                                     false),
515
0
            size, _current_offset);
516
0
    return Status::OK();
517
0
}
518
519
0
Status NewJsonReader::_parse_jsonpath_and_json_root() {
520
    // parse jsonpaths
521
0
    if (!_jsonpaths.empty()) {
522
0
        rapidjson::Document jsonpaths_doc;
523
0
        if (!jsonpaths_doc.Parse(_jsonpaths.c_str(), _jsonpaths.length()).HasParseError()) {
524
0
            if (!jsonpaths_doc.IsArray()) {
525
0
                return Status::InvalidJsonPath("Invalid json path: {}", _jsonpaths);
526
0
            }
527
0
            for (int i = 0; i < jsonpaths_doc.Size(); i++) {
528
0
                const rapidjson::Value& path = jsonpaths_doc[i];
529
0
                if (!path.IsString()) {
530
0
                    return Status::InvalidJsonPath("Invalid json path: {}", _jsonpaths);
531
0
                }
532
0
                std::string json_path = path.GetString();
533
                // $ -> $. in json_path
534
0
                if (UNLIKELY(json_path.size() == 1 && json_path[0] == '$')) {
535
0
                    json_path.insert(1, ".");
536
0
                }
537
0
                std::vector<JsonPath> parsed_paths;
538
0
                JsonFunctions::parse_json_paths(json_path, &parsed_paths);
539
0
                _parsed_jsonpaths.push_back(std::move(parsed_paths));
540
0
            }
541
542
0
        } else {
543
0
            return Status::InvalidJsonPath("Invalid json path: {}", _jsonpaths);
544
0
        }
545
0
    }
546
547
    // parse jsonroot
548
0
    if (!_json_root.empty()) {
549
0
        std::string json_root = _json_root;
550
        //  $ -> $. in json_root
551
0
        if (json_root.size() == 1 && json_root[0] == '$') {
552
0
            json_root.insert(1, ".");
553
0
        }
554
0
        JsonFunctions::parse_json_paths(json_root, &_parsed_json_root);
555
0
    }
556
0
    return Status::OK();
557
0
}
558
559
Status NewJsonReader::_read_json_column(RuntimeState* state, Block& block,
560
                                        const std::vector<SlotDescriptor*>& slot_descs,
561
0
                                        bool* is_empty_row, bool* eof) {
562
0
    return (this->*_vhandle_json_callback)(state, block, slot_descs, is_empty_row, eof);
563
0
}
564
565
Status NewJsonReader::_read_one_message(DorisUniqueBufferPtr<uint8_t>* file_buf,
566
0
                                        size_t* read_size) {
567
0
    switch (_params.file_type) {
568
0
    case TFileType::FILE_LOCAL:
569
0
        [[fallthrough]];
570
0
    case TFileType::FILE_HDFS:
571
0
    case TFileType::FILE_HTTP:
572
0
        [[fallthrough]];
573
0
    case TFileType::FILE_S3: {
574
0
        size_t file_size = _file_reader->size();
575
0
        *file_buf = make_unique_buffer<uint8_t>(file_size);
576
0
        Slice result(file_buf->get(), file_size);
577
0
        RETURN_IF_ERROR(_file_reader->read_at(_current_offset, result, read_size, _io_ctx));
578
0
        _current_offset += *read_size;
579
0
        break;
580
0
    }
581
0
    case TFileType::FILE_STREAM: {
582
0
        RETURN_IF_ERROR(_read_one_message_from_pipe(file_buf, read_size));
583
0
        break;
584
0
    }
585
0
    default: {
586
0
        return Status::NotSupported<false>("no supported file reader type: {}", _params.file_type);
587
0
    }
588
0
    }
589
0
    return Status::OK();
590
0
}
591
592
Status NewJsonReader::_read_one_message_from_pipe(DorisUniqueBufferPtr<uint8_t>* file_buf,
593
0
                                                  size_t* read_size) {
594
0
    auto* stream_load_pipe = dynamic_cast<io::StreamLoadPipe*>(_file_reader.get());
595
596
    // first read: read from the pipe once.
597
0
    RETURN_IF_ERROR(stream_load_pipe->read_one_message(file_buf, read_size));
598
599
    // When the file is not chunked, the entire file has already been read.
600
0
    if (!stream_load_pipe->is_chunked_transfer()) {
601
0
        return Status::OK();
602
0
    }
603
604
0
    std::vector<uint8_t> buf;
605
0
    uint64_t cur_size = 0;
606
607
    // second read: continuously read data from the pipe until all data is read.
608
0
    DorisUniqueBufferPtr<uint8_t> read_buf;
609
0
    size_t read_buf_size = 0;
610
0
    while (true) {
611
0
        RETURN_IF_ERROR(stream_load_pipe->read_one_message(&read_buf, &read_buf_size));
612
0
        if (read_buf_size == 0) {
613
0
            break;
614
0
        } else {
615
0
            buf.insert(buf.end(), read_buf.get(), read_buf.get() + read_buf_size);
616
0
            cur_size += read_buf_size;
617
0
            read_buf_size = 0;
618
0
            read_buf.reset();
619
0
        }
620
0
    }
621
622
    // No data is available during the second read.
623
0
    if (cur_size == 0) {
624
0
        return Status::OK();
625
0
    }
626
627
0
    DorisUniqueBufferPtr<uint8_t> total_buf = make_unique_buffer<uint8_t>(cur_size + *read_size);
628
629
    // copy the data during the first read
630
0
    memcpy(total_buf.get(), file_buf->get(), *read_size);
631
632
    // copy the data during the second read
633
0
    memcpy(total_buf.get() + *read_size, buf.data(), cur_size);
634
0
    *file_buf = std::move(total_buf);
635
0
    *read_size += cur_size;
636
0
    return Status::OK();
637
0
}
638
639
// ---------SIMDJSON----------
640
// simdjson, replace none simdjson function if it is ready
641
0
Status NewJsonReader::_simdjson_init_reader() {
642
0
    RETURN_IF_ERROR(_get_range_params());
643
644
0
    RETURN_IF_ERROR(_open_file_reader(false));
645
0
    if (LIKELY(_read_json_by_line)) {
646
0
        RETURN_IF_ERROR(_open_line_reader());
647
0
    }
648
649
    // generate _parsed_jsonpaths and _parsed_json_root
650
0
    RETURN_IF_ERROR(_parse_jsonpath_and_json_root());
651
652
    //improve performance
653
0
    if (_parsed_jsonpaths.empty()) { // input is a simple json-string
654
0
        _vhandle_json_callback = &NewJsonReader::_simdjson_handle_simple_json;
655
0
    } else { // input is a complex json-string and a json-path
656
0
        if (_strip_outer_array) {
657
0
            _vhandle_json_callback = &NewJsonReader::_simdjson_handle_flat_array_complex_json;
658
0
        } else {
659
0
            _vhandle_json_callback = &NewJsonReader::_simdjson_handle_nested_complex_json;
660
0
        }
661
0
    }
662
0
    _ondemand_json_parser = std::make_unique<simdjson::ondemand::parser>();
663
0
    for (int i = 0; i < _file_slot_descs.size(); ++i) {
664
0
        _slot_desc_index[StringRef {_file_slot_descs[i]->col_name()}] = i;
665
0
        if (_file_slot_descs[i]->is_skip_bitmap_col()) {
666
0
            skip_bitmap_col_idx = i;
667
0
        }
668
0
    }
669
0
    _simdjson_ondemand_padding_buffer.resize(_padded_size);
670
0
    _simdjson_ondemand_unscape_padding_buffer.resize(_padded_size);
671
0
    return Status::OK();
672
0
}
673
674
Status NewJsonReader::_handle_simdjson_error(simdjson::simdjson_error& error, Block& block,
675
0
                                             size_t num_rows, bool* eof) {
676
0
    fmt::memory_buffer error_msg;
677
0
    fmt::format_to(error_msg, "Parse json data failed. code: {}, error info: {}", error.error(),
678
0
                   error.what());
679
0
    _counter->num_rows_filtered++;
680
    // Before continuing to process other rows, we need to first clean the fail parsed row.
681
0
    for (int i = 0; i < block.columns(); ++i) {
682
0
        auto column = block.get_by_position(i).column->assume_mutable();
683
0
        if (column->size() > num_rows) {
684
0
            column->pop_back(column->size() - num_rows);
685
0
        }
686
0
    }
687
688
0
    RETURN_IF_ERROR(_state->append_error_msg_to_file(
689
0
            [&]() -> std::string {
690
0
                return std::string(_simdjson_ondemand_padding_buffer.data(), _original_doc_size);
691
0
            },
692
0
            [&]() -> std::string { return fmt::to_string(error_msg); }));
693
0
    return Status::OK();
694
0
}
695
696
Status NewJsonReader::_simdjson_handle_simple_json(RuntimeState* /*state*/, Block& block,
697
                                                   const std::vector<SlotDescriptor*>& slot_descs,
698
0
                                                   bool* is_empty_row, bool* eof) {
699
    // simple json
700
0
    size_t size = 0;
701
0
    simdjson::error_code error;
702
0
    size_t num_rows = block.rows();
703
0
    try {
704
        // step1: get and parse buf to get json doc
705
0
        RETURN_IF_ERROR(_simdjson_parse_json(&size, is_empty_row, eof, &error));
706
0
        if (size == 0 || *eof) {
707
0
            *is_empty_row = true;
708
0
            return Status::OK();
709
0
        }
710
711
        // step2: get json value by json doc
712
0
        Status st = _get_json_value(&size, eof, &error, is_empty_row);
713
0
        if (st.is<DATA_QUALITY_ERROR>()) {
714
0
            if (_is_load) {
715
0
                return Status::OK();
716
0
            } else if (_openx_json_ignore_malformed) {
717
0
                RETURN_IF_ERROR(ignore_malformed_json_append_null(block));
718
0
                return Status::OK();
719
0
            }
720
0
        }
721
722
0
        RETURN_IF_ERROR(st);
723
0
        if (*is_empty_row || *eof) {
724
0
            return Status::OK();
725
0
        }
726
727
        // step 3: write columns by json value
728
0
        RETURN_IF_ERROR(
729
0
                _simdjson_handle_simple_json_write_columns(block, slot_descs, is_empty_row, eof));
730
0
    } catch (simdjson::simdjson_error& e) {
731
0
        RETURN_IF_ERROR(_handle_simdjson_error(e, block, num_rows, eof));
732
0
        if (*_scanner_eof) {
733
            // When _scanner_eof is true and valid is false, it means that we have encountered
734
            // unqualified data and decided to stop the scan.
735
0
            *is_empty_row = true;
736
0
            return Status::OK();
737
0
        }
738
0
    }
739
740
0
    return Status::OK();
741
0
}
742
743
Status NewJsonReader::_simdjson_handle_simple_json_write_columns(
744
        Block& block, const std::vector<SlotDescriptor*>& slot_descs, bool* is_empty_row,
745
0
        bool* eof) {
746
0
    simdjson::ondemand::object objectValue;
747
0
    size_t num_rows = block.rows();
748
0
    bool valid = false;
749
0
    try {
750
0
        if (_json_value.type() == simdjson::ondemand::json_type::array) {
751
0
            _array = _json_value.get_array();
752
0
            if (_array.count_elements() == 0) {
753
                // may be passing an empty json, such as "[]"
754
0
                RETURN_IF_ERROR(_append_error_msg(nullptr, "Empty json line", "", nullptr));
755
0
                if (*_scanner_eof) {
756
0
                    *is_empty_row = true;
757
0
                    return Status::OK();
758
0
                }
759
0
                return Status::OK();
760
0
            }
761
762
0
            _array_iter = _array.begin();
763
0
            while (true) {
764
0
                objectValue = *_array_iter;
765
0
                RETURN_IF_ERROR(
766
0
                        _simdjson_set_column_value(&objectValue, block, slot_descs, &valid));
767
0
                if (!valid) {
768
0
                    if (*_scanner_eof) {
769
                        // When _scanner_eof is true and valid is false, it means that we have encountered
770
                        // unqualified data and decided to stop the scan.
771
0
                        *is_empty_row = true;
772
0
                        return Status::OK();
773
0
                    }
774
0
                }
775
0
                ++_array_iter;
776
0
                if (_array_iter == _array.end()) {
777
                    // Hint to read next json doc
778
0
                    break;
779
0
                }
780
0
            }
781
0
        } else {
782
0
            objectValue = _json_value;
783
0
            RETURN_IF_ERROR(_simdjson_set_column_value(&objectValue, block, slot_descs, &valid));
784
0
            if (!valid) {
785
0
                if (*_scanner_eof) {
786
0
                    *is_empty_row = true;
787
0
                    return Status::OK();
788
0
                }
789
0
            }
790
0
            *is_empty_row = false;
791
0
        }
792
0
    } catch (simdjson::simdjson_error& e) {
793
0
        RETURN_IF_ERROR(_handle_simdjson_error(e, block, num_rows, eof));
794
0
        if (!valid) {
795
0
            if (*_scanner_eof) {
796
0
                *is_empty_row = true;
797
0
                return Status::OK();
798
0
            }
799
0
        }
800
0
    }
801
0
    return Status::OK();
802
0
}
803
804
Status NewJsonReader::_simdjson_handle_flat_array_complex_json(
805
        RuntimeState* /*state*/, Block& block, const std::vector<SlotDescriptor*>& slot_descs,
806
0
        bool* is_empty_row, bool* eof) {
807
    // array complex json
808
0
    size_t size = 0;
809
0
    simdjson::error_code error;
810
0
    size_t num_rows = block.rows();
811
0
    try {
812
        // step1: get and parse buf to get json doc
813
0
        RETURN_IF_ERROR(_simdjson_parse_json(&size, is_empty_row, eof, &error));
814
0
        if (size == 0 || *eof) {
815
0
            *is_empty_row = true;
816
0
            return Status::OK();
817
0
        }
818
819
        // step2: get json value by json doc
820
0
        Status st = _get_json_value(&size, eof, &error, is_empty_row);
821
0
        if (st.is<DATA_QUALITY_ERROR>()) {
822
0
            return Status::OK();
823
0
        }
824
0
        RETURN_IF_ERROR(st);
825
0
        if (*is_empty_row) {
826
0
            return Status::OK();
827
0
        }
828
829
        // step 3: write columns by json value
830
0
        RETURN_IF_ERROR(_simdjson_handle_flat_array_complex_json_write_columns(block, slot_descs,
831
0
                                                                               is_empty_row, eof));
832
0
    } catch (simdjson::simdjson_error& e) {
833
0
        RETURN_IF_ERROR(_handle_simdjson_error(e, block, num_rows, eof));
834
0
        if (*_scanner_eof) {
835
            // When _scanner_eof is true and valid is false, it means that we have encountered
836
            // unqualified data and decided to stop the scan.
837
0
            *is_empty_row = true;
838
0
            return Status::OK();
839
0
        }
840
0
    }
841
842
0
    return Status::OK();
843
0
}
844
845
Status NewJsonReader::_simdjson_handle_flat_array_complex_json_write_columns(
846
        Block& block, const std::vector<SlotDescriptor*>& slot_descs, bool* is_empty_row,
847
0
        bool* eof) {
848
// Advance one row in array list, if it is the endpoint, stop advance and break the loop
849
0
#define ADVANCE_ROW()                  \
850
0
    ++_array_iter;                     \
851
0
    if (_array_iter == _array.end()) { \
852
0
        break;                         \
853
0
    }
854
855
0
    simdjson::ondemand::object cur;
856
0
    size_t num_rows = block.rows();
857
0
    try {
858
0
        bool valid = true;
859
0
        _array = _json_value.get_array();
860
0
        _array_iter = _array.begin();
861
862
0
        while (true) {
863
0
            cur = (*_array_iter).get_object();
864
            // extract root
865
0
            if (!_parsed_from_json_root && !_parsed_json_root.empty()) {
866
0
                simdjson::ondemand::value val;
867
0
                Status st = JsonFunctions::extract_from_object(cur, _parsed_json_root, &val);
868
0
                if (UNLIKELY(!st.ok())) {
869
0
                    if (st.is<NOT_FOUND>()) {
870
0
                        RETURN_IF_ERROR(_append_error_msg(nullptr, st.to_string(), "", nullptr));
871
0
                        ADVANCE_ROW();
872
0
                        continue;
873
0
                    }
874
0
                    return st;
875
0
                }
876
0
                if (val.type() != simdjson::ondemand::json_type::object) {
877
0
                    RETURN_IF_ERROR(_append_error_msg(nullptr, "Not object item", "", nullptr));
878
0
                    ADVANCE_ROW();
879
0
                    continue;
880
0
                }
881
0
                cur = val.get_object();
882
0
            }
883
0
            RETURN_IF_ERROR(_simdjson_write_columns_by_jsonpath(&cur, slot_descs, block, &valid));
884
0
            ADVANCE_ROW();
885
0
            if (!valid) {
886
0
                continue; // process next line
887
0
            }
888
0
            *is_empty_row = false;
889
0
        }
890
0
    } catch (simdjson::simdjson_error& e) {
891
0
        RETURN_IF_ERROR(_handle_simdjson_error(e, block, num_rows, eof));
892
0
        if (*_scanner_eof) {
893
            // When _scanner_eof is true and valid is false, it means that we have encountered
894
            // unqualified data and decided to stop the scan.
895
0
            *is_empty_row = true;
896
0
            return Status::OK();
897
0
        }
898
0
    }
899
900
0
    return Status::OK();
901
0
}
902
903
Status NewJsonReader::_simdjson_handle_nested_complex_json(
904
        RuntimeState* /*state*/, Block& block, const std::vector<SlotDescriptor*>& slot_descs,
905
0
        bool* is_empty_row, bool* eof) {
906
    // nested complex json
907
0
    while (true) {
908
0
        size_t num_rows = block.rows();
909
0
        simdjson::ondemand::object cur;
910
0
        size_t size = 0;
911
0
        simdjson::error_code error;
912
0
        try {
913
0
            RETURN_IF_ERROR(_simdjson_parse_json(&size, is_empty_row, eof, &error));
914
0
            if (size == 0 || *eof) {
915
0
                *is_empty_row = true;
916
0
                return Status::OK();
917
0
            }
918
0
            Status st = _get_json_value(&size, eof, &error, is_empty_row);
919
0
            if (st.is<DATA_QUALITY_ERROR>()) {
920
0
                continue; // continue to read next
921
0
            }
922
0
            RETURN_IF_ERROR(st);
923
0
            if (*is_empty_row) {
924
0
                return Status::OK();
925
0
            }
926
0
            *is_empty_row = false;
927
0
            bool valid = true;
928
0
            if (_json_value.type() != simdjson::ondemand::json_type::object) {
929
0
                RETURN_IF_ERROR(_append_error_msg(nullptr, "Not object item", "", nullptr));
930
0
                continue;
931
0
            }
932
0
            cur = _json_value.get_object();
933
0
            st = _simdjson_write_columns_by_jsonpath(&cur, slot_descs, block, &valid);
934
0
            if (!st.ok()) {
935
0
                RETURN_IF_ERROR(_append_error_msg(nullptr, st.to_string(), "", nullptr));
936
                // Before continuing to process other rows, we need to first clean the fail parsed row.
937
0
                for (int i = 0; i < block.columns(); ++i) {
938
0
                    auto column = block.get_by_position(i).column->assume_mutable();
939
0
                    if (column->size() > num_rows) {
940
0
                        column->pop_back(column->size() - num_rows);
941
0
                    }
942
0
                }
943
0
                continue;
944
0
            }
945
0
            if (!valid) {
946
                // there is only one line in this case, so if it return false, just set is_empty_row true
947
                // so that the caller will continue reading next line.
948
0
                *is_empty_row = true;
949
0
            }
950
0
            break; // read a valid row
951
0
        } catch (simdjson::simdjson_error& e) {
952
0
            RETURN_IF_ERROR(_handle_simdjson_error(e, block, num_rows, eof));
953
0
            if (*_scanner_eof) {
954
                // When _scanner_eof is true and valid is false, it means that we have encountered
955
                // unqualified data and decided to stop the scan.
956
0
                *is_empty_row = true;
957
0
                return Status::OK();
958
0
            }
959
0
            continue;
960
0
        }
961
0
    }
962
0
    return Status::OK();
963
0
}
964
965
0
size_t NewJsonReader::_column_index(const StringRef& name, size_t key_index) {
966
    /// Optimization by caching the order of fields (which is almost always the same)
967
    /// and a quick check to match the next expected field, instead of searching the hash table.
968
0
    if (_prev_positions.size() > key_index && name == _prev_positions[key_index]->first) {
969
0
        return _prev_positions[key_index]->second;
970
0
    }
971
0
    auto it = _slot_desc_index.find(name);
972
0
    if (it != _slot_desc_index.end()) {
973
0
        if (key_index < _prev_positions.size()) {
974
0
            _prev_positions[key_index] = it;
975
0
        }
976
0
        return it->second;
977
0
    }
978
0
    return size_t(-1);
979
0
}
980
981
Status NewJsonReader::_simdjson_set_column_value(simdjson::ondemand::object* value, Block& block,
982
                                                 const std::vector<SlotDescriptor*>& slot_descs,
983
0
                                                 bool* valid) {
984
    // set
985
0
    _seen_columns.assign(block.columns(), false);
986
0
    size_t cur_row_count = block.rows();
987
0
    bool has_valid_value = false;
988
    // iterate through object, simdjson::ondemond will parsing on the fly
989
0
    size_t key_index = 0;
990
991
0
    for (auto field : *value) {
992
0
        std::string_view key = field.unescaped_key();
993
0
        StringRef name_ref(key.data(), key.size());
994
0
        std::string key_string;
995
0
        if (_is_hive_table) {
996
0
            key_string = name_ref.to_string();
997
0
            std::transform(key_string.begin(), key_string.end(), key_string.begin(), ::tolower);
998
0
            name_ref = StringRef(key_string);
999
0
        }
1000
0
        const size_t column_index = _column_index(name_ref, key_index++);
1001
0
        if (UNLIKELY(ssize_t(column_index) < 0)) {
1002
            // This key is not exist in slot desc, just ignore
1003
0
            continue;
1004
0
        }
1005
0
        if (column_index == skip_bitmap_col_idx) {
1006
0
            continue;
1007
0
        }
1008
0
        if (_seen_columns[column_index]) {
1009
0
            if (_is_hive_table) {
1010
                //Since value can only be traversed once,
1011
                // we can only insert the original value first, then delete it, and then reinsert the new value
1012
0
                block.get_by_position(column_index).column->assume_mutable()->pop_back(1);
1013
0
            } else {
1014
0
                continue;
1015
0
            }
1016
0
        }
1017
0
        simdjson::ondemand::value val = field.value();
1018
0
        auto* column_ptr = block.get_by_position(column_index).column->assume_mutable().get();
1019
0
        RETURN_IF_ERROR(_simdjson_write_data_to_column<false>(
1020
0
                val, slot_descs[column_index]->type(), column_ptr,
1021
0
                slot_descs[column_index]->col_name(), _serdes[column_index], valid));
1022
0
        if (!(*valid)) {
1023
0
            return Status::OK();
1024
0
        }
1025
0
        _seen_columns[column_index] = true;
1026
0
        has_valid_value = true;
1027
0
    }
1028
1029
0
    if (!has_valid_value && _is_load) {
1030
0
        std::string col_names;
1031
0
        for (auto* slot_desc : slot_descs) {
1032
0
            col_names.append(slot_desc->col_name() + ", ");
1033
0
        }
1034
0
        RETURN_IF_ERROR(_append_error_msg(value,
1035
0
                                          "There is no column matching jsonpaths in the json file, "
1036
0
                                          "columns:[{}], please check columns "
1037
0
                                          "and jsonpaths:" +
1038
0
                                                  _jsonpaths,
1039
0
                                          col_names, valid));
1040
0
        return Status::OK();
1041
0
    }
1042
1043
0
    if (_should_process_skip_bitmap_col()) {
1044
0
        _append_empty_skip_bitmap_value(block, cur_row_count);
1045
0
    }
1046
1047
    // fill missing slot
1048
0
    int nullcount = 0;
1049
0
    for (size_t i = 0; i < slot_descs.size(); ++i) {
1050
0
        if (_seen_columns[i]) {
1051
0
            continue;
1052
0
        }
1053
0
        if (i == skip_bitmap_col_idx) {
1054
0
            continue;
1055
0
        }
1056
1057
0
        auto* slot_desc = slot_descs[i];
1058
0
        auto* column_ptr = block.get_by_position(i).column->assume_mutable().get();
1059
1060
        // Quick path to insert default value, instead of using default values in the value map.
1061
0
        if (!_should_process_skip_bitmap_col() &&
1062
0
            (_col_default_value_map.empty() ||
1063
0
             _col_default_value_map.find(slot_desc->col_name()) == _col_default_value_map.end())) {
1064
0
            column_ptr->insert_default();
1065
0
            continue;
1066
0
        }
1067
0
        if (column_ptr->size() < cur_row_count + 1) {
1068
0
            DCHECK(column_ptr->size() == cur_row_count);
1069
0
            if (_should_process_skip_bitmap_col()) {
1070
                // not found, skip this column in flexible partial update
1071
0
                if (slot_desc->is_key() && !slot_desc->is_auto_increment()) {
1072
0
                    RETURN_IF_ERROR(
1073
0
                            _append_error_msg(value,
1074
0
                                              "The key columns can not be ommited in flexible "
1075
0
                                              "partial update, missing key column: {}",
1076
0
                                              slot_desc->col_name(), valid));
1077
                    // remove this line in block
1078
0
                    for (size_t index = 0; index < block.columns(); ++index) {
1079
0
                        auto column = block.get_by_position(index).column->assume_mutable();
1080
0
                        if (column->size() != cur_row_count) {
1081
0
                            DCHECK(column->size() == cur_row_count + 1);
1082
0
                            column->pop_back(1);
1083
0
                            DCHECK(column->size() == cur_row_count);
1084
0
                        }
1085
0
                    }
1086
0
                    return Status::OK();
1087
0
                }
1088
0
                _set_skip_bitmap_mark(slot_desc, column_ptr, block, cur_row_count, valid);
1089
0
                column_ptr->insert_default();
1090
0
            } else {
1091
0
                RETURN_IF_ERROR(_fill_missing_column(slot_desc, _serdes[i], column_ptr, valid));
1092
0
                if (!(*valid)) {
1093
0
                    return Status::OK();
1094
0
                }
1095
0
            }
1096
0
            ++nullcount;
1097
0
        }
1098
0
        DCHECK(column_ptr->size() == cur_row_count + 1);
1099
0
    }
1100
1101
    // There is at least one valid value here
1102
0
    DCHECK(nullcount < block.columns());
1103
0
    *valid = true;
1104
0
    return Status::OK();
1105
0
}
1106
1107
template <bool use_string_cache>
1108
Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& value,
1109
                                                     const DataTypePtr& type_desc,
1110
                                                     IColumn* column_ptr,
1111
                                                     const std::string& column_name,
1112
0
                                                     DataTypeSerDeSPtr serde, bool* valid) {
1113
0
    ColumnNullable* nullable_column = nullptr;
1114
0
    IColumn* data_column_ptr = column_ptr;
1115
0
    DataTypeSerDeSPtr data_serde = serde;
1116
1117
0
    if (column_ptr->is_nullable()) {
1118
0
        nullable_column = reinterpret_cast<ColumnNullable*>(column_ptr);
1119
1120
0
        data_column_ptr = nullable_column->get_nested_column().get_ptr().get();
1121
0
        data_serde = serde->get_nested_serdes()[0];
1122
1123
        // kNullType will put 1 into the Null map, so there is no need to push 0 for kNullType.
1124
0
        if (value.type() == simdjson::ondemand::json_type::null) {
1125
0
            nullable_column->insert_default();
1126
0
            *valid = true;
1127
0
            return Status::OK();
1128
0
        }
1129
0
    } else if (value.type() == simdjson::ondemand::json_type::null) [[unlikely]] {
1130
0
        if (_is_load) {
1131
0
            RETURN_IF_ERROR(_append_error_msg(
1132
0
                    nullptr, "Json value is null, but the column `{}` is not nullable.",
1133
0
                    column_name, valid));
1134
0
            return Status::OK();
1135
0
        } else {
1136
0
            return Status::DataQualityError(
1137
0
                    "Json value is null, but the column `{}` is not nullable.", column_name);
1138
0
        }
1139
0
    }
1140
1141
0
    auto primitive_type = type_desc->get_primitive_type();
1142
0
    if (_is_load || !is_complex_type(primitive_type)) {
1143
0
        if (value.type() == simdjson::ondemand::json_type::string) {
1144
0
            std::string_view value_string;
1145
0
            if constexpr (use_string_cache) {
1146
0
                const auto cache_key = value.raw_json().value();
1147
0
                if (_cached_string_values.contains(cache_key)) {
1148
0
                    value_string = _cached_string_values[cache_key];
1149
0
                } else {
1150
0
                    value_string = value.get_string();
1151
0
                    _cached_string_values.emplace(cache_key, value_string);
1152
0
                }
1153
0
            } else {
1154
0
                DCHECK(_cached_string_values.empty());
1155
0
                value_string = value.get_string();
1156
0
            }
1157
1158
0
            Slice slice {value_string.data(), value_string.size()};
1159
0
            RETURN_IF_ERROR(data_serde->deserialize_one_cell_from_json(*data_column_ptr, slice,
1160
0
                                                                       _serde_options));
1161
1162
0
        } else if (value.type() == simdjson::ondemand::json_type::boolean) {
1163
0
            const char* str_value = nullptr;
1164
            // insert "1"/"0" , not "true"/"false".
1165
0
            if (value.get_bool()) {
1166
0
                str_value = (char*)"1";
1167
0
            } else {
1168
0
                str_value = (char*)"0";
1169
0
            }
1170
0
            Slice slice {str_value, 1};
1171
0
            RETURN_IF_ERROR(data_serde->deserialize_one_cell_from_json(*data_column_ptr, slice,
1172
0
                                                                       _serde_options));
1173
0
        } else {
1174
            // Maybe we can `switch (value->GetType()) case: kNumberType`.
1175
            // Note that `if (value->IsInt())`, but column is FloatColumn.
1176
0
            std::string_view json_str = simdjson::to_json_string(value);
1177
0
            Slice slice {json_str.data(), json_str.size()};
1178
0
            RETURN_IF_ERROR(data_serde->deserialize_one_cell_from_json(*data_column_ptr, slice,
1179
0
                                                                       _serde_options));
1180
0
        }
1181
0
    } else if (primitive_type == TYPE_STRUCT) {
1182
0
        if (value.type() != simdjson::ondemand::json_type::object) [[unlikely]] {
1183
0
            return Status::DataQualityError(
1184
0
                    "Json value isn't object, but the column `{}` is struct.", column_name);
1185
0
        }
1186
1187
0
        const auto* type_struct =
1188
0
                assert_cast<const DataTypeStruct*>(remove_nullable(type_desc).get());
1189
0
        auto sub_col_size = type_struct->get_elements().size();
1190
0
        simdjson::ondemand::object struct_value = value.get_object();
1191
0
        auto sub_serdes = data_serde->get_nested_serdes();
1192
0
        auto* struct_column_ptr = assert_cast<ColumnStruct*>(data_column_ptr);
1193
1194
0
        std::map<std::string, size_t> sub_col_name_to_idx;
1195
0
        for (size_t sub_col_idx = 0; sub_col_idx < sub_col_size; sub_col_idx++) {
1196
0
            sub_col_name_to_idx.emplace(type_struct->get_element_name(sub_col_idx), sub_col_idx);
1197
0
        }
1198
0
        std::vector<bool> has_value(sub_col_size, false);
1199
0
        for (simdjson::ondemand::field sub : struct_value) {
1200
0
            std::string_view sub_key_view = sub.unescaped_key();
1201
0
            std::string sub_key(sub_key_view.data(), sub_key_view.length());
1202
0
            std::transform(sub_key.begin(), sub_key.end(), sub_key.begin(), ::tolower);
1203
1204
0
            if (sub_col_name_to_idx.find(sub_key) == sub_col_name_to_idx.end()) [[unlikely]] {
1205
0
                continue;
1206
0
            }
1207
0
            size_t sub_column_idx = sub_col_name_to_idx[sub_key];
1208
0
            auto sub_column_ptr = struct_column_ptr->get_column(sub_column_idx).get_ptr();
1209
1210
0
            if (has_value[sub_column_idx]) [[unlikely]] {
1211
                // Since struct_value can only be traversed once, we can only insert
1212
                // the original value first, then delete it, and then reinsert the new value.
1213
0
                sub_column_ptr->pop_back(1);
1214
0
            }
1215
0
            has_value[sub_column_idx] = true;
1216
1217
0
            const auto& sub_col_type = type_struct->get_element(sub_column_idx);
1218
0
            RETURN_IF_ERROR(_simdjson_write_data_to_column<use_string_cache>(
1219
0
                    sub.value(), sub_col_type, sub_column_ptr.get(), column_name + "." + sub_key,
1220
0
                    sub_serdes[sub_column_idx], valid));
1221
0
        }
1222
1223
        //fill missing subcolumn
1224
0
        for (size_t sub_col_idx = 0; sub_col_idx < sub_col_size; sub_col_idx++) {
1225
0
            if (has_value[sub_col_idx]) {
1226
0
                continue;
1227
0
            }
1228
1229
0
            auto sub_column_ptr = struct_column_ptr->get_column(sub_col_idx).get_ptr();
1230
0
            if (sub_column_ptr->is_nullable()) {
1231
0
                sub_column_ptr->insert_default();
1232
0
                continue;
1233
0
            } else [[unlikely]] {
1234
0
                return Status::DataQualityError(
1235
0
                        "Json file structColumn miss field {} and this column isn't nullable.",
1236
0
                        column_name + "." + type_struct->get_element_name(sub_col_idx));
1237
0
            }
1238
0
        }
1239
0
    } else if (primitive_type == TYPE_MAP) {
1240
0
        if (value.type() != simdjson::ondemand::json_type::object) [[unlikely]] {
1241
0
            return Status::DataQualityError("Json value isn't object, but the column `{}` is map.",
1242
0
                                            column_name);
1243
0
        }
1244
0
        simdjson::ondemand::object object_value = value.get_object();
1245
1246
0
        auto sub_serdes = data_serde->get_nested_serdes();
1247
0
        auto* map_column_ptr = assert_cast<ColumnMap*>(data_column_ptr);
1248
1249
0
        size_t field_count = 0;
1250
0
        for (simdjson::ondemand::field member_value : object_value) {
1251
0
            auto f = [](std::string_view key_view, const DataTypePtr& type_desc,
1252
0
                        IColumn* column_ptr, DataTypeSerDeSPtr serde,
1253
0
                        DataTypeSerDe::FormatOptions serde_options, bool* valid) {
1254
0
                auto* data_column_ptr = column_ptr;
1255
0
                auto data_serde = serde;
1256
0
                if (column_ptr->is_nullable()) {
1257
0
                    auto* nullable_column = static_cast<ColumnNullable*>(column_ptr);
1258
1259
0
                    nullable_column->get_null_map_data().push_back(0);
1260
0
                    data_column_ptr = nullable_column->get_nested_column().get_ptr().get();
1261
0
                    data_serde = serde->get_nested_serdes()[0];
1262
0
                }
1263
0
                Slice slice(key_view.data(), key_view.length());
1264
1265
0
                RETURN_IF_ERROR(data_serde->deserialize_one_cell_from_json(*data_column_ptr, slice,
1266
0
                                                                           serde_options));
1267
0
                return Status::OK();
1268
0
            };
Unexecuted instantiation: _ZZN5doris13NewJsonReader30_simdjson_write_data_to_columnILb0EEENS_6StatusERN8simdjson8fallback8ondemand5valueERKSt10shared_ptrIKNS_9IDataTypeEEPNS_7IColumnERKNSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEES8_INS_13DataTypeSerDeEEPbENKUlSt17basic_string_viewIcSJ_ESD_SF_SP_NSO_13FormatOptionsESQ_E_clESS_SD_SF_SP_ST_SQ_
Unexecuted instantiation: _ZZN5doris13NewJsonReader30_simdjson_write_data_to_columnILb1EEENS_6StatusERN8simdjson8fallback8ondemand5valueERKSt10shared_ptrIKNS_9IDataTypeEEPNS_7IColumnERKNSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEES8_INS_13DataTypeSerDeEEPbENKUlSt17basic_string_viewIcSJ_ESD_SF_SP_NSO_13FormatOptionsESQ_E_clESS_SD_SF_SP_ST_SQ_
1269
1270
0
            RETURN_IF_ERROR(f(member_value.unescaped_key(),
1271
0
                              assert_cast<const DataTypeMap*>(remove_nullable(type_desc).get())
1272
0
                                      ->get_key_type(),
1273
0
                              map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr().get(),
1274
0
                              sub_serdes[0], _serde_options, valid));
1275
1276
0
            simdjson::ondemand::value field_value = member_value.value();
1277
0
            RETURN_IF_ERROR(_simdjson_write_data_to_column<use_string_cache>(
1278
0
                    field_value,
1279
0
                    assert_cast<const DataTypeMap*>(remove_nullable(type_desc).get())
1280
0
                            ->get_value_type(),
1281
0
                    map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr().get(),
1282
0
                    column_name + ".value", sub_serdes[1], valid));
1283
0
            field_count++;
1284
0
        }
1285
1286
0
        auto& offsets = map_column_ptr->get_offsets();
1287
0
        offsets.emplace_back(offsets.back() + field_count);
1288
1289
0
    } else if (primitive_type == TYPE_ARRAY) {
1290
0
        if (value.type() != simdjson::ondemand::json_type::array) [[unlikely]] {
1291
0
            return Status::DataQualityError("Json value isn't array, but the column `{}` is array.",
1292
0
                                            column_name);
1293
0
        }
1294
1295
0
        simdjson::ondemand::array array_value = value.get_array();
1296
1297
0
        auto sub_serdes = data_serde->get_nested_serdes();
1298
0
        auto* array_column_ptr = assert_cast<ColumnArray*>(data_column_ptr);
1299
1300
0
        int field_count = 0;
1301
0
        for (simdjson::ondemand::value sub_value : array_value) {
1302
0
            RETURN_IF_ERROR(_simdjson_write_data_to_column<use_string_cache>(
1303
0
                    sub_value,
1304
0
                    assert_cast<const DataTypeArray*>(remove_nullable(type_desc).get())
1305
0
                            ->get_nested_type(),
1306
0
                    array_column_ptr->get_data().get_ptr().get(), column_name + ".element",
1307
0
                    sub_serdes[0], valid));
1308
0
            field_count++;
1309
0
        }
1310
0
        auto& offsets = array_column_ptr->get_offsets();
1311
0
        offsets.emplace_back(offsets.back() + field_count);
1312
1313
0
    } else {
1314
0
        return Status::InternalError("Not support load to complex column.");
1315
0
    }
1316
    //We need to finally set the nullmap of column_nullable to keep the size consistent with data_column
1317
0
    if (nullable_column && value.type() != simdjson::ondemand::json_type::null) {
1318
0
        nullable_column->get_null_map_data().push_back(0);
1319
0
    }
1320
0
    *valid = true;
1321
0
    return Status::OK();
1322
0
}
Unexecuted instantiation: _ZN5doris13NewJsonReader30_simdjson_write_data_to_columnILb0EEENS_6StatusERN8simdjson8fallback8ondemand5valueERKSt10shared_ptrIKNS_9IDataTypeEEPNS_7IColumnERKNSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEES8_INS_13DataTypeSerDeEEPb
Unexecuted instantiation: _ZN5doris13NewJsonReader30_simdjson_write_data_to_columnILb1EEENS_6StatusERN8simdjson8fallback8ondemand5valueERKSt10shared_ptrIKNS_9IDataTypeEEPNS_7IColumnERKNSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEES8_INS_13DataTypeSerDeEEPb
1323
1324
Status NewJsonReader::_append_error_msg(simdjson::ondemand::object* obj, std::string error_msg,
1325
0
                                        std::string col_name, bool* valid) {
1326
0
    std::string err_msg;
1327
0
    if (!col_name.empty()) {
1328
0
        fmt::memory_buffer error_buf;
1329
0
        fmt::format_to(error_buf, error_msg, col_name, _jsonpaths);
1330
0
        err_msg = fmt::to_string(error_buf);
1331
0
    } else {
1332
0
        err_msg = error_msg;
1333
0
    }
1334
1335
0
    _counter->num_rows_filtered++;
1336
0
    if (valid != nullptr) {
1337
        // current row is invalid
1338
0
        *valid = false;
1339
0
    }
1340
1341
0
    RETURN_IF_ERROR(_state->append_error_msg_to_file(
1342
0
            [&]() -> std::string {
1343
0
                if (!obj) {
1344
0
                    return "";
1345
0
                }
1346
0
                std::string_view str_view;
1347
0
                (void)!obj->raw_json().get(str_view);
1348
0
                return std::string(str_view.data(), str_view.size());
1349
0
            },
1350
0
            [&]() -> std::string { return err_msg; }));
1351
0
    return Status::OK();
1352
0
}
1353
1354
Status NewJsonReader::_simdjson_parse_json(size_t* size, bool* is_empty_row, bool* eof,
1355
0
                                           simdjson::error_code* error) {
1356
0
    SCOPED_TIMER(_read_timer);
1357
    // step1: read buf from pipe.
1358
0
    if (_line_reader != nullptr) {
1359
0
        RETURN_IF_ERROR(_line_reader->read_line(&_json_str, size, eof, _io_ctx));
1360
0
    } else {
1361
0
        size_t length = 0;
1362
0
        RETURN_IF_ERROR(_read_one_message(&_json_str_ptr, &length));
1363
0
        _json_str = _json_str_ptr.get();
1364
0
        *size = length;
1365
0
        if (length == 0) {
1366
0
            *eof = true;
1367
0
        }
1368
0
    }
1369
0
    if (*eof) {
1370
0
        return Status::OK();
1371
0
    }
1372
1373
    // step2: init json parser iterate.
1374
0
    if (*size + simdjson::SIMDJSON_PADDING > _padded_size) {
1375
        // For efficiency reasons, simdjson requires a string with a few bytes (simdjson::SIMDJSON_PADDING) at the end.
1376
        // Hence, a re-allocation is needed if the space is not enough.
1377
0
        _simdjson_ondemand_padding_buffer.resize(*size + simdjson::SIMDJSON_PADDING);
1378
0
        _simdjson_ondemand_unscape_padding_buffer.resize(*size + simdjson::SIMDJSON_PADDING);
1379
0
        _padded_size = *size + simdjson::SIMDJSON_PADDING;
1380
0
    }
1381
    // trim BOM since simdjson does not handle UTF-8 Unicode (with BOM)
1382
0
    if (*size >= 3 && static_cast<char>(_json_str[0]) == '\xEF' &&
1383
0
        static_cast<char>(_json_str[1]) == '\xBB' && static_cast<char>(_json_str[2]) == '\xBF') {
1384
        // skip the first three BOM bytes
1385
0
        _json_str += 3;
1386
0
        *size -= 3;
1387
0
    }
1388
0
    memcpy(&_simdjson_ondemand_padding_buffer.front(), _json_str, *size);
1389
0
    _original_doc_size = *size;
1390
0
    *error = _ondemand_json_parser
1391
0
                     ->iterate(std::string_view(_simdjson_ondemand_padding_buffer.data(), *size),
1392
0
                               _padded_size)
1393
0
                     .get(_original_json_doc);
1394
0
    return Status::OK();
1395
0
}
1396
1397
0
Status NewJsonReader::_judge_empty_row(size_t size, bool eof, bool* is_empty_row) {
1398
0
    if (size == 0 || eof) {
1399
0
        *is_empty_row = true;
1400
0
        return Status::OK();
1401
0
    }
1402
1403
0
    if (!_parsed_jsonpaths.empty() && _strip_outer_array) {
1404
0
        _total_rows = _json_value.count_elements().value();
1405
0
        _next_row = 0;
1406
1407
0
        if (_total_rows == 0) {
1408
            // meet an empty json array.
1409
0
            *is_empty_row = true;
1410
0
        }
1411
0
    }
1412
0
    return Status::OK();
1413
0
}
1414
1415
Status NewJsonReader::_get_json_value(size_t* size, bool* eof, simdjson::error_code* error,
1416
0
                                      bool* is_empty_row) {
1417
0
    SCOPED_TIMER(_read_timer);
1418
0
    auto return_quality_error = [&](fmt::memory_buffer& error_msg,
1419
0
                                    const std::string& doc_info) -> Status {
1420
0
        _counter->num_rows_filtered++;
1421
0
        RETURN_IF_ERROR(_state->append_error_msg_to_file(
1422
0
                [&]() -> std::string { return doc_info; },
1423
0
                [&]() -> std::string { return fmt::to_string(error_msg); }));
1424
0
        if (*_scanner_eof) {
1425
            // Case A: if _scanner_eof is set to true in "append_error_msg_to_file", which means
1426
            // we meet enough invalid rows and the scanner should be stopped.
1427
            // So we set eof to true and return OK, the caller will stop the process as we meet the end of file.
1428
0
            *eof = true;
1429
0
            return Status::OK();
1430
0
        }
1431
0
        return Status::DataQualityError(fmt::to_string(error_msg));
1432
0
    };
1433
0
    if (*error != simdjson::error_code::SUCCESS) {
1434
0
        fmt::memory_buffer error_msg;
1435
0
        fmt::format_to(error_msg, "Parse json data for JsonDoc failed. code: {}, error info: {}",
1436
0
                       *error, simdjson::error_message(*error));
1437
0
        return return_quality_error(error_msg, std::string((char*)_json_str, *size));
1438
0
    }
1439
0
    auto type_res = _original_json_doc.type();
1440
0
    if (type_res.error() != simdjson::error_code::SUCCESS) {
1441
0
        fmt::memory_buffer error_msg;
1442
0
        fmt::format_to(error_msg, "Parse json data for JsonDoc failed. code: {}, error info: {}",
1443
0
                       type_res.error(), simdjson::error_message(type_res.error()));
1444
0
        return return_quality_error(error_msg, std::string((char*)_json_str, *size));
1445
0
    }
1446
0
    simdjson::ondemand::json_type type = type_res.value();
1447
0
    if (type != simdjson::ondemand::json_type::object &&
1448
0
        type != simdjson::ondemand::json_type::array) {
1449
0
        fmt::memory_buffer error_msg;
1450
0
        fmt::format_to(error_msg, "Not an json object or json array");
1451
0
        return return_quality_error(error_msg, std::string((char*)_json_str, *size));
1452
0
    }
1453
0
    if (!_parsed_json_root.empty() && type == simdjson::ondemand::json_type::object) {
1454
0
        try {
1455
            // set json root
1456
            // if it is an array at top level, then we should iterate the entire array in
1457
            // ::_simdjson_handle_flat_array_complex_json
1458
0
            simdjson::ondemand::object object = _original_json_doc;
1459
0
            Status st = JsonFunctions::extract_from_object(object, _parsed_json_root, &_json_value);
1460
0
            if (!st.ok()) {
1461
0
                fmt::memory_buffer error_msg;
1462
0
                fmt::format_to(error_msg, "{}", st.to_string());
1463
0
                return return_quality_error(error_msg, std::string((char*)_json_str, *size));
1464
0
            }
1465
0
            _parsed_from_json_root = true;
1466
0
        } catch (simdjson::simdjson_error& e) {
1467
0
            fmt::memory_buffer error_msg;
1468
0
            fmt::format_to(error_msg, "Encounter error while extract_from_object, error: {}",
1469
0
                           e.what());
1470
0
            return return_quality_error(error_msg, std::string((char*)_json_str, *size));
1471
0
        }
1472
0
    } else {
1473
0
        _json_value = _original_json_doc;
1474
0
    }
1475
1476
0
    if (_json_value.type() == simdjson::ondemand::json_type::array && !_strip_outer_array) {
1477
0
        fmt::memory_buffer error_msg;
1478
0
        fmt::format_to(error_msg, "{}",
1479
0
                       "JSON data is array-object, `strip_outer_array` must be TRUE.");
1480
0
        return return_quality_error(error_msg, std::string((char*)_json_str, *size));
1481
0
    }
1482
1483
0
    if (_json_value.type() != simdjson::ondemand::json_type::array && _strip_outer_array) {
1484
0
        fmt::memory_buffer error_msg;
1485
0
        fmt::format_to(error_msg, "{}",
1486
0
                       "JSON data is not an array-object, `strip_outer_array` must be FALSE.");
1487
0
        return return_quality_error(error_msg, std::string((char*)_json_str, *size));
1488
0
    }
1489
0
    RETURN_IF_ERROR(_judge_empty_row(*size, *eof, is_empty_row));
1490
0
    return Status::OK();
1491
0
}
1492
1493
Status NewJsonReader::_simdjson_write_columns_by_jsonpath(
1494
        simdjson::ondemand::object* value, const std::vector<SlotDescriptor*>& slot_descs,
1495
0
        Block& block, bool* valid) {
1496
    // write by jsonpath
1497
0
    bool has_valid_value = false;
1498
1499
0
    Defer clear_defer([this]() { _cached_string_values.clear(); });
1500
1501
0
    for (size_t i = 0; i < slot_descs.size(); i++) {
1502
0
        auto* slot_desc = slot_descs[i];
1503
0
        auto* column_ptr = block.get_by_position(i).column->assume_mutable().get();
1504
0
        simdjson::ondemand::value json_value;
1505
0
        Status st;
1506
0
        if (i < _parsed_jsonpaths.size()) {
1507
0
            st = JsonFunctions::extract_from_object(*value, _parsed_jsonpaths[i], &json_value);
1508
0
            if (!st.ok() && !st.is<NOT_FOUND>()) {
1509
0
                return st;
1510
0
            }
1511
0
        }
1512
0
        if (i < _parsed_jsonpaths.size() && JsonFunctions::is_root_path(_parsed_jsonpaths[i])) {
1513
            // Indicate that the jsonpath is "$" or "$.", read the full root json object, insert the original doc directly
1514
0
            ColumnNullable* nullable_column = nullptr;
1515
0
            IColumn* target_column_ptr = nullptr;
1516
0
            if (slot_desc->is_nullable()) {
1517
0
                nullable_column = assert_cast<ColumnNullable*>(column_ptr);
1518
0
                target_column_ptr = &nullable_column->get_nested_column();
1519
0
                nullable_column->get_null_map_data().push_back(0);
1520
0
            }
1521
0
            auto* column_string = assert_cast<ColumnString*>(target_column_ptr);
1522
0
            column_string->insert_data(_simdjson_ondemand_padding_buffer.data(),
1523
0
                                       _original_doc_size);
1524
0
            has_valid_value = true;
1525
0
        } else if (i >= _parsed_jsonpaths.size() || st.is<NOT_FOUND>()) {
1526
            // not match in jsondata, filling with default value
1527
0
            RETURN_IF_ERROR(_fill_missing_column(slot_desc, _serdes[i], column_ptr, valid));
1528
0
            if (!(*valid)) {
1529
0
                return Status::OK();
1530
0
            }
1531
0
        } else {
1532
0
            RETURN_IF_ERROR(_simdjson_write_data_to_column<true>(json_value, slot_desc->type(),
1533
0
                                                                 column_ptr, slot_desc->col_name(),
1534
0
                                                                 _serdes[i], valid));
1535
0
            if (!(*valid)) {
1536
0
                return Status::OK();
1537
0
            }
1538
0
            has_valid_value = true;
1539
0
        }
1540
0
    }
1541
0
    if (!has_valid_value) {
1542
        // there is no valid value in json line but has filled with default value before
1543
        // so remove this line in block
1544
0
        std::string col_names;
1545
0
        for (int i = 0; i < block.columns(); ++i) {
1546
0
            auto column = block.get_by_position(i).column->assume_mutable();
1547
0
            column->pop_back(1);
1548
0
        }
1549
0
        for (auto* slot_desc : slot_descs) {
1550
0
            col_names.append(slot_desc->col_name() + ", ");
1551
0
        }
1552
0
        RETURN_IF_ERROR(_append_error_msg(value,
1553
0
                                          "There is no column matching jsonpaths in the json file, "
1554
0
                                          "columns:[{}], please check columns "
1555
0
                                          "and jsonpaths:" +
1556
0
                                                  _jsonpaths,
1557
0
                                          col_names, valid));
1558
0
        return Status::OK();
1559
0
    }
1560
0
    *valid = true;
1561
0
    return Status::OK();
1562
0
}
1563
1564
Status NewJsonReader::_get_column_default_value(
1565
        const std::vector<SlotDescriptor*>& slot_descs,
1566
0
        const std::unordered_map<std::string, VExprContextSPtr>& col_default_value_ctx) {
1567
0
    for (auto* slot_desc : slot_descs) {
1568
0
        auto it = col_default_value_ctx.find(slot_desc->col_name());
1569
0
        if (it != col_default_value_ctx.end() && it->second != nullptr) {
1570
0
            const auto& ctx = it->second;
1571
            // NULL_LITERAL means no valid value of current column
1572
0
            if (ctx->root()->node_type() == TExprNodeType::type::NULL_LITERAL) {
1573
0
                continue;
1574
0
            }
1575
0
            ColumnWithTypeAndName result;
1576
0
            RETURN_IF_ERROR(ctx->execute_const_expr(result));
1577
0
            DCHECK(result.column->size() == 1);
1578
0
            _col_default_value_map.emplace(slot_desc->col_name(),
1579
0
                                           result.column->get_data_at(0).to_string());
1580
0
        }
1581
0
    }
1582
0
    return Status::OK();
1583
0
}
1584
1585
Status NewJsonReader::_fill_missing_column(SlotDescriptor* slot_desc, DataTypeSerDeSPtr serde,
1586
0
                                           IColumn* column_ptr, bool* valid) {
1587
0
    auto col_value = _col_default_value_map.find(slot_desc->col_name());
1588
0
    if (col_value == _col_default_value_map.end()) {
1589
0
        if (slot_desc->is_nullable()) {
1590
0
            auto* nullable_column = static_cast<ColumnNullable*>(column_ptr);
1591
0
            nullable_column->insert_default();
1592
0
        } else {
1593
0
            if (_is_load) {
1594
0
                RETURN_IF_ERROR(_append_error_msg(
1595
0
                        nullptr, "The column `{}` is not nullable, but it's not found in jsondata.",
1596
0
                        slot_desc->col_name(), valid));
1597
0
            } else {
1598
0
                return Status::DataQualityError(
1599
0
                        "The column `{}` is not nullable, but it's not found in jsondata.",
1600
0
                        slot_desc->col_name());
1601
0
            }
1602
0
        }
1603
0
    } else {
1604
0
        const std::string& v_str = col_value->second;
1605
0
        Slice column_default_value {v_str};
1606
0
        RETURN_IF_ERROR(serde->deserialize_one_cell_from_json(*column_ptr, column_default_value,
1607
0
                                                              _serde_options));
1608
0
    }
1609
0
    *valid = true;
1610
0
    return Status::OK();
1611
0
}
1612
1613
0
void NewJsonReader::_append_empty_skip_bitmap_value(Block& block, size_t cur_row_count) {
1614
0
    auto* skip_bitmap_nullable_col_ptr = assert_cast<ColumnNullable*>(
1615
0
            block.get_by_position(skip_bitmap_col_idx).column->assume_mutable().get());
1616
0
    auto* skip_bitmap_col_ptr =
1617
0
            assert_cast<ColumnBitmap*>(skip_bitmap_nullable_col_ptr->get_nested_column_ptr().get());
1618
0
    DCHECK(skip_bitmap_nullable_col_ptr->size() == cur_row_count);
1619
    // should append an empty bitmap for every row wheather this line misses columns
1620
0
    skip_bitmap_nullable_col_ptr->get_null_map_data().push_back(0);
1621
0
    skip_bitmap_col_ptr->insert_default();
1622
0
    DCHECK(skip_bitmap_col_ptr->size() == cur_row_count + 1);
1623
0
}
1624
1625
void NewJsonReader::_set_skip_bitmap_mark(SlotDescriptor* slot_desc, IColumn* column_ptr,
1626
0
                                          Block& block, size_t cur_row_count, bool* valid) {
1627
    // we record the missing column's column unique id in skip bitmap
1628
    // to indicate which columns need to do the alignment process
1629
0
    auto* skip_bitmap_nullable_col_ptr = assert_cast<ColumnNullable*>(
1630
0
            block.get_by_position(skip_bitmap_col_idx).column->assume_mutable().get());
1631
0
    auto* skip_bitmap_col_ptr =
1632
0
            assert_cast<ColumnBitmap*>(skip_bitmap_nullable_col_ptr->get_nested_column_ptr().get());
1633
0
    DCHECK(skip_bitmap_col_ptr->size() == cur_row_count + 1);
1634
0
    auto& skip_bitmap = skip_bitmap_col_ptr->get_data().back();
1635
0
    skip_bitmap.add(slot_desc->col_unique_id());
1636
0
}
1637
1638
0
void NewJsonReader::_collect_profile_before_close() {
1639
0
    if (_line_reader != nullptr) {
1640
0
        _line_reader->collect_profile_before_close();
1641
0
    }
1642
0
    if (_file_reader != nullptr) {
1643
0
        _file_reader->collect_profile_before_close();
1644
0
    }
1645
0
}
1646
1647
} // namespace doris