Coverage Report

Created: 2026-04-13 11:59

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
be/src/format/csv/csv_reader.cpp
Line
Count
Source
1
// Licensed to the Apache Software Foundation (ASF) under one
2
// or more contributor license agreements.  See the NOTICE file
3
// distributed with this work for additional information
4
// regarding copyright ownership.  The ASF licenses this file
5
// to you under the Apache License, Version 2.0 (the
6
// "License"); you may not use this file except in compliance
7
// with the License.  You may obtain a copy of the License at
8
//
9
//   http://www.apache.org/licenses/LICENSE-2.0
10
//
11
// Unless required by applicable law or agreed to in writing,
12
// software distributed under the License is distributed on an
13
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
// KIND, either express or implied.  See the License for the
15
// specific language governing permissions and limitations
16
// under the License.
17
18
#include "format/csv/csv_reader.h"
19
20
#include <fmt/format.h>
21
#include <gen_cpp/PlanNodes_types.h>
22
#include <gen_cpp/Types_types.h>
23
#include <glog/logging.h>
24
25
#include <algorithm>
26
#include <cstddef>
27
#include <map>
28
#include <memory>
29
#include <ostream>
30
#include <regex>
31
#include <utility>
32
33
#include "common/compiler_util.h" // IWYU pragma: keep
34
#include "common/config.h"
35
#include "common/consts.h"
36
#include "common/status.h"
37
#include "core/block/block.h"
38
#include "core/block/column_with_type_and_name.h"
39
#include "core/data_type/data_type_factory.hpp"
40
#include "exec/scan/scanner.h"
41
#include "format/file_reader/new_plain_binary_line_reader.h"
42
#include "format/file_reader/new_plain_text_line_reader.h"
43
#include "format/line_reader.h"
44
#include "io/file_factory.h"
45
#include "io/fs/broker_file_reader.h"
46
#include "io/fs/buffered_reader.h"
47
#include "io/fs/file_reader.h"
48
#include "io/fs/s3_file_reader.h"
49
#include "io/fs/tracing_file_reader.h"
50
#include "runtime/descriptors.h"
51
#include "runtime/runtime_state.h"
52
#include "util/decompressor.h"
53
#include "util/string_util.h"
54
#include "util/utf8_check.h"
55
56
namespace doris {
57
class RuntimeProfile;
58
class IColumn;
59
namespace io {
60
struct IOContext;
61
enum class FileCachePolicy : uint8_t;
62
} // namespace io
63
} // namespace doris
64
65
namespace doris {
66
67
878
void EncloseCsvTextFieldSplitter::do_split(const Slice& line, std::vector<Slice>* splitted_values) {
68
878
    const char* data = line.data;
69
878
    const auto& column_sep_positions = _text_line_reader_ctx->column_sep_positions();
70
878
    size_t value_start_offset = 0;
71
2.98k
    for (auto idx : column_sep_positions) {
72
2.98k
        process_value_func(data, value_start_offset, idx - value_start_offset, _trimming_char,
73
2.98k
                           splitted_values);
74
2.98k
        value_start_offset = idx + _value_sep_len;
75
2.98k
    }
76
878
    if (line.size >= value_start_offset) {
77
        // process the last column
78
876
        process_value_func(data, value_start_offset, line.size - value_start_offset, _trimming_char,
79
876
                           splitted_values);
80
876
    }
81
878
}
82
83
void PlainCsvTextFieldSplitter::_split_field_single_char(const Slice& line,
84
14.6M
                                                         std::vector<Slice>* splitted_values) {
85
14.6M
    const char* data = line.data;
86
14.6M
    const size_t size = line.size;
87
14.6M
    size_t value_start = 0;
88
3.52G
    for (size_t i = 0; i < size; ++i) {
89
3.51G
        if (data[i] == _value_sep[0]) {
90
441M
            process_value_func(data, value_start, i - value_start, _trimming_char, splitted_values);
91
441M
            value_start = i + _value_sep_len;
92
441M
        }
93
3.51G
    }
94
14.6M
    process_value_func(data, value_start, size - value_start, _trimming_char, splitted_values);
95
14.6M
}
96
97
void PlainCsvTextFieldSplitter::_split_field_multi_char(const Slice& line,
98
2.38k
                                                        std::vector<Slice>* splitted_values) {
99
2.38k
    size_t start = 0;  // point to the start pos of next col value.
100
2.38k
    size_t curpos = 0; // point to the start pos of separator matching sequence.
101
102
    // value_sep : AAAA
103
    // line.data : 1234AAAA5678
104
    // -> 1234,5678
105
106
    //    start   start
107
    //      ▼       ▼
108
    //      1234AAAA5678\0
109
    //          ▲       ▲
110
    //      curpos     curpos
111
112
    //kmp
113
2.38k
    std::vector<int> next(_value_sep_len);
114
2.38k
    next[0] = -1;
115
4.96k
    for (int i = 1, j = -1; i < _value_sep_len; i++) {
116
2.60k
        while (j > -1 && _value_sep[i] != _value_sep[j + 1]) {
117
20
            j = next[j];
118
20
        }
119
2.58k
        if (_value_sep[i] == _value_sep[j + 1]) {
120
2.44k
            j++;
121
2.44k
        }
122
2.58k
        next[i] = j;
123
2.58k
    }
124
125
42.1k
    for (int i = 0, j = -1; i < line.size; i++) {
126
        // i : line
127
        // j : _value_sep
128
42.4k
        while (j > -1 && line[i] != _value_sep[j + 1]) {
129
2.66k
            j = next[j];
130
2.66k
        }
131
39.7k
        if (line[i] == _value_sep[j + 1]) {
132
7.02k
            j++;
133
7.02k
        }
134
39.7k
        if (j == _value_sep_len - 1) {
135
3.19k
            curpos = i - _value_sep_len + 1;
136
137
            /*
138
             * column_separator : "xx"
139
             * data.csv :  data1xxxxdata2
140
             *
141
             * Parse incorrectly:
142
             *      data1[xx]xxdata2
143
             *      data1x[xx]xdata2
144
             *      data1xx[xx]data2
145
             * The string "xxxx" is parsed into three "xx" delimiters.
146
             *
147
             * Parse correctly:
148
             *      data1[xx]xxdata2
149
             *      data1xx[xx]data2
150
             */
151
152
3.19k
            if (curpos >= start) {
153
3.13k
                process_value_func(line.data, start, curpos - start, _trimming_char,
154
3.13k
                                   splitted_values);
155
3.13k
                start = i + 1;
156
3.13k
            }
157
158
3.19k
            j = next[j];
159
3.19k
        }
160
39.7k
    }
161
2.38k
    process_value_func(line.data, start, line.size - start, _trimming_char, splitted_values);
162
2.38k
}
163
164
14.6M
void PlainCsvTextFieldSplitter::do_split(const Slice& line, std::vector<Slice>* splitted_values) {
165
14.6M
    if (is_single_char_delim) {
166
14.6M
        _split_field_single_char(line, splitted_values);
167
18.4E
    } else {
168
18.4E
        _split_field_multi_char(line, splitted_values);
169
18.4E
    }
170
14.6M
}
171
172
CsvReader::CsvReader(RuntimeState* state, RuntimeProfile* profile, ScannerCounter* counter,
173
                     const TFileScanRangeParams& params, const TFileRangeDesc& range,
174
                     const std::vector<SlotDescriptor*>& file_slot_descs, io::IOContext* io_ctx,
175
                     std::shared_ptr<io::IOContext> io_ctx_holder)
176
8.12k
        : _profile(profile),
177
8.12k
          _params(params),
178
8.12k
          _file_reader(nullptr),
179
8.12k
          _line_reader(nullptr),
180
8.12k
          _decompressor(nullptr),
181
8.12k
          _state(state),
182
8.12k
          _counter(counter),
183
8.12k
          _range(range),
184
8.12k
          _file_slot_descs(file_slot_descs),
185
8.12k
          _line_reader_eof(false),
186
8.12k
          _skip_lines(0),
187
8.12k
          _io_ctx(io_ctx),
188
8.12k
          _io_ctx_holder(std::move(io_ctx_holder)) {
189
8.12k
    if (_io_ctx == nullptr && _io_ctx_holder) {
190
0
        _io_ctx = _io_ctx_holder.get();
191
0
    }
192
8.12k
    _file_format_type = _params.format_type;
193
8.12k
    _is_proto_format = _file_format_type == TFileFormatType::FORMAT_PROTO;
194
8.12k
    if (_range.__isset.compress_type) {
195
        // for compatibility
196
6.17k
        _file_compress_type = _range.compress_type;
197
6.17k
    } else {
198
1.95k
        _file_compress_type = _params.compress_type;
199
1.95k
    }
200
8.12k
    _size = _range.size;
201
202
8.12k
    _split_values.reserve(_file_slot_descs.size());
203
8.12k
    _init_system_properties();
204
8.12k
    _init_file_description();
205
8.12k
    _serdes = create_data_type_serdes(_file_slot_descs);
206
8.12k
}
207
208
8.12k
void CsvReader::_init_system_properties() {
209
8.12k
    if (_range.__isset.file_type) {
210
        // for compatibility
211
6.00k
        _system_properties.system_type = _range.file_type;
212
6.00k
    } else {
213
2.12k
        _system_properties.system_type = _params.file_type;
214
2.12k
    }
215
8.12k
    _system_properties.properties = _params.properties;
216
8.12k
    _system_properties.hdfs_params = _params.hdfs_params;
217
8.12k
    if (_params.__isset.broker_addresses) {
218
1.85k
        _system_properties.broker_addresses.assign(_params.broker_addresses.begin(),
219
1.85k
                                                   _params.broker_addresses.end());
220
1.85k
    }
221
8.12k
}
222
223
8.12k
void CsvReader::_init_file_description() {
224
8.12k
    _file_description.path = _range.path;
225
8.12k
    _file_description.file_size = _range.__isset.file_size ? _range.file_size : -1;
226
8.12k
    if (_range.__isset.fs_name) {
227
4.88k
        _file_description.fs_name = _range.fs_name;
228
4.88k
    }
229
8.12k
    if (_range.__isset.file_cache_admission) {
230
5.37k
        _file_description.file_cache_admission = _range.file_cache_admission;
231
5.37k
    }
232
8.12k
}
233
234
7.33k
Status CsvReader::init_reader(bool is_load) {
235
    // set the skip lines and start offset
236
7.33k
    _start_offset = _range.start_offset;
237
7.33k
    if (_start_offset == 0) {
238
        // check header type first
239
7.21k
        if (_params.__isset.file_attributes && _params.file_attributes.__isset.header_type &&
240
7.20k
            !_params.file_attributes.header_type.empty()) {
241
92
            std::string header_type = to_lower(_params.file_attributes.header_type);
242
92
            if (header_type == BeConsts::CSV_WITH_NAMES) {
243
64
                _skip_lines = 1;
244
64
            } else if (header_type == BeConsts::CSV_WITH_NAMES_AND_TYPES) {
245
28
                _skip_lines = 2;
246
28
            }
247
7.11k
        } else if (_params.file_attributes.__isset.skip_lines) {
248
7.11k
            _skip_lines = _params.file_attributes.skip_lines;
249
7.11k
        }
250
7.20k
    } else if (_start_offset != 0) {
251
126
        if ((_file_compress_type != TFileCompressType::PLAIN) ||
252
126
            (_file_compress_type == TFileCompressType::UNKNOWN &&
253
126
             _file_format_type != TFileFormatType::FORMAT_CSV_PLAIN)) {
254
0
            return Status::InternalError<false>("For now we do not support split compressed file");
255
0
        }
256
        // pre-read to promise first line skipped always read
257
126
        int64_t pre_read_len = std::min(
258
126
                static_cast<int64_t>(_params.file_attributes.text_params.line_delimiter.size()),
259
126
                _start_offset);
260
126
        _start_offset -= pre_read_len;
261
126
        _size += pre_read_len;
262
        // every range other than the first always skips one line
263
126
        _skip_lines = 1;
264
126
    }
265
266
7.33k
    _use_nullable_string_opt.resize(_file_slot_descs.size());
267
211k
    for (int i = 0; i < _file_slot_descs.size(); ++i) {
268
204k
        auto data_type_ptr = _file_slot_descs[i]->get_data_type_ptr();
269
204k
        if (data_type_ptr->is_nullable() && is_string_type(data_type_ptr->get_primitive_type())) {
270
45.4k
            _use_nullable_string_opt[i] = 1;
271
45.4k
        }
272
204k
    }
273
274
7.33k
    RETURN_IF_ERROR(_init_options());
275
7.33k
    RETURN_IF_ERROR(_create_file_reader(false));
276
7.33k
    RETURN_IF_ERROR(_create_decompressor());
277
7.33k
    RETURN_IF_ERROR(_create_line_reader());
278
279
7.33k
    _is_load = is_load;
280
7.33k
    if (!_is_load) {
281
        // For query task, there are 2 slot mapping.
282
        // One is from file slot to values in line.
283
        //      eg, the file_slot_descs is k1, k3, k5, and values in line are k1, k2, k3, k4, k5
284
        //      the _col_idxs will save: 0, 2, 4
285
        // The other is from file slot to columns in output block
286
        //      eg, the file_slot_descs is k1, k3, k5, and columns in block are p1, k1, k3, k5
287
        //      where "p1" is the partition col which does not exist in file
288
        //      the _file_slot_idx_map will save: 1, 2, 3
289
5.48k
        DCHECK(_params.__isset.column_idxs);
290
5.48k
        _col_idxs = _params.column_idxs;
291
5.48k
        int idx = 0;
292
189k
        for (const auto& slot_info : _params.required_slots) {
293
189k
            if (slot_info.is_file_slot) {
294
187k
                _file_slot_idx_map.push_back(idx);
295
187k
            }
296
189k
            idx++;
297
189k
        }
298
5.48k
    } else {
299
        // For load task, the column order is same as file column order
300
1.84k
        int i = 0;
301
16.9k
        for (const auto& desc [[maybe_unused]] : _file_slot_descs) {
302
16.9k
            _col_idxs.push_back(i++);
303
16.9k
        }
304
1.84k
    }
305
306
7.33k
    _line_reader_eof = false;
307
7.33k
    return Status::OK();
308
7.33k
}
309
310
// !FIXME: Here we should use MutableBlock
311
17.5k
Status CsvReader::get_next_block(Block* block, size_t* read_rows, bool* eof) {
312
17.5k
    if (_line_reader_eof) {
313
7.26k
        *eof = true;
314
7.26k
        return Status::OK();
315
7.26k
    }
316
317
10.2k
    const int batch_size = std::max(_state->batch_size(), (int)_MIN_BATCH_SIZE);
318
10.2k
    const int64_t max_block_bytes =
319
10.2k
            (_state->query_type() == TQueryType::LOAD && config::load_reader_max_block_bytes > 0)
320
10.2k
                    ? config::load_reader_max_block_bytes
321
10.2k
                    : 0;
322
10.2k
    size_t rows = 0;
323
10.2k
    size_t block_bytes = 0;
324
325
10.2k
    bool success = false;
326
10.2k
    bool is_remove_bom = false;
327
10.2k
    if (_push_down_agg_type == TPushAggOp::type::COUNT) {
328
140k
        while (rows < batch_size && !_line_reader_eof &&
329
140k
               (max_block_bytes <= 0 || (int64_t)block_bytes < max_block_bytes)) {
330
139k
            const uint8_t* ptr = nullptr;
331
139k
            size_t size = 0;
332
139k
            RETURN_IF_ERROR(_line_reader->read_line(&ptr, &size, &_line_reader_eof, _io_ctx));
333
334
            // _skip_lines == 0 means this line is the actual data beginning line for the entire file
335
            // is_remove_bom means _remove_bom should only execute once
336
140k
            if (_skip_lines == 0 && !is_remove_bom) {
337
568
                ptr = _remove_bom(ptr, size);
338
568
                is_remove_bom = true;
339
568
            }
340
341
            // _skip_lines > 0 means we do not need to remove bom
342
139k
            if (_skip_lines > 0) {
343
0
                _skip_lines--;
344
0
                is_remove_bom = true;
345
0
                continue;
346
0
            }
347
139k
            if (size == 0) {
348
568
                if (!_line_reader_eof && _state->is_read_csv_empty_line_as_null()) {
349
0
                    ++rows;
350
0
                }
351
                // Read empty line, continue
352
568
                continue;
353
568
            }
354
355
139k
            RETURN_IF_ERROR(_validate_line(Slice(ptr, size), &success));
356
139k
            ++rows;
357
139k
            block_bytes += size;
358
139k
        }
359
568
        auto mutate_columns = block->mutate_columns();
360
568
        for (auto& col : mutate_columns) {
361
568
            col->resize(rows);
362
568
        }
363
568
        block->set_columns(std::move(mutate_columns));
364
9.67k
    } else {
365
9.67k
        auto columns = block->mutate_columns();
366
14.8M
        while (rows < batch_size && !_line_reader_eof &&
367
14.8M
               (max_block_bytes <= 0 || (int64_t)block_bytes < max_block_bytes)) {
368
14.8M
            const uint8_t* ptr = nullptr;
369
14.8M
            size_t size = 0;
370
14.8M
            RETURN_IF_ERROR(_line_reader->read_line(&ptr, &size, &_line_reader_eof, _io_ctx));
371
372
            // _skip_lines == 0 means this line is the actual data beginning line for the entire file
373
            // is_remove_bom means _remove_bom should only execute once
374
14.8M
            if (!is_remove_bom && _skip_lines == 0) {
375
9.43k
                ptr = _remove_bom(ptr, size);
376
9.43k
                is_remove_bom = true;
377
9.43k
            }
378
379
            // _skip_lines > 0 means we do not need to remove bom
380
14.8M
            if (_skip_lines > 0) {
381
315
                _skip_lines--;
382
315
                is_remove_bom = true;
383
315
                continue;
384
315
            }
385
14.8M
            if (size == 0) {
386
6.80k
                if (!_line_reader_eof && _state->is_read_csv_empty_line_as_null()) {
387
12
                    RETURN_IF_ERROR(_fill_empty_line(block, columns, &rows));
388
12
                }
389
                // Read empty line, continue
390
6.80k
                continue;
391
6.80k
            }
392
393
14.8M
            RETURN_IF_ERROR(_validate_line(Slice(ptr, size), &success));
394
14.8M
            if (!success) {
395
128
                continue;
396
128
            }
397
14.8M
            RETURN_IF_ERROR(_fill_dest_columns(Slice(ptr, size), block, columns, &rows));
398
14.8M
            block_bytes += size;
399
14.8M
        }
400
9.65k
        block->set_columns(std::move(columns));
401
9.65k
    }
402
403
10.2k
    *eof = (rows == 0);
404
10.2k
    *read_rows = rows;
405
406
10.2k
    return Status::OK();
407
10.2k
}
408
409
Status CsvReader::get_columns(std::unordered_map<std::string, DataTypePtr>* name_to_type,
410
7.33k
                              std::unordered_set<std::string>* missing_cols) {
411
204k
    for (const auto& slot : _file_slot_descs) {
412
204k
        name_to_type->emplace(slot->col_name(), slot->type());
413
204k
    }
414
7.33k
    return Status::OK();
415
7.33k
}
416
417
// init decompressor, file reader and line reader for parsing schema
418
794
Status CsvReader::init_schema_reader() {
419
794
    _start_offset = _range.start_offset;
420
794
    if (_start_offset != 0) {
421
0
        return Status::InvalidArgument(
422
0
                "start offset of TFileRangeDesc must be zero in get parsered schema");
423
0
    }
424
794
    if (_params.file_type == TFileType::FILE_BROKER) {
425
0
        return Status::InternalError<false>(
426
0
                "Getting parsered schema from csv file do not support stream load and broker "
427
0
                "load.");
428
0
    }
429
430
    // csv file without names line and types line.
431
794
    _read_line = 1;
432
794
    _is_parse_name = false;
433
434
794
    if (_params.__isset.file_attributes && _params.file_attributes.__isset.header_type &&
435
794
        !_params.file_attributes.header_type.empty()) {
436
92
        std::string header_type = to_lower(_params.file_attributes.header_type);
437
92
        if (header_type == BeConsts::CSV_WITH_NAMES) {
438
63
            _is_parse_name = true;
439
63
        } else if (header_type == BeConsts::CSV_WITH_NAMES_AND_TYPES) {
440
29
            _read_line = 2;
441
29
            _is_parse_name = true;
442
29
        }
443
92
    }
444
445
794
    RETURN_IF_ERROR(_init_options());
446
794
    RETURN_IF_ERROR(_create_file_reader(true));
447
794
    RETURN_IF_ERROR(_create_decompressor());
448
794
    RETURN_IF_ERROR(_create_line_reader());
449
794
    return Status::OK();
450
794
}
451
452
Status CsvReader::get_parsed_schema(std::vector<std::string>* col_names,
453
794
                                    std::vector<DataTypePtr>* col_types) {
454
794
    if (_read_line == 1) {
455
765
        if (!_is_parse_name) { //parse csv file without names and types
456
702
            size_t col_nums = 0;
457
702
            RETURN_IF_ERROR(_parse_col_nums(&col_nums));
458
7.88k
            for (size_t i = 0; i < col_nums; ++i) {
459
7.19k
                col_names->emplace_back("c" + std::to_string(i + 1));
460
7.19k
            }
461
695
        } else { // parse csv file with names
462
63
            RETURN_IF_ERROR(_parse_col_names(col_names));
463
63
        }
464
465
8.16k
        for (size_t j = 0; j < col_names->size(); ++j) {
466
7.41k
            col_types->emplace_back(
467
7.41k
                    DataTypeFactory::instance().create_data_type(PrimitiveType::TYPE_STRING, true));
468
7.41k
        }
469
758
    } else { // parse csv file with names and types
470
29
        RETURN_IF_ERROR(_parse_col_names(col_names));
471
29
        RETURN_IF_ERROR(_parse_col_types(col_names->size(), col_types));
472
29
    }
473
787
    return Status::OK();
474
794
}
475
476
210M
Status CsvReader::_deserialize_nullable_string(IColumn& column, Slice& slice) {
477
210M
    auto& null_column = assert_cast<ColumnNullable&>(column);
478
210M
    if (_empty_field_as_null) {
479
60
        if (slice.size == 0) {
480
5
            null_column.insert_data(nullptr, 0);
481
5
            return Status::OK();
482
5
        }
483
60
    }
484
211M
    if (_options.null_len > 0 && !(_options.converted_from_string && slice.trim_double_quotes())) {
485
211M
        if (slice.compare(Slice(_options.null_format, _options.null_len)) == 0) {
486
31.1k
            null_column.insert_data(nullptr, 0);
487
31.1k
            return Status::OK();
488
31.1k
        }
489
211M
    }
490
210M
    static DataTypeStringSerDe stringSerDe(TYPE_STRING);
491
210M
    auto st = stringSerDe.deserialize_one_cell_from_csv(null_column.get_nested_column(), slice,
492
210M
                                                        _options);
493
210M
    if (!st.ok()) {
494
        // fill null if fail
495
0
        null_column.insert_data(nullptr, 0); // 0 is meaningless here
496
0
        return Status::OK();
497
0
    }
498
    // fill not null if success
499
210M
    null_column.get_null_map_data().push_back(0);
500
210M
    return Status::OK();
501
210M
}
502
503
3.56k
Status CsvReader::_init_options() {
504
    // get column_separator and line_delimiter
505
3.56k
    _value_separator = _params.file_attributes.text_params.column_separator;
506
3.56k
    _value_separator_length = _value_separator.size();
507
3.56k
    _line_delimiter = _params.file_attributes.text_params.line_delimiter;
508
3.56k
    _line_delimiter_length = _line_delimiter.size();
509
3.56k
    if (_params.file_attributes.text_params.__isset.enclose) {
510
3.56k
        _enclose = _params.file_attributes.text_params.enclose;
511
3.56k
    }
512
3.56k
    if (_params.file_attributes.text_params.__isset.escape) {
513
3.56k
        _escape = _params.file_attributes.text_params.escape;
514
3.56k
    }
515
516
3.56k
    _trim_tailing_spaces =
517
3.56k
            (_state != nullptr && _state->trim_tailing_spaces_for_external_table_query());
518
519
3.56k
    _options.escape_char = _escape;
520
3.56k
    _options.quote_char = _enclose;
521
522
3.56k
    if (_params.file_attributes.text_params.collection_delimiter.empty()) {
523
3.56k
        _options.collection_delim = ',';
524
3.56k
    } else {
525
0
        _options.collection_delim = _params.file_attributes.text_params.collection_delimiter[0];
526
0
    }
527
3.56k
    if (_params.file_attributes.text_params.mapkv_delimiter.empty()) {
528
3.56k
        _options.map_key_delim = ':';
529
3.56k
    } else {
530
0
        _options.map_key_delim = _params.file_attributes.text_params.mapkv_delimiter[0];
531
0
    }
532
533
3.56k
    if (_params.file_attributes.text_params.__isset.null_format) {
534
34
        _options.null_format = _params.file_attributes.text_params.null_format.data();
535
34
        _options.null_len = _params.file_attributes.text_params.null_format.length();
536
34
    }
537
538
3.56k
    if (_params.file_attributes.__isset.trim_double_quotes) {
539
3.56k
        _trim_double_quotes = _params.file_attributes.trim_double_quotes;
540
3.56k
    }
541
3.56k
    _options.converted_from_string = _trim_double_quotes;
542
543
3.56k
    if (_state != nullptr) {
544
2.77k
        _keep_cr = _state->query_options().keep_carriage_return;
545
2.77k
    }
546
547
3.56k
    if (_params.file_attributes.text_params.__isset.empty_field_as_null) {
548
3.52k
        _empty_field_as_null = _params.file_attributes.text_params.empty_field_as_null;
549
3.52k
    }
550
3.56k
    return Status::OK();
551
3.56k
}
552
553
8.12k
Status CsvReader::_create_decompressor() {
554
8.12k
    if (_file_compress_type != TFileCompressType::UNKNOWN) {
555
8.04k
        RETURN_IF_ERROR(Decompressor::create_decompressor(_file_compress_type, &_decompressor));
556
8.04k
    } else {
557
82
        RETURN_IF_ERROR(Decompressor::create_decompressor(_file_format_type, &_decompressor));
558
82
    }
559
560
8.12k
    return Status::OK();
561
8.12k
}
562
563
8.13k
Status CsvReader::_create_file_reader(bool need_schema) {
564
8.13k
    if (_params.file_type == TFileType::FILE_STREAM) {
565
1.93k
        RETURN_IF_ERROR(FileFactory::create_pipe_reader(_range.load_id, &_file_reader, _state,
566
1.93k
                                                        need_schema));
567
6.20k
    } else {
568
6.20k
        _file_description.mtime = _range.__isset.modification_time ? _range.modification_time : 0;
569
6.20k
        io::FileReaderOptions reader_options =
570
6.20k
                FileFactory::get_reader_options(_state, _file_description);
571
6.20k
        io::FileReaderSPtr file_reader;
572
6.20k
        if (_io_ctx_holder) {
573
670
            file_reader = DORIS_TRY(io::DelegateReader::create_file_reader(
574
670
                    _profile, _system_properties, _file_description, reader_options,
575
670
                    io::DelegateReader::AccessMode::SEQUENTIAL,
576
670
                    std::static_pointer_cast<const io::IOContext>(_io_ctx_holder),
577
670
                    io::PrefetchRange(_range.start_offset, _range.start_offset + _range.size)));
578
5.53k
        } else {
579
5.53k
            file_reader = DORIS_TRY(io::DelegateReader::create_file_reader(
580
5.53k
                    _profile, _system_properties, _file_description, reader_options,
581
5.53k
                    io::DelegateReader::AccessMode::SEQUENTIAL, _io_ctx,
582
5.53k
                    io::PrefetchRange(_range.start_offset, _range.start_offset + _range.size)));
583
5.53k
        }
584
6.20k
        _file_reader = _io_ctx ? std::make_shared<io::TracingFileReader>(std::move(file_reader),
585
6.19k
                                                                         _io_ctx->file_reader_stats)
586
6.20k
                               : file_reader;
587
6.20k
    }
588
8.13k
    if (_file_reader->size() == 0 && _params.file_type != TFileType::FILE_STREAM &&
589
8.13k
        _params.file_type != TFileType::FILE_BROKER) {
590
0
        return Status::EndOfFile("init reader failed, empty csv file: " + _range.path);
591
0
    }
592
8.13k
    return Status::OK();
593
8.13k
}
594
595
3.56k
Status CsvReader::_create_line_reader() {
596
3.56k
    std::shared_ptr<TextLineReaderContextIf> text_line_reader_ctx;
597
3.56k
    if (_enclose == 0) {
598
3.40k
        text_line_reader_ctx = std::make_shared<PlainTextLineReaderCtx>(
599
3.40k
                _line_delimiter, _line_delimiter_length, _keep_cr);
600
3.40k
        _fields_splitter = std::make_unique<PlainCsvTextFieldSplitter>(
601
3.40k
                _trim_tailing_spaces, false, _value_separator, _value_separator_length, -1);
602
603
3.40k
    } else {
604
        // in load task, the _file_slot_descs is empty vector, so we need to set col_sep_num to 0
605
155
        size_t col_sep_num = _file_slot_descs.size() > 1 ? _file_slot_descs.size() - 1 : 0;
606
155
        _enclose_reader_ctx = std::make_shared<EncloseCsvLineReaderCtx>(
607
155
                _line_delimiter, _line_delimiter_length, _value_separator, _value_separator_length,
608
155
                col_sep_num, _enclose, _escape, _keep_cr);
609
155
        text_line_reader_ctx = _enclose_reader_ctx;
610
611
155
        _fields_splitter = std::make_unique<EncloseCsvTextFieldSplitter>(
612
155
                _trim_tailing_spaces, true, _enclose_reader_ctx, _value_separator_length, _enclose);
613
155
    }
614
3.56k
    switch (_file_format_type) {
615
3.48k
    case TFileFormatType::FORMAT_CSV_PLAIN:
616
3.48k
        [[fallthrough]];
617
3.48k
    case TFileFormatType::FORMAT_CSV_GZ:
618
3.48k
        [[fallthrough]];
619
3.48k
    case TFileFormatType::FORMAT_CSV_BZ2:
620
3.48k
        [[fallthrough]];
621
3.48k
    case TFileFormatType::FORMAT_CSV_LZ4FRAME:
622
3.48k
        [[fallthrough]];
623
3.48k
    case TFileFormatType::FORMAT_CSV_LZ4BLOCK:
624
3.48k
        [[fallthrough]];
625
3.48k
    case TFileFormatType::FORMAT_CSV_LZOP:
626
3.48k
        [[fallthrough]];
627
3.48k
    case TFileFormatType::FORMAT_CSV_SNAPPYBLOCK:
628
3.48k
        [[fallthrough]];
629
3.48k
    case TFileFormatType::FORMAT_CSV_DEFLATE:
630
3.48k
        _line_reader =
631
3.48k
                NewPlainTextLineReader::create_unique(_profile, _file_reader, _decompressor.get(),
632
3.48k
                                                      text_line_reader_ctx, _size, _start_offset);
633
634
3.48k
        break;
635
77
    case TFileFormatType::FORMAT_PROTO:
636
77
        _fields_splitter = std::make_unique<CsvProtoFieldSplitter>();
637
77
        _line_reader = NewPlainBinaryLineReader::create_unique(_file_reader);
638
77
        break;
639
0
    default:
640
0
        return Status::InternalError<false>(
641
0
                "Unknown format type, cannot init line reader in csv reader, type={}",
642
0
                _file_format_type);
643
3.56k
    }
644
3.56k
    return Status::OK();
645
3.56k
}
646
647
5.24k
Status CsvReader::_deserialize_one_cell(DataTypeSerDeSPtr serde, IColumn& column, Slice& slice) {
648
5.24k
    return serde->deserialize_one_cell_from_csv(column, slice, _options);
649
5.24k
}
650
651
// Split one CSV line and deserialize its fields into the destination columns.
// - `line`:    one full row of raw CSV bytes (separators included).
// - `block`:   destination block; only consulted for query tasks (!_is_load),
//              where columns are addressed through `_file_slot_idx_map`.
// - `columns`: destination columns for load tasks.
// - `rows`:    incremented only when the row is accepted.
// Invalid rows (wrong column count, bad encoding) are filtered silently here:
// `_line_split_to_values` has already recorded the error, so we return OK.
Status CsvReader::_fill_dest_columns(const Slice& line, Block* block,
                                     std::vector<MutableColumnPtr>& columns, size_t* rows) {
    bool is_success = false;

    RETURN_IF_ERROR(_line_split_to_values(line, &is_success));
    if (UNLIKELY(!is_success)) {
        // If not success, which means we met an invalid row, filter this row and return.
        return Status::OK();
    }

    // Use size_t for the index to avoid signed/unsigned comparison with
    // _file_slot_descs.size().
    for (size_t i = 0; i < _file_slot_descs.size(); ++i) {
        // Cast once so the range check below is unsigned/unsigned. A negative
        // mapping wraps to a huge value and falls into the null-format branch,
        // which matches the original signed-vs-unsigned comparison behavior.
        const auto col_idx = static_cast<size_t>(_col_idxs[i]);
        // col idx is out of range, fill with null format
        auto value = col_idx < _split_values.size()
                             ? _split_values[col_idx]
                             : Slice(_options.null_format, _options.null_len);

        IColumn* col_ptr = columns[i].get();
        if (!_is_load) {
            // block is a Block*, and get_by_position returns a ColumnPtr,
            // which is a const pointer. Therefore, using const_cast is permissible.
            col_ptr = const_cast<IColumn*>(
                    block->get_by_position(_file_slot_idx_map[i]).column.get());
        }

        if (_use_nullable_string_opt[i]) {
            // For load task, we always read "string" from file.
            // So serdes[i] here must be DataTypeNullableSerDe, and DataTypeNullableSerDe -> nested_serde must be DataTypeStringSerDe.
            // So we use deserialize_nullable_string and stringSerDe to reduce virtual function calls.
            RETURN_IF_ERROR(_deserialize_nullable_string(*col_ptr, value));
        } else {
            RETURN_IF_ERROR(_deserialize_one_cell(_serdes[i], *col_ptr, value));
        }
    }
    ++(*rows);

    return Status::OK();
}
689
690
// Emit one all-NULL row for an empty input line.
// Every destination column is assumed to be nullable here (the assert_cast to
// ColumnNullable enforces it at runtime in debug builds); `rows` is incremented
// once for the produced row.
Status CsvReader::_fill_empty_line(Block* block, std::vector<MutableColumnPtr>& columns,
                                   size_t* rows) {
    // size_t index avoids the signed/unsigned comparison against
    // _file_slot_descs.size().
    for (size_t i = 0; i < _file_slot_descs.size(); ++i) {
        IColumn* col_ptr = columns[i].get();
        if (!_is_load) {
            // block is a Block*, and get_by_position returns a ColumnPtr,
            // which is a const pointer. Therefore, using const_cast is permissible.
            col_ptr = const_cast<IColumn*>(
                    block->get_by_position(_file_slot_idx_map[i]).column.get());
        }
        auto& null_column = assert_cast<ColumnNullable&>(*col_ptr);
        // insert_data(nullptr, 0) appends a NULL entry.
        null_column.insert_data(nullptr, 0);
    }
    ++(*rows);
    return Status::OK();
}
706
707
14.6M
// Validate that `line` is UTF-8 (skipped for proto-format input).
// Query tasks fail hard on bad encoding; load tasks count and record the bad
// row (`*success = false`) and continue.
Status CsvReader::_validate_line(const Slice& line, bool* success) {
    const bool encoding_ok =
            _is_proto_format || validate_utf8(_params, line.data, line.size);
    if (encoding_ok) {
        *success = true;
        return Status::OK();
    }

    if (!_is_load) {
        // Query path: surface the error immediately.
        return Status::InternalError<false>("Only support csv data in utf8 codec");
    }

    // Load path: filter the row, record it in the error log, keep going.
    _counter->num_rows_filtered++;
    *success = false;
    RETURN_IF_ERROR(_state->append_error_msg_to_file(
            [&]() -> std::string { return std::string(line.data, line.size); },
            [&]() -> std::string {
                return "Invalid file encoding: all CSV files must be UTF-8 encoded";
            }));
    return Status::OK();
}
725
726
14.9M
// Split `line` into `_split_values` and, for load tasks, verify the field
// count against the expected schema width. A mismatched row is filtered
// (counted, logged to the error file, `*success = false`) and OK is returned;
// query tasks never filter here because missing columns are filled with null
// later in _fill_dest_columns.
Status CsvReader::_line_split_to_values(const Slice& line, bool* success) {
    _split_line(line);

    if (_is_load) {
        // Only check for load task. For query task, the non exist column will be filled "null".
        // if actual column number in csv file is not equal to _file_slot_descs.size()
        // then filter this line.
        bool ignore_col = false;
        // ignore_csv_redundant_col relaxes the check to "at least as many
        // fields as slots"; extra trailing fields are silently dropped.
        ignore_col = _params.__isset.file_attributes &&
                     _params.file_attributes.__isset.ignore_csv_redundant_col &&
                     _params.file_attributes.ignore_csv_redundant_col;

        if ((!ignore_col && _split_values.size() != _file_slot_descs.size()) ||
            (ignore_col && _split_values.size() < _file_slot_descs.size())) {
            _counter->num_rows_filtered++;
            *success = false;
            // The two lambdas are only invoked when the error actually gets
            // written, so the fmt/regex work below stays off the happy path.
            RETURN_IF_ERROR(_state->append_error_msg_to_file(
                    [&]() -> std::string { return std::string(line.data, line.size); },
                    [&]() -> std::string {
                        fmt::memory_buffer error_msg;
                        fmt::format_to(error_msg,
                                       "Column count mismatch: expected {}, but found {}",
                                       _file_slot_descs.size(), _split_values.size());
                        // Escape tab/newline so the separators are readable in
                        // the error log.
                        std::string escaped_separator =
                                std::regex_replace(_value_separator, std::regex("\t"), "\\t");
                        std::string escaped_delimiter =
                                std::regex_replace(_line_delimiter, std::regex("\n"), "\\n");
                        fmt::format_to(error_msg, " (sep:{} delim:{}", escaped_separator,
                                       escaped_delimiter);
                        // 0 means "no enclose/escape character configured".
                        if (_enclose != 0) {
                            fmt::format_to(error_msg, " encl:{}", _enclose);
                        }
                        if (_escape != 0) {
                            fmt::format_to(error_msg, " esc:{}", _escape);
                        }
                        fmt::format_to(error_msg, ")");
                        return fmt::to_string(error_msg);
                    }));
            return Status::OK();
        }
    }

    *success = true;
    return Status::OK();
}
771
772
14.9M
// Tokenize `line` into fields. Results go into the member `_split_values`
// (cleared first) rather than a fresh vector — presumably so the vector's
// capacity is reused across the many lines a scan reads.
void CsvReader::_split_line(const Slice& line) {
    _split_values.clear();
    _fields_splitter->split_line(line, &_split_values);
}
776
777
702
// Determine the number of columns by reading and splitting the first line of
// the file. Fails if the first line is empty or not valid UTF-8; a leading
// UTF-8 BOM is stripped before splitting.
Status CsvReader::_parse_col_nums(size_t* col_nums) {
    const uint8_t* line_ptr = nullptr;
    size_t line_len = 0;
    RETURN_IF_ERROR(_line_reader->read_line(&line_ptr, &line_len, &_line_reader_eof, _io_ctx));
    if (line_len == 0) {
        return Status::InternalError<false>(
                "The first line is empty, can not parse column numbers");
    }
    if (!validate_utf8(_params, reinterpret_cast<const char*>(line_ptr), line_len)) {
        return Status::InternalError<false>("Only support csv data in utf8 codec");
    }
    // _remove_bom may shrink line_len as well as advance the pointer.
    line_ptr = _remove_bom(line_ptr, line_len);
    _split_line(Slice(line_ptr, line_len));
    *col_nums = _split_values.size();
    return Status::OK();
}
793
794
92
// Read the header line and append each field as a column name to `col_names`.
// Same preconditions as _parse_col_nums: non-empty first line, valid UTF-8,
// optional BOM stripped before splitting.
Status CsvReader::_parse_col_names(std::vector<std::string>* col_names) {
    const uint8_t* line_ptr = nullptr;
    size_t line_len = 0;
    // no use of _line_reader_eof
    RETURN_IF_ERROR(_line_reader->read_line(&line_ptr, &line_len, &_line_reader_eof, _io_ctx));
    if (line_len == 0) {
        return Status::InternalError<false>("The first line is empty, can not parse column names");
    }
    if (!validate_utf8(_params, reinterpret_cast<const char*>(line_ptr), line_len)) {
        return Status::InternalError<false>("Only support csv data in utf8 codec");
    }
    line_ptr = _remove_bom(line_ptr, line_len);
    _split_line(Slice(line_ptr, line_len));
    for (const auto& field : _split_values) {
        col_names->emplace_back(field.to_string());
    }
    return Status::OK();
}
812
813
// TODO(ftw): parse type
814
29
Status CsvReader::_parse_col_types(size_t col_nums, std::vector<DataTypePtr>* col_types) {
815
    // delete after.
816
155
    for (size_t i = 0; i < col_nums; ++i) {
817
126
        col_types->emplace_back(make_nullable(std::make_shared<DataTypeString>()));
818
126
    }
819
820
    // 1. check _line_reader_eof
821
    // 2. read line
822
    // 3. check utf8
823
    // 4. check size
824
    // 5. check _split_values.size must equal to col_nums.
825
    // 6. fill col_types
826
29
    return Status::OK();
827
29
}
828
829
10.7k
const uint8_t* CsvReader::_remove_bom(const uint8_t* ptr, size_t& size) {
830
10.7k
    if (size >= 3 && ptr[0] == 0xEF && ptr[1] == 0xBB && ptr[2] == 0xBF) {
831
7
        LOG(INFO) << "remove bom";
832
7
        constexpr size_t bom_size = 3;
833
7
        size -= bom_size;
834
        // In enclose mode, column_sep_positions were computed on the original line
835
        // (including BOM). After shifting the pointer, we must adjust those positions
836
        // so they remain correct relative to the new start.
837
7
        if (_enclose_reader_ctx) {
838
1
            _enclose_reader_ctx->adjust_column_sep_positions(bom_size);
839
1
        }
840
7
        return ptr + bom_size;
841
7
    }
842
10.7k
    return ptr;
843
10.7k
}
844
845
7.33k
// Release reader resources: the line reader first, then the underlying file
// reader it wraps. Only the file reader's close can report an error.
Status CsvReader::close() {
    if (_line_reader != nullptr) {
        _line_reader->close();
    }
    if (_file_reader != nullptr) {
        RETURN_IF_ERROR(_file_reader->close());
    }
    return Status::OK();
}
856
857
} // namespace doris