1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
use super::{AsyncReader, ByteRecord};
use ahash::AHashSet;
use crate::datatypes::{DataType, Field};
use crate::error::Result;
use crate::io::csv::utils::merge_schema;
use futures::{AsyncRead, AsyncSeek};
/// Infers a CSV file's schema by scanning at most `max_rows` records.
///
/// Column names come from the header row when `has_header` is true;
/// otherwise synthetic names `column_1`, `column_2`, … are generated from
/// the width of the first record. Each cell is passed to `infer` to obtain
/// a candidate [`DataType`]; the per-column candidate sets are then
/// resolved into concrete [`Field`]s via `merge_schema`.
///
/// The reader's position is restored to the start of the data before
/// returning, so the same reader can be reused to deserialize the records.
///
/// # Errors
/// Propagates any I/O or CSV parsing error from the underlying reader.
///
/// Returns the inferred fields together with the number of records scanned.
pub async fn infer_schema<R, F>(
    reader: &mut AsyncReader<R>,
    max_rows: Option<usize>,
    has_header: bool,
    infer: &F,
) -> Result<(Vec<Field>, usize)>
where
    R: AsyncRead + AsyncSeek + Unpin + Send + Sync,
    F: Fn(&[u8]) -> DataType,
{
    let headers: Vec<String> = if has_header {
        reader
            .headers()
            .await?
            .iter()
            .map(|s| s.to_string())
            .collect()
    } else {
        // No header row: use the first record only to count columns.
        // `len()` yields a `Copy` usize — bind it by value, not by reference.
        let first_record_count = reader.headers().await?.len();
        (0..first_record_count)
            .map(|i| format!("column_{}", i + 1))
            .collect()
    };

    // Remember where the data begins so we can rewind after sampling.
    let position = reader.position().clone();
    let header_length = headers.len();

    // One set of observed candidate types per column.
    let mut column_types: Vec<AHashSet<DataType>> = vec![AHashSet::new(); header_length];

    let mut records_count = 0;
    // Reuse a single record buffer across iterations to avoid per-row allocation.
    let mut record = ByteRecord::new();
    let max_records = max_rows.unwrap_or(usize::MAX);
    while records_count < max_records {
        if !reader.read_byte_record(&mut record).await? {
            // End of input.
            break;
        }
        records_count += 1;
        for (i, column) in column_types.iter_mut().enumerate() {
            // Short rows simply contribute nothing to the trailing columns.
            if let Some(string) = record.get(i) {
                column.insert(infer(string));
            }
        }
    }

    // Collapse each column's candidate set into a single Field.
    let fields = merge_schema(&headers, &mut column_types);

    // Rewind to the start of the data for the caller's subsequent read.
    reader.seek(position).await?;
    Ok((fields, records_count))
}