3 files changed, +11 −12 lines
File 1 of 3:

@@ -19,7 +19,7 @@ type batch[T any] struct {
     done chan struct{}
 }

-const minBatchSize = 64
+const minBatchSize = 2

 func NewAsyncBatchIterator[T, N any](
     iterator Iterator[T],
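The diff does not show how minBatchSize is consumed, so the following is only a sketch under one assumption: that the constant acts as a floor applied to the caller-requested batch size inside NewAsyncBatchIterator. The package name and the clampBatchSize helper are hypothetical; only the constant itself comes from the diff.

package iter

// Hypothetical illustration, not the actual implementation.
// Assumes minBatchSize is a lower bound on the batch size a caller asks for.
const minBatchSize = 2 // previously 64

func clampBatchSize(requested int) int {
    if requested < minBatchSize {
        // Under the old floor of 64, a request for, say, 8 rows was raised
        // to 64; with a floor of 2 such small requests are honored.
        return minBatchSize
    }
    return requested
}

Under that assumption, lowering the floor from 64 to 2 lets callers that ask for very small batches actually get them, at the cost of more frequent batch refills.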
File 2 of 3:

@@ -28,12 +28,6 @@ type repeatedRowIterator[T any] struct {
 }

 const (
-    // Batch size specifies how many rows to be read
-    // from a column at once. Note that the batched rows
-    // are buffered in-memory, but not reference pages
-    // they were read from.
-    defaultRepeatedRowIteratorBatchSize = 32
-
     // The value specifies how many individual values to be
     // read (decoded) from the page.
     //
@@ -57,8 +51,14 @@ func NewRepeatedRowIterator[T any](
         rows:    rows,
         columns: NewMultiColumnIterator(ctx,
             WrapWithRowNumber(rowNumbers),
-            defaultRepeatedRowIteratorBatchSize,
-            rowGroups, columns...),
+            // Batch size specifies how many rows to be read
+            // from a column at once. Note that the batched rows
+            // are buffered in-memory, but not reference pages
+            // they were read from.
+            4,
+            rowGroups,
+            columns...,
+        ),
     }
 }

File 3 of 3:

@@ -1,6 +1,7 @@
 package query

 import (
+    "slices"
     "strings"

     "github.com/colega/zeropool"
@@ -86,9 +87,7 @@ var parquetValuesPool = zeropool.New(func() []parquet.Value { return nil })

 func CloneParquetValues(values []parquet.Value) []parquet.Value {
     p := parquetValuesPool.Get()
-    if l := len(values); cap(p) < l {
-        p = make([]parquet.Value, 0, 2*l)
-    }
+    p = slices.Grow(p, len(values))
     p = p[:len(values)]
     for i, v := range values {
         p[i] = v.Clone()
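For context on the last hunk: slices.Grow, available since Go 1.21, increases a slice's capacity (if needed) so that another n elements fit, and returns the slice with its length unchanged, which is what the removed manual cap check did by hand. A minimal standalone sketch of the equivalence, with plain ints standing in for parquet.Value and the pool left out:

package main

import (
    "fmt"
    "slices"
)

func main() {
    src := make([]int, 5)

    // Old style from the diff, transcribed for ints: allocate a fresh
    // buffer only when the recycled one is too small, with 2x headroom.
    var p []int
    if l := len(src); cap(p) < l {
        p = make([]int, 0, 2*l)
    }

    // New style: guarantee capacity for len(src) more elements while
    // keeping the current length (0 here).
    q := slices.Grow([]int(nil), len(src))

    // Either way, re-slicing to len(src) is safe before copying values in.
    p, q = p[:len(src)], q[:len(src)]
    fmt.Println(len(p), cap(p) >= len(src)) // 5 true
    fmt.Println(len(q), cap(q) >= len(src)) // 5 true
}

One subtlety: Grow reserves room for n elements beyond the slice's current length, so if the pooled buffer ever comes back with a non-zero length the new code may reserve slightly more than the old check required; for an empty or nil buffer the two are equivalent.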