Details
Type: Bug
Status: Open
Priority: Minor
Resolution: Unresolved
Description
The following query reads the full t1 table, although it should only need to fill the tmp table with a single row: the value of min(a) and an arbitrary value of a.
The problem seems to be that 'a' inside bit_and(a) is treated as a separate column to fetch, even though a value of a could be obtained while computing min(a).
create table t1 (a int primary key);
insert into t1 values (2), (1), (0);
analyze format=json select bit_and(a) over(order by 1), min(a) from t1;
ANALYZE
{
  "query_block": {
    "select_id": 1,
    "r_loops": 1,
    "r_total_time_ms": 14744,
    "window_functions_computation": {
      "sorts": {
        "filesort": {
          "sort_key": "1",
          "r_loops": 1,
          "r_total_time_ms": 0.0835,
          "r_used_priority_queue": false,
          "r_output_rows": 1,
          "r_buffer_size": "264"
        }
      },
      "temporary_table": {
        "table": {
          "table_name": "t1",
          "access_type": "index",
          "key": "PRIMARY",
          "key_length": "4",
          "used_key_parts": ["a"],
          "r_loops": 1,
          "rows": 3,
          "r_rows": 3,
          "r_total_time_ms": 0.0285,
          "filtered": 100,
          "r_filtered": 100,
          "using_index": true
        }
      }
    }
  }
}
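For comparison, min(a) on an indexed column can normally be answered from the index alone, without scanning the table rows. A plain EXPLAIN of the aggregate by itself illustrates the contrast; this is a sketch of the expected plan, assuming the usual MIN/MAX optimization applies:

explain select min(a) from t1;
-- Expected Extra: "Select tables optimized away" -- min(a) is read
-- directly from the first entry of the PRIMARY index, so no scan of
-- t1 is needed. Adding bit_and(a) over(order by 1) to the select list
-- is what forces the full index scan seen in the ANALYZE output above,
-- even though that value of a is already available.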