Kouhei Sutou 2019-04-17 12:11:06 +0900 (Wed, 17 Apr 2019)

  Revision: 1750e487ab3d1c999546923338946cbc68ded311
  https://github.com/groonga/groonga/commit/1750e487ab3d1c999546923338946cbc68ded311

  Message:
    logical_count: add support for shard over window function

  Copied files:
    test/command/suite/sharding/logical_count/columns/stage/filtered/window_function.expected
      (from test/command/suite/sharding/logical_count/post_filter/min.expected)
    test/command/suite/sharding/logical_count/columns/stage/filtered/window_function.test
      (from test/command/suite/sharding/logical_count/post_filter/no_filter.expected)
    test/command/suite/sharding/logical_count/columns/stage/initial/window_function.expected
      (from test/command/suite/sharding/logical_count/post_filter/min.expected)
    test/command/suite/sharding/logical_count/columns/stage/initial/window_function.test
      (from test/command/suite/sharding/logical_count/post_filter/no_filter.expected)

  Modified files:
    plugins/sharding/logical_count.rb
    test/command/suite/sharding/logical_count/columns/stage/initial/filter.expected
    test/command/suite/sharding/logical_count/post_filter/filtered_column.expected
    test/command/suite/sharding/logical_count/post_filter/min.expected
    test/command/suite/sharding/logical_count/post_filter/no_filter.expected
    test/command/suite/sharding/logical_count/post_filter/no_filtered_column.expected

  Modified: plugins/sharding/logical_count.rb (+114 -87)
===================================================================
--- plugins/sharding/logical_count.rb    2019-04-17 10:17:28 +0900 (e1f9a94b6)
+++ plugins/sharding/logical_count.rb    2019-04-17 12:11:06 +0900 (490dfd266)
@@ -19,9 +19,14 @@ module Groonga
         counter = Counter.new(input, enumerator.target_range)
         total = 0
         have_shard = false
-        enumerator.each do |shard, shard_range|
-          have_shard = true
-          total += counter.count(shard, shard_range)
+        begin
+          enumerator.each do |shard, shard_range|
+            have_shard = true
+            counter.count_pre(shard, shard_range)
+          end
+          total += counter.count
+        ensure
+          counter.close
         end
         unless have_shard
           message =
@@ -50,6 +55,19 @@ module Groonga
           key
         end
 
+      class ShardCountContext
+        attr_reader :shard
+        attr_reader :cover_type
+        attr_reader :range_index
+        attr_accessor :table
+        def initialize(shard, cover_type, range_index)
+          @shard = shard
+          @cover_type = cover_type
+          @range_index = range_index
+          @table = shard.table
+        end
+      end
+
       class Counter
         def initialize(input, target_range)
           @logger = Context.instance.logger
@@ -57,11 +75,14 @@ module Groonga
           @post_filter = input[:post_filter]
           @dynamic_columns = DynamicColumns.parse(input)
           @target_range = target_range
+          @contexts = []
+          @temporary_tables = []
+          @temporary_expressions = []
         end
 
-        def count(shard, shard_range)
+        def count_pre(shard, shard_range)
           cover_type = @target_range.cover_type(shard_range)
-          return 0 if cover_type == :none
+          return if cover_type == :none
 
           shard_key = shard.key
           if shard_key.nil?
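Editorial note, not part of the commit: the hunks above replace the old one-shot
Counter#count with a three-phase flow -- count_pre only registers each shard,
count prepares every registered shard together and sums the results, and close
releases temporaries even when counting fails. A minimal, self-contained sketch
of that pattern in plain Ruby follows; the SketchCounter and ShardContext names
and the filter lambda are made up for illustration, and none of this uses
Groonga's actual plugin API.

# Plain-Ruby sketch of the register / prepare-and-count / close flow.
ShardContext = Struct.new(:name, :records, :filtered)

class SketchCounter
  def initialize(filter)
    @filter = filter   # a Proc standing in for --filter
    @contexts = []     # one entry per shard, like @contexts in the plugin
    @temporaries = []  # stand-in for temporary tables and expressions
  end

  # Phase 1: remember the shard; nothing is counted yet.
  def count_pre(name, records)
    @contexts << ShardContext.new(name, records, nil)
  end

  # Phase 2: prepare all shards at once, then sum their sizes.
  def count
    prepare_contexts
    @contexts.sum { |context| context.filtered.size }
  end

  # Phase 3: release whatever prepare_contexts allocated.
  def close
    @temporaries.clear
  end

  private

  # Stand-in for applying dynamic columns and filters to every shard before
  # any shard is counted; preparing all shards together is what lets a window
  # function see records from more than one shard.
  def prepare_contexts
    @contexts.each do |context|
      filtered = context.records.select { |record| @filter.call(record) }
      @temporaries << filtered
      context.filtered = filtered
    end
  end
end

counter = SketchCounter.new(->(record) { record[:price] >= 300 })
begin
  counter.count_pre("Logs_20170315", [{price: 1000}, {price: 300}, {price: 100}])
  counter.count_pre("Logs_20170316", [{price: 530}, {price: 110}])
  puts counter.count  # => 3
ensure
  counter.close
end

The remaining hunks below show how the real count_pre, prepare_contexts,
filter_shard, count_shard, and post_filter_shard fit together in the plugin.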
@@ -69,41 +90,41 @@ module Groonga
                       "<#{shard.key_name}>"
             raise InvalidArgument, message
           end
 
-          table_name = shard.table_name
-          prepare_table(shard) do |table|
-            if cover_type == :all
-              log_use_range_index(false, table_name, "covered",
-                                  __LINE__, __method__)
-              if @filter or @post_filter
-                return filtered_count_n_records(table, shard_key, cover_type)
-              else
-                return table.size
-              end
-            end
-
-            range_index = nil
-            if @filter or @post_filter
-              log_use_range_index(false, table_name, "need filter",
+          table_name = shard.table_name
+          range_index = nil
+          if @filter or @post_filter
+            log_use_range_index(false, table_name, "need filter",
+                                __LINE__, __method__)
+          elsif cover_type == :all
+            log_use_range_index(false, table_name, "covered",
+                                __LINE__, __method__)
+          else
+            index_info = shard_key.find_index(Operator::LESS)
+            range_index = index_info.index if index_info
+            if range_index
+              log_use_range_index(true, table_name, "range index is available",
                                   __LINE__, __method__)
             else
-              index_info = shard_key.find_index(Operator::LESS)
-              range_index = index_info.index if index_info
-              if range_index
-                log_use_range_index(true, table_name, "range index is available",
-                                    __LINE__, __method__)
-              else
-                log_use_range_index(false, table_name, "no range index",
-                                    __LINE__, __method__)
-              end
+              log_use_range_index(false, table_name, "no range index",
+                                  __LINE__, __method__)
             end
+          end
+          @contexts << ShardCountContext.new(shard, cover_type, range_index)
+        end
 
-            if range_index
-              count_n_records_in_range(range_index, cover_type)
-            else
-              filtered_count_n_records(table, shard_key, cover_type)
-            end
+        def count
+          prepare_contexts
+          total = 0
+          @contexts.each do |context|
+            total += count_shard(context)
           end
+          total
+        end
+
+        def close
+          @temporary_tables.each(&:close)
+          @temporary_expressions.each(&:close)
         end
 
         private
@@ -122,34 +143,51 @@ module Groonga
                               message)
         end
 
-        def prepare_table(shard)
-          table = shard.table
-          return yield(table) if @filter.nil? and @post_filter.nil?
-
-          begin
-            @dynamic_columns.each_initial do |dynamic_column|
-              if table == shard.table
-                table = table.select_all
+        def prepare_contexts
+          if @filter or @post_filter
+            if @dynamic_columns.have_initial?
+              apply_targets = []
+              @contexts.each do |context|
+                table = context.table.select_all
+                @temporary_tables << table
+                context.table = table
+                apply_targets << [table, nil]
               end
-              dynamic_column.apply(table)
+              @dynamic_columns.apply_initial(apply_targets)
+            end
+          end
+          @contexts.each do |context|
+            filter_shard(context)
+          end
+          if @post_filter
+            if @dynamic_columns.have_filtered?
+              apply_targets = @contexts.collect do |context|
+                [context.table, nil]
+              end
+              @dynamic_columns.apply_filtered(apply_targets)
+            end
+            @contexts.each do |context|
+              post_filter_shard(context)
             end
-
-            yield(table)
-          ensure
-            table.close if table != shard.table
           end
         end
 
-        def filtered_count_n_records(table, shard_key, cover_type)
-          expression = nil
-          filtered_table = nil
-
-          expression_builder = RangeExpressionBuilder.new(shard_key,
-                                                          @target_range)
-          expression_builder.filter = @filter
-          begin
-            expression = Expression.create(table)
-            case cover_type
+        def filter_shard(context)
+          return if context.range_index
+
+          if context.cover_type == :all and @filter.nil?
+            if @post_filter and @dynamic_columns.have_filtered?
+              filtered_table = context.table.select_all
+              @temporary_tables << filtered_table
+              context.table = filtered_table
+            end
+          else
+            expression = Expression.create(context.table)
+            @temporary_expressions << expression
+            expression_builder = RangeExpressionBuilder.new(context.shard.key,
+                                                            @target_range)
+            expression_builder.filter = @filter
+            case context.cover_type
             when :all
               expression_builder.build_all(expression)
             when :partial_min
@@ -159,44 +197,33 @@ module Groonga
             when :partial_min_and_max
               expression_builder.build_partial_min_and_max(expression)
             end
-            if cover_type == :all and @filter.nil?
-              # TODO: We can drop needless select when filtered stage dynamic
-              # doesn't exist.
-              filtered_table = table.select_all
-            else
-              filtered_table = table.select(expression)
-            end
-            if @post_filter
-              post_filtered_count_n_records(filtered_table)
-            else
-              filtered_table.size
-            end
-          ensure
-            filtered_table.close if filtered_table
-            expression.close if expression
+            filtered_table = context.table.select(expression)
+            @temporary_tables << filtered_table
+            context.table = filtered_table
           end
         end
 
-        def post_filtered_count_n_records(filtered_table)
-          @dynamic_columns.each_filtered do |dynamic_column|
-            dynamic_column.apply(filtered_table)
+        def count_shard(context)
+          if context.range_index
+            count_n_records_in_range(context)
+          else
+            context.table.size
           end
+        end
 
+        def post_filter_shard(context)
           expression = nil
           post_filtered_table = nil
-          begin
-            expression = Expression.create(filtered_table)
-            expression.parse(@post_filter)
-            post_filtered_table = filtered_table.select(expression)
-            post_filtered_table.size
-          ensure
-            post_filtered_table.close if post_filtered_table
-            expression.close if expression
-          end
+          expression = Expression.create(context.table)
+          @temporary_expressions << expression
+          expression.parse(@post_filter)
+          filtered_table = context.table.select(expression)
+          @temporary_tables << filtered_table
+          context.table = filtered_table
         end
 
-        def count_n_records_in_range(range_index, cover_type)
-          case cover_type
+        def count_n_records_in_range(context)
+          case context.cover_type
           when :partial_min
             min = @target_range.min
             min_border = @target_range.min_border
@@ -228,7 +255,7 @@ module Groonga
             flags |= TableCursorFlags::LT
           end
 
-          TableCursor.open(range_index.table,
+          TableCursor.open(context.range_index.table,
                            :min => min,
                            :max => max,
                            :flags => flags) do |table_cursor|

  Copied: test/command/suite/sharding/logical_count/columns/stage/filtered/window_function.expected (+12 -10) 58%
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/min.expected    2019-04-17 10:17:28 +0900 (9b3d021ed)
+++ test/command/suite/sharding/logical_count/columns/stage/filtered/window_function.expected    2019-04-17 12:11:06 +0900 (dc5776031)
@@ -1,5 +1,7 @@
 plugin_register sharding
 [[0,0.0,0.0],true]
+plugin_register functions/time
+[[0,0.0,0.0],true]
 table_create Logs_20170315 TABLE_NO_KEY
 [[0,0.0,0.0],true]
 column_create Logs_20170315 timestamp COLUMN_SCALAR Time
@@ -27,16 +29,16 @@ load --table Logs_20170315
 [[0,0.0,0.0],3]
 load --table Logs_20170316
 [
-{"timestamp": "2017/03/16 10:00:00", "price": 530},
-{"timestamp": "2017/03/16 11:00:00", "price": 520},
-{"timestamp": "2017/03/16 12:00:00", "price": 110}
+{"timestamp": "2017/03/16 00:00:00", "price": 530},
+{"timestamp": "2017/03/16 01:00:00", "price": 520},
+{"timestamp": "2017/03/16 02:00:00", "price": 110}
 ]
 [[0,0.0,0.0],3]
 load --table Logs_20170317
 [
-{"timestamp": "2017/03/17 20:00:00", "price": 800},
-{"timestamp": "2017/03/17 21:00:00", "price": 400},
-{"timestamp": "2017/03/17 22:00:00", "price": 300}
+{"timestamp": "2017/03/17 00:00:00", "price": 800},
+{"timestamp": "2017/03/17 01:00:00", "price": 400},
+{"timestamp": "2017/03/17 02:00:00", "price": 300}
 ]
 [[0,0.0,0.0],3]
 table_create Times TABLE_PAT_KEY Time
@@ -49,10 +51,10 @@ column_create Times logs_20170317 COLUMN_INDEX Logs_20170317 timestamp
 [[0,0.0,0.0],true]
 log_level --level debug
 [[0,0.0,0.0],true]
-logical_count Logs --shard_key timestamp --min "2017/03/15 01:00:00" --min_border "include" --columns[filtered_id].stage filtered --columns[filtered_id].type UInt32 --columns[filtered_id].flags COLUMN_SCALAR --columns[filtered_id].value '_id' --filter 'price >= 200' --post_filter 'filtered_id < 3'
-[[0,0.0,0.0],5]
+logical_count Logs --shard_key timestamp --columns[slided_day].stage initial --columns[slided_day].type Time --columns[slided_day].flags COLUMN_SCALAR --columns[slided_day].value 'time_classify_day(timestamp - 7200000000)' --filter 'price >= 300' --columns[price_per_day].stage filtered --columns[price_per_day].type UInt32 --columns[price_per_day].flags COLUMN_SCALAR --columns[price_per_day].value 'window_sum(price)' --columns[price_per_day].window.group_keys 'slided_day' --post_filter 'price_per_day <= 1200'
+[[0,0.0,0.0],3]
 #|d| [logical_count][select] <Logs_20170315>: need filter
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+#|d| [logical_count][select] <Logs_20170316>: need filter
+#|d| [logical_count][select] <Logs_20170317>: need filter
 log_level --level notice
 [[0,0.0,0.0],true]

  Copied: test/command/suite/sharding/logical_count/columns/stage/filtered/window_function.test (+32 -30) 50%
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/no_filter.expected    2019-04-17 10:17:28 +0900 (11ca3490e)
+++ test/command/suite/sharding/logical_count/columns/stage/filtered/window_function.test    2019-04-17 12:11:06 +0900 (296112bed)
@@ -1,58 +1,60 @@
+#@on-error omit
 plugin_register sharding
-[[0,0.0,0.0],true]
+plugin_register functions/time
+#@on-error default
+
 table_create Logs_20170315 TABLE_NO_KEY
-[[0,0.0,0.0],true]
 column_create Logs_20170315 timestamp COLUMN_SCALAR Time
-[[0,0.0,0.0],true]
 column_create Logs_20170315 price COLUMN_SCALAR UInt32
-[[0,0.0,0.0],true]
+
 table_create Logs_20170316 TABLE_NO_KEY
-[[0,0.0,0.0],true]
 column_create Logs_20170316 timestamp COLUMN_SCALAR Time
-[[0,0.0,0.0],true]
 column_create Logs_20170316 price COLUMN_SCALAR UInt32
-[[0,0.0,0.0],true]
+
 table_create Logs_20170317 TABLE_NO_KEY
-[[0,0.0,0.0],true]
 column_create Logs_20170317 timestamp COLUMN_SCALAR Time
-[[0,0.0,0.0],true]
 column_create Logs_20170317 price COLUMN_SCALAR UInt32
-[[0,0.0,0.0],true]
+
 load --table Logs_20170315
 [
 {"timestamp": "2017/03/15 00:00:00", "price": 1000},
 {"timestamp": "2017/03/15 01:00:00", "price": 900},
 {"timestamp": "2017/03/15 02:00:00", "price": 300}
 ]
-[[0,0.0,0.0],3]
+
 load --table Logs_20170316
 [
-{"timestamp": "2017/03/16 10:00:00", "price": 530},
-{"timestamp": "2017/03/16 11:00:00", "price": 520},
-{"timestamp": "2017/03/16 12:00:00", "price": 110}
+{"timestamp": "2017/03/16 00:00:00", "price": 530},
+{"timestamp": "2017/03/16 01:00:00", "price": 520},
+{"timestamp": "2017/03/16 02:00:00", "price": 110}
 ]
-[[0,0.0,0.0],3]
+
 load --table Logs_20170317
 [
-{"timestamp": "2017/03/17 20:00:00", "price": 800},
-{"timestamp": "2017/03/17 21:00:00", "price": 400},
-{"timestamp": "2017/03/17 22:00:00", "price": 300}
+{"timestamp": "2017/03/17 00:00:00", "price": 800},
+{"timestamp": "2017/03/17 01:00:00", "price": 400},
+{"timestamp": "2017/03/17 02:00:00", "price": 300}
 ]
-[[0,0.0,0.0],3]
+
 table_create Times TABLE_PAT_KEY Time
-[[0,0.0,0.0],true]
 column_create Times logs_20170315 COLUMN_INDEX Logs_20170315 timestamp
-[[0,0.0,0.0],true]
 column_create Times logs_20170316 COLUMN_INDEX Logs_20170316 timestamp
-[[0,0.0,0.0],true]
 column_create Times logs_20170317 COLUMN_INDEX Logs_20170317 timestamp
-[[0,0.0,0.0],true]
+
+#@add-important-log-levels debug
 log_level --level debug
-[[0,0.0,0.0],true]
-logical_count Logs --shard_key timestamp --post_filter '_id < 3'
-[[0,0.0,0.0],6]
-#|d| [logical_count][select] <Logs_20170315>: covered
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+logical_count Logs \
+  --shard_key timestamp \
+  --columns[slided_day].stage initial \
+  --columns[slided_day].type Time \
+  --columns[slided_day].flags COLUMN_SCALAR \
+  --columns[slided_day].value 'time_classify_day(timestamp - 7200000000)' \
+  --filter 'price >= 300' \
+  --columns[price_per_day].stage filtered \
+  --columns[price_per_day].type UInt32 \
+  --columns[price_per_day].flags COLUMN_SCALAR \
+  --columns[price_per_day].value 'window_sum(price)' \
+  --columns[price_per_day].window.group_keys 'slided_day' \
+  --post_filter 'price_per_day <= 1200'
 log_level --level notice
-[[0,0.0,0.0],true]
+#@remove-important-log-levels debug

  Modified: test/command/suite/sharding/logical_count/columns/stage/initial/filter.expected (+3 -3)
===================================================================
--- test/command/suite/sharding/logical_count/columns/stage/initial/filter.expected    2019-04-17 10:17:28 +0900 (636add034)
+++ test/command/suite/sharding/logical_count/columns/stage/initial/filter.expected    2019-04-17 12:11:06 +0900 (515cefe9e)
@@ -51,8 +51,8 @@ log_level --level debug
 [[0,0.0,0.0],true]
 logical_count Logs --shard_key timestamp --columns[price_with_tax].stage initial --columns[price_with_tax].type UInt32 --columns[price_with_tax].flags COLUMN_SCALAR --columns[price_with_tax].value 'price * 1.08' --filter 'price_with_tax > 550'
 [[0,0.0,0.0],5]
-#|d| [logical_count][select] <Logs_20170315>: covered
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+#|d| [logical_count][select] <Logs_20170315>: need filter
+#|d| [logical_count][select] <Logs_20170316>: need filter
+#|d| [logical_count][select] <Logs_20170317>: need filter
 log_level --level notice
 [[0,0.0,0.0],true]

  Copied: test/command/suite/sharding/logical_count/columns/stage/initial/window_function.expected (+11 -9) 59%
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/min.expected    2019-04-17 10:17:28 +0900 (9b3d021ed)
+++ test/command/suite/sharding/logical_count/columns/stage/initial/window_function.expected    2019-04-17 12:11:06 +0900 (6b841835d)
@@ -1,5 +1,7 @@
 plugin_register sharding
 [[0,0.0,0.0],true]
+plugin_register functions/time
+[[0,0.0,0.0],true]
 table_create Logs_20170315 TABLE_NO_KEY
 [[0,0.0,0.0],true]
 column_create Logs_20170315 timestamp COLUMN_SCALAR Time
@@ -27,16 +29,16 @@ load --table Logs_20170315
 [[0,0.0,0.0],3]
 load --table Logs_20170316
 [
-{"timestamp": "2017/03/16 10:00:00", "price": 530},
-{"timestamp": "2017/03/16 11:00:00", "price": 520},
-{"timestamp": "2017/03/16 12:00:00", "price": 110}
+{"timestamp": "2017/03/16 00:00:00", "price": 530},
+{"timestamp": "2017/03/16 01:00:00", "price": 520},
+{"timestamp": "2017/03/16 02:00:00", "price": 110}
 ]
 [[0,0.0,0.0],3]
 load --table Logs_20170317
 [
-{"timestamp": "2017/03/17 20:00:00", "price": 800},
-{"timestamp": "2017/03/17 21:00:00", "price": 400},
-{"timestamp": "2017/03/17 22:00:00", "price": 300}
+{"timestamp": "2017/03/17 00:00:00", "price": 800},
+{"timestamp": "2017/03/17 01:00:00", "price": 400},
+{"timestamp": "2017/03/17 02:00:00", "price": 300}
 ]
 [[0,0.0,0.0],3]
 table_create Times TABLE_PAT_KEY Time
@@ -49,10 +51,10 @@ column_create Times logs_20170317 COLUMN_INDEX Logs_20170317 timestamp
 [[0,0.0,0.0],true]
 log_level --level debug
 [[0,0.0,0.0],true]
-logical_count Logs --shard_key timestamp --min "2017/03/15 01:00:00" --min_border "include" --columns[filtered_id].stage filtered --columns[filtered_id].type UInt32 --columns[filtered_id].flags COLUMN_SCALAR --columns[filtered_id].value '_id' --filter 'price >= 200' --post_filter 'filtered_id < 3'
+logical_count Logs --shard_key timestamp --columns[slided_day].stage initial --columns[slided_day].type Time --columns[slided_day].flags COLUMN_SCALAR --columns[slided_day].value 'time_classify_day(timestamp - 7200000000)' --columns[price_per_day].stage initial --columns[price_per_day].type UInt32 --columns[price_per_day].flags COLUMN_SCALAR --columns[price_per_day].value 'window_sum(price)' --columns[price_per_day].window.group_keys 'slided_day' --filter 'price_per_day >= 1350'
 [[0,0.0,0.0],5]
 #|d| [logical_count][select] <Logs_20170315>: need filter
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+#|d| [logical_count][select] <Logs_20170316>: need filter
+#|d| [logical_count][select] <Logs_20170317>: need filter
 log_level --level notice
 [[0,0.0,0.0],true]

  Copied: test/command/suite/sharding/logical_count/columns/stage/initial/window_function.test (+31 -30) 51%
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/no_filter.expected    2019-04-17 10:17:28 +0900 (11ca3490e)
+++ test/command/suite/sharding/logical_count/columns/stage/initial/window_function.test    2019-04-17 12:11:06 +0900 (88d671a8f)
@@ -1,58 +1,59 @@
+#@on-error omit
 plugin_register sharding
-[[0,0.0,0.0],true]
+plugin_register functions/time
+#@on-error default
+
 table_create Logs_20170315 TABLE_NO_KEY
-[[0,0.0,0.0],true]
 column_create Logs_20170315 timestamp COLUMN_SCALAR Time
-[[0,0.0,0.0],true]
 column_create Logs_20170315 price COLUMN_SCALAR UInt32
-[[0,0.0,0.0],true]
+
 table_create Logs_20170316 TABLE_NO_KEY
-[[0,0.0,0.0],true]
 column_create Logs_20170316 timestamp COLUMN_SCALAR Time
-[[0,0.0,0.0],true]
 column_create Logs_20170316 price COLUMN_SCALAR UInt32
-[[0,0.0,0.0],true]
+
 table_create Logs_20170317 TABLE_NO_KEY
-[[0,0.0,0.0],true]
 column_create Logs_20170317 timestamp COLUMN_SCALAR Time
-[[0,0.0,0.0],true]
 column_create Logs_20170317 price COLUMN_SCALAR UInt32
-[[0,0.0,0.0],true]
+
 load --table Logs_20170315
 [
 {"timestamp": "2017/03/15 00:00:00", "price": 1000},
 {"timestamp": "2017/03/15 01:00:00", "price": 900},
 {"timestamp": "2017/03/15 02:00:00", "price": 300}
 ]
-[[0,0.0,0.0],3]
+
 load --table Logs_20170316
 [
-{"timestamp": "2017/03/16 10:00:00", "price": 530},
-{"timestamp": "2017/03/16 11:00:00", "price": 520},
-{"timestamp": "2017/03/16 12:00:00", "price": 110}
+{"timestamp": "2017/03/16 00:00:00", "price": 530},
+{"timestamp": "2017/03/16 01:00:00", "price": 520},
+{"timestamp": "2017/03/16 02:00:00", "price": 110}
 ]
-[[0,0.0,0.0],3]
+
 load --table Logs_20170317
 [
-{"timestamp": "2017/03/17 20:00:00", "price": 800},
-{"timestamp": "2017/03/17 21:00:00", "price": 400},
-{"timestamp": "2017/03/17 22:00:00", "price": 300}
+{"timestamp": "2017/03/17 00:00:00", "price": 800},
+{"timestamp": "2017/03/17 01:00:00", "price": 400},
+{"timestamp": "2017/03/17 02:00:00", "price": 300}
 ]
-[[0,0.0,0.0],3]
+
 table_create Times TABLE_PAT_KEY Time
-[[0,0.0,0.0],true]
 column_create Times logs_20170315 COLUMN_INDEX Logs_20170315 timestamp
-[[0,0.0,0.0],true]
 column_create Times logs_20170316 COLUMN_INDEX Logs_20170316 timestamp
-[[0,0.0,0.0],true]
 column_create Times logs_20170317 COLUMN_INDEX Logs_20170317 timestamp
-[[0,0.0,0.0],true]
+
+#@add-important-log-levels debug
 log_level --level debug
-[[0,0.0,0.0],true]
-logical_count Logs --shard_key timestamp --post_filter '_id < 3'
-[[0,0.0,0.0],6]
-#|d| [logical_count][select] <Logs_20170315>: covered
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+logical_count Logs \
+  --shard_key timestamp \
+  --columns[slided_day].stage initial \
+  --columns[slided_day].type Time \
+  --columns[slided_day].flags COLUMN_SCALAR \
+  --columns[slided_day].value 'time_classify_day(timestamp - 7200000000)' \
+  --columns[price_per_day].stage initial \
+  --columns[price_per_day].type UInt32 \
+  --columns[price_per_day].flags COLUMN_SCALAR \
+  --columns[price_per_day].value 'window_sum(price)' \
+  --columns[price_per_day].window.group_keys 'slided_day' \
+  --filter 'price_per_day >= 1350'
 log_level --level notice
-[[0,0.0,0.0],true]
+#@remove-important-log-levels debug

  Modified: test/command/suite/sharding/logical_count/post_filter/filtered_column.expected (+3 -3)
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/filtered_column.expected    2019-04-17 10:17:28 +0900 (372a3a3a4)
+++ test/command/suite/sharding/logical_count/post_filter/filtered_column.expected    2019-04-17 12:11:06 +0900 (c5e20a389)
@@ -51,8 +51,8 @@ log_level --level debug
 [[0,0.0,0.0],true]
 logical_count Logs --shard_key timestamp --columns[filtered_id].stage filtered --columns[filtered_id].type UInt32 --columns[filtered_id].flags COLUMN_SCALAR --columns[filtered_id].value '_id' --filter 'price >= 200' --post_filter 'filtered_id > 1'
 [[0,0.0,0.0],5]
-#|d| [logical_count][select] <Logs_20170315>: covered
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+#|d| [logical_count][select] <Logs_20170315>: need filter
+#|d| [logical_count][select] <Logs_20170316>: need filter
+#|d| [logical_count][select] <Logs_20170317>: need filter
 log_level --level notice
 [[0,0.0,0.0],true]

  Modified: test/command/suite/sharding/logical_count/post_filter/min.expected (+2 -2)
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/min.expected    2019-04-17 10:17:28 +0900 (9b3d021ed)
+++ test/command/suite/sharding/logical_count/post_filter/min.expected    2019-04-17 12:11:06 +0900 (9b5c0c9dc)
@@ -52,7 +52,7 @@ log_level --level debug
 logical_count Logs --shard_key timestamp --min "2017/03/15 01:00:00" --min_border "include" --columns[filtered_id].stage filtered --columns[filtered_id].type UInt32 --columns[filtered_id].flags COLUMN_SCALAR --columns[filtered_id].value '_id' --filter 'price >= 200' --post_filter 'filtered_id < 3'
 [[0,0.0,0.0],5]
 #|d| [logical_count][select] <Logs_20170315>: need filter
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+#|d| [logical_count][select] <Logs_20170316>: need filter
+#|d| [logical_count][select] <Logs_20170317>: need filter
 log_level --level notice
 [[0,0.0,0.0],true]

  Modified: test/command/suite/sharding/logical_count/post_filter/no_filter.expected (+3 -3)
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/no_filter.expected    2019-04-17 10:17:28 +0900 (11ca3490e)
+++ test/command/suite/sharding/logical_count/post_filter/no_filter.expected    2019-04-17 12:11:06 +0900 (50f6c2d9e)
@@ -51,8 +51,8 @@ log_level --level debug
 [[0,0.0,0.0],true]
 logical_count Logs --shard_key timestamp --post_filter '_id < 3'
 [[0,0.0,0.0],6]
-#|d| [logical_count][select] <Logs_20170315>: covered
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+#|d| [logical_count][select] <Logs_20170315>: need filter
+#|d| [logical_count][select] <Logs_20170316>: need filter
+#|d| [logical_count][select] <Logs_20170317>: need filter
 log_level --level notice
 [[0,0.0,0.0],true]

  Modified: test/command/suite/sharding/logical_count/post_filter/no_filtered_column.expected (+3 -3)
===================================================================
--- test/command/suite/sharding/logical_count/post_filter/no_filtered_column.expected    2019-04-17 10:17:28 +0900 (62caba77c)
+++ test/command/suite/sharding/logical_count/post_filter/no_filtered_column.expected    2019-04-17 12:11:06 +0900 (baa559af5)
@@ -51,8 +51,8 @@ log_level --level debug
 [[0,0.0,0.0],true]
 logical_count Logs --shard_key timestamp --filter 'price >= 200' --post_filter '_id > 1'
 [[0,0.0,0.0],5]
-#|d| [logical_count][select] <Logs_20170315>: covered
-#|d| [logical_count][select] <Logs_20170316>: covered
-#|d| [logical_count][select] <Logs_20170317>: covered
+#|d| [logical_count][select] <Logs_20170315>: need filter
+#|d| [logical_count][select] <Logs_20170316>: need filter
+#|d| [logical_count][select] <Logs_20170317>: need filter
 log_level --level notice
 [[0,0.0,0.0],true]
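Editorial note, not part of the commit: the new columns/stage/initial/window_function
test is the case that actually needs "shard over window function" support. Because
slided_day is time_classify_day(timestamp - 7200000000), i.e. the timestamp shifted
back two hours, the 02:00 record of each daily shard falls into the same slided_day
group as the following shard's records, so window_sum(price) per slided_day can only
be computed once every shard has been prepared together -- which is what the new
count_pre/count/close flow above makes possible. A plain-Ruby re-calculation of the
expected count (this is only arithmetic over the test data, not Groonga code):

# Recompute the expectation of columns/stage/initial/window_function.expected.
require "time"

records = [
  ["2017/03/15 00:00:00", 1000], ["2017/03/15 01:00:00", 900],
  ["2017/03/15 02:00:00", 300],
  ["2017/03/16 00:00:00", 530], ["2017/03/16 01:00:00", 520],
  ["2017/03/16 02:00:00", 110],
  ["2017/03/17 00:00:00", 800], ["2017/03/17 01:00:00", 400],
  ["2017/03/17 02:00:00", 300],
]

# slided_day: time_classify_day(timestamp - 7200000000), i.e. minus two hours.
grouped = records.group_by do |timestamp, _price|
  (Time.strptime(timestamp, "%Y/%m/%d %H:%M:%S") - 2 * 60 * 60).strftime("%Y/%m/%d")
end

# price_per_day: window_sum(price) over each slided_day group.
sums = grouped.transform_values { |group| group.sum { |_timestamp, price| price } }
# => {"2017/03/14"=>1900, "2017/03/15"=>1350, "2017/03/16"=>1310, "2017/03/17"=>300}

# --filter 'price_per_day >= 1350' keeps the records of the 03/14 and 03/15
# groups: 2 + 3 = 5 records, matching [[0,0.0,0.0],5] in the .expected file.
count = grouped.sum { |day, group| sums[day] >= 1350 ? group.size : 0 }
puts count  # => 5

The filtered-stage variant works the same way: applying --filter 'price >= 300'
first leaves per-group sums of 1900, 1350, 1200, and 300, and the post filter
'price_per_day <= 1200' then keeps the last two groups' 2 + 1 = 3 records,
matching [[0,0.0,0.0],3] in columns/stage/filtered/window_function.expected.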