Kouhei Sutou	2018-10-30 16:01:47 +0900 (Tue, 30 Oct 2018)

  Revision: 48eb1f2c98711cedd8a1fc10a6e600d6af8f3931
  https://github.com/groonga/groonga/commit/48eb1f2c98711cedd8a1fc10a6e600d6af8f3931

  Message:
    test tokenize: add token filter options case

  Added files:
    test/command/suite/tokenize/with_token_filters_options.expected
    test/command/suite/tokenize/with_token_filters_options.test

  Added: test/command/suite/tokenize/with_token_filters_options.expected (+4 -0) 100644
===================================================================
--- /dev/null
+++ test/command/suite/tokenize/with_token_filters_options.expected    2018-10-30 16:01:47 +0900 (6d429ff94)
@@ -0,0 +1,4 @@
+plugin_register token_filters/stem
+[[0,0.0,0.0],true]
+tokenize TokenBigram "maintenait" NormalizerAuto --token_filters 'TokenFilterStem("algorithm", "french")'
+[[0,0.0,0.0],[{"value":"mainten","position":0,"force_prefix":false}]]

  Added: test/command/suite/tokenize/with_token_filters_options.test (+9 -0) 100644
===================================================================
--- /dev/null
+++ test/command/suite/tokenize/with_token_filters_options.test    2018-10-30 16:01:47 +0900 (48c15e91d)
@@ -0,0 +1,9 @@
+#@on-error omit
+plugin_register token_filters/stem
+#@on-error default
+
+tokenize \
+  TokenBigram \
+  "maintenait" \
+  NormalizerAuto \
+  --token_filters 'TokenFilterStem("algorithm", "french")'
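
The new test registers the token_filters/stem plugin (the "#@on-error omit" guard appears to let the test runner skip the case when the plugin is not available, e.g. when libstemmer is not built) and then checks that TokenFilterStem, with its "algorithm" option set to "french", stems the French word "maintenait" down to "mainten", as recorded in the .expected file.

For context, below is a minimal sketch of how the same option-bearing token filter could be attached to a lexicon at table-creation time. The table name is hypothetical, and it assumes a Groonga version whose table_create also accepts token filter options in --token_filters; treat it as an illustration rather than part of this commit.

  plugin_register token_filters/stem

  # Hypothetical lexicon table; the "algorithm" option selects the French
  # Snowball stemmer instead of the default English one.
  table_create Terms TABLE_PAT_KEY ShortText \
    --default_tokenizer TokenBigram \
    --normalizer NormalizerAuto \
    --token_filters 'TokenFilterStem("algorithm", "french")'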