Files
oddmu/tokenizer_test.go
Alex Schroeder 541725ecd7 Query string tokens include punctuation
Drop the tokenize function since there are now dedicated tokenizers
for query string and hashtag finding.
2023-09-27 08:24:33 +02:00

17 lines
422 B
Go

package main
import (
"github.com/stretchr/testify/assert"
"testing"
)
// TestHashtags verifies that hashtags extracts tags from a byte slice
// and lowercases them.
func TestHashtags(t *testing.T) {
	input := []byte("This is boring. #Truth")
	expected := []string{"#truth"}
	assert.EqualValues(t, expected, hashtags(input), "hashtags")
}
// TestTokensAndPredicates verifies that predicatesAndTokens splits a
// query string into predicate tokens (like "title:bar") and plain
// search terms.
func TestTokensAndPredicates(t *testing.T) {
	query := "foo title:bar"
	predicates, terms := predicatesAndTokens(query)
	assert.EqualValues(t, []string{"title:bar"}, predicates)
	assert.EqualValues(t, []string{"foo"}, terms)
}