# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from azure.search.documents.indexes.models import (
SearchIndex,
RegexFlags,
PatternAnalyzer,
PatternTokenizer,
)
from azure.search.documents.indexes._generated.models import (
PatternAnalyzer as _PatternAnalyzer,
PatternTokenizer as _PatternTokenizer,
)
def test_unpack_search_index():
    """A generated index with single-string regex flags unpacks to list-valued flags."""
    generated_analyzer = _PatternAnalyzer(name="test_analyzer", flags="CANON_EQ")
    generated_tokenizer = _PatternTokenizer(name="test_tokenizer", flags="CANON_EQ")
    index = SearchIndex(
        name="test",
        fields=None,
        analyzers=[generated_analyzer],
        tokenizers=[generated_tokenizer],
    )

    unpacked = SearchIndex._from_generated(index)

    # The single flag string should become a one-element list on the public model.
    analyzer = unpacked.analyzers[0]
    assert isinstance(analyzer, PatternAnalyzer)
    assert isinstance(analyzer.flags, list)
    assert analyzer.flags[0] == "CANON_EQ"

    tokenizer = unpacked.tokenizers[0]
    assert isinstance(tokenizer, PatternTokenizer)
    assert isinstance(tokenizer.flags, list)
    assert tokenizer.flags[0] == "CANON_EQ"
def test_multi_unpack_search_index():
    """A '|'-separated flags string unpacks into a list with one entry per flag."""
    generated_analyzer = _PatternAnalyzer(name="test_analyzer", flags="CANON_EQ|MULTILINE")
    generated_tokenizer = _PatternTokenizer(name="test_tokenizer", flags="CANON_EQ|MULTILINE")
    index = SearchIndex(
        name="test",
        fields=None,
        analyzers=[generated_analyzer],
        tokenizers=[generated_tokenizer],
    )

    unpacked = SearchIndex._from_generated(index)

    # Both flags should appear in order after splitting on '|'.
    analyzer = unpacked.analyzers[0]
    assert isinstance(analyzer, PatternAnalyzer)
    assert isinstance(analyzer.flags, list)
    assert analyzer.flags[0] == "CANON_EQ"
    assert analyzer.flags[1] == "MULTILINE"

    tokenizer = unpacked.tokenizers[0]
    assert isinstance(tokenizer, PatternTokenizer)
    assert isinstance(tokenizer.flags, list)
    assert tokenizer.flags[0] == "CANON_EQ"
    assert tokenizer.flags[1] == "MULTILINE"
def test_unpack_search_index_enum():
    """RegexFlags enum values unpack the same way as their string equivalents."""
    generated_analyzer = _PatternAnalyzer(name="test_analyzer", flags=RegexFlags.canon_eq)
    generated_tokenizer = _PatternTokenizer(name="test_tokenizer", flags=RegexFlags.canon_eq)
    index = SearchIndex(
        name="test",
        fields=None,
        analyzers=[generated_analyzer],
        tokenizers=[generated_tokenizer],
    )

    unpacked = SearchIndex._from_generated(index)

    # The enum member should unpack to a one-element list of its string value.
    analyzer = unpacked.analyzers[0]
    assert isinstance(analyzer, PatternAnalyzer)
    assert isinstance(analyzer.flags, list)
    assert analyzer.flags[0] == "CANON_EQ"

    tokenizer = unpacked.tokenizers[0]
    assert isinstance(tokenizer, PatternTokenizer)
    assert isinstance(tokenizer.flags, list)
    assert tokenizer.flags[0] == "CANON_EQ"
def test_pack_search_index():
    """A public model with a one-element flags list packs to a single flags string."""
    public_analyzer = PatternAnalyzer(name="test_analyzer", flags=["CANON_EQ"])
    public_tokenizer = PatternTokenizer(name="test_tokenizer", flags=["CANON_EQ"])
    index = SearchIndex(
        name="test",
        fields=None,
        analyzers=[public_analyzer],
        tokenizers=[public_tokenizer],
    )

    packed = index._to_generated()

    # The single-flag list should collapse into the bare string on the generated model.
    analyzer = packed.analyzers[0]
    assert isinstance(analyzer, _PatternAnalyzer)
    assert isinstance(analyzer.flags, str)
    assert analyzer.flags == "CANON_EQ"

    tokenizer = packed.tokenizers[0]
    assert isinstance(tokenizer, _PatternTokenizer)
    assert isinstance(tokenizer.flags, str)
    assert tokenizer.flags == "CANON_EQ"
def test_multi_pack_search_index():
    """A public model with multiple flags packs to a single '|'-joined flags string."""
    pattern_analyzer = PatternAnalyzer(name="test_analyzer", flags=["CANON_EQ", "MULTILINE"])
    analyzers = []
    analyzers.append(pattern_analyzer)
    # Fix: the tokenizer was previously misnamed "test_analyzer" (copy-paste slip);
    # use "test_tokenizer" for consistency with the other tests in this module.
    pattern_tokenizer = PatternTokenizer(name="test_tokenizer", flags=["CANON_EQ", "MULTILINE"])
    tokenizers = []
    tokenizers.append(pattern_tokenizer)
    index = SearchIndex(name="test", fields=None, analyzers=analyzers, tokenizers=tokenizers)

    result = index._to_generated()

    # The flags list should be joined with '|' into one string on the generated model.
    assert isinstance(result.analyzers[0], _PatternAnalyzer)
    assert isinstance(result.analyzers[0].flags, str)
    assert result.analyzers[0].flags == "CANON_EQ|MULTILINE"
    assert isinstance(result.tokenizers[0], _PatternTokenizer)
    assert isinstance(result.tokenizers[0].flags, str)
    assert result.tokenizers[0].flags == "CANON_EQ|MULTILINE"