File: phrase_segmenter_unittest.cc

package info (click to toggle)
chromium 138.0.7204.183-1
  • links: PTS, VCS
  • area: main
  • in suites: trixie
  • size: 6,071,908 kB
  • sloc: cpp: 34,937,088; ansic: 7,176,967; javascript: 4,110,704; python: 1,419,953; asm: 946,768; xml: 739,971; pascal: 187,324; sh: 89,623; perl: 88,663; objc: 79,944; sql: 50,304; cs: 41,786; fortran: 24,137; makefile: 21,806; php: 13,980; tcl: 13,166; yacc: 8,925; ruby: 7,485; awk: 3,720; lisp: 3,096; lex: 1,327; ada: 727; jsp: 228; sed: 36
file content (101 lines) | stat: -rw-r--r-- 4,728 bytes parent folder | download | duplicates (7)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
// Copyright 2024 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/renderer/accessibility/phrase_segmentation/phrase_segmenter.h"

#include <string>
#include <vector>

#include "chrome/renderer/accessibility/phrase_segmentation/dependency_tree.h"
#include "chrome/renderer/accessibility/phrase_segmentation/token_boundaries.h"
#include "chrome/renderer/accessibility/phrase_segmentation/tokenized_sentence.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace {

// Fixture for PhraseSegmenter tests. It carries no per-test state; it exists
// only to name the suite for the TEST_F macros below.
class PhraseSegmentationPhraseSegmenterTest : public testing::Test {};

TEST_F(PhraseSegmentationPhraseSegmenterTest, SegmentTestWords) {
  // Word-count strategy: phrases are limited to 4 words each, with split
  // points chosen at strong dependency-tree boundaries.
  const std::u16string sentence =
      u"The result is \"ice cream\"; a smooth, semisolid foam.";
  //    01234567890123 4567890123 4567890123456789012345678901
  const TokenizedSentence tokenized(sentence);

  // Each entry is the index of the corresponding token's syntactic head.
  const std::vector<int> heads = {1, 5,  5,  5,  5,  5, 5,
                                  5, 12, 12, 12, 12, 5, 5};
  ASSERT_EQ(heads.size(), tokenized.tokens().size());

  const DependencyTree tree(tokenized, heads);
  const TokenBoundaries boundaries(tree);

  PhraseSegmenter segmenter;
  const std::vector<int> actual = CalculatePhraseBoundaries(
      segmenter, tokenized, boundaries, Strategy::kWords, 4);

  // The result is /"ice cream"; /a smooth, /semisolid foam.
  const std::vector<int> expected = {0, 14, 27, 37};
  EXPECT_EQ(actual, expected);
}

TEST_F(PhraseSegmentationPhraseSegmenterTest, SegmentTestCharacters) {
  // Same sentence as SegmentTestWords, segmented with the character-count
  // strategy instead (at most 20 characters per phrase).
  const std::u16string sentence =
      u"The result is \"ice cream\"; a smooth, semisolid foam.";
  //    01234567890123 4567890123 4567890123456789012345678901
  const TokenizedSentence tokenized_sentence(sentence);
  // Index of each token's syntactic head in the dependency parse.
  const std::vector<int> dependency_heads = {1, 5,  5,  5,  5,  5, 5,
                                             5, 12, 12, 12, 12, 5, 5};
  ASSERT_EQ(dependency_heads.size(), tokenized_sentence.tokens().size());
  const DependencyTree dependency_tree(tokenized_sentence, dependency_heads);

  const TokenBoundaries token_boundaries(dependency_tree);
  PhraseSegmenter smart_highlight;
  std::vector<int> split_char_offsets =
      CalculatePhraseBoundaries(smart_highlight, tokenized_sentence,
                                token_boundaries, Strategy::kCharacters, 20);
  // The result is /"ice cream"; /a smooth, /semisolid foam.
  // NOTE(review): the comment previously showed the last split before "foam"
  // (offset 47), which contradicted the asserted offsets; 37 is the start of
  // "semisolid", matching the expectation below.
  std::vector<int> expected_split_char_offsets = {0, 14, 27, 37};
  EXPECT_EQ(split_char_offsets, expected_split_char_offsets);
}

TEST_F(PhraseSegmentationPhraseSegmenterTest,
       SplitsPhrasesInCenterIfWeightsEqual) {
  // Every token shares the same head, so every inter-token boundary carries
  // equal weight; the segmenter is expected to split near the middle.
  const std::u16string sentence = u"a smooth, semisolid foam.";
  //                                0123456789012345678901234
  const TokenizedSentence tokenized(sentence);

  const std::vector<int> heads = {4, 4, 4, 4, 4, 4};
  ASSERT_EQ(heads.size(), tokenized.tokens().size());

  const DependencyTree tree(tokenized, heads);
  const TokenBoundaries boundaries(tree);

  PhraseSegmenter segmenter;
  const std::vector<int> actual = CalculatePhraseBoundaries(
      segmenter, tokenized, boundaries, Strategy::kWords, 3);

  // a smooth, /semisolid foam.
  const std::vector<int> expected = {0, 10};
  EXPECT_EQ(actual, expected);
}

TEST_F(PhraseSegmentationPhraseSegmenterTest,
       SegmentTestCharactersWithLongWord) {
  // A single token longer than the 20-character budget must become a phrase
  // on its own rather than being split mid-word.
  const std::u16string sentence =
      u"A smooth, semisolidifyingableificationifical foam.";
  //    0123456789012345678901234567890123456789012345678901
  const TokenizedSentence tokenized_sentence(sentence);
  // Index of each token's syntactic head in the dependency parse.
  const std::vector<int> dependency_heads = {4, 4, 4, 4, 4, 4};
  ASSERT_EQ(dependency_heads.size(), tokenized_sentence.tokens().size());
  const DependencyTree dependency_tree(tokenized_sentence, dependency_heads);

  const TokenBoundaries token_boundaries(dependency_tree);
  PhraseSegmenter smart_highlight;
  std::vector<int> split_char_offsets =
      CalculatePhraseBoundaries(smart_highlight, tokenized_sentence,
                                token_boundaries, Strategy::kCharacters, 20);
  // A smooth, /semisolidifyingableificationifical /foam.
  // NOTE(review): the previous comment here was copy-pasted from another test
  // ("The result is /\"ice cream\"; ..."); the splits above match the
  // asserted offsets: 10 starts the 34-character word, 45 starts "foam.".
  std::vector<int> expected_split_char_offsets = {0, 10, 45};
  EXPECT_EQ(split_char_offsets, expected_split_char_offsets);
}

}  // namespace