// Copyright 2023 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

syntax = "proto2";

option optimize_for = LITE_RUNTIME;
option java_package = "org.chromium.components.optimization_guide.proto";
option java_outer_classname = "VisualSearchModelMetadataProto";

package optimization_guide.proto;

message FeatureLibrary {
  // Next id: 16
  enum ImageLevelFeatureName {
    IMAGE_LEVEL_UNSPECIFIED = 0;

    // Features of the original image.
    IMAGE_ORIGINAL_AREA = 1;
    IMAGE_ORIGINAL_ASPECT_RATIO = 2;
    IMAGE_ORIGINAL_HEIGHT = 11;
    IMAGE_ORIGINAL_WIDTH = 12;

    // Features of the image as scaled on the page. Not all of the area may
    // be visible.
    IMAGE_ONPAGE_AREA = 3;
    IMAGE_ONPAGE_HEIGHT = 9;
    IMAGE_ONPAGE_WIDTH = 10;
    IMAGE_ONPAGE_ASPECT_RATIO = 4;

    // The area of the image that is actually visible on the page.
    IMAGE_VISIBLE_AREA = 5;

    // Fraction of the image that is in the viewport. Computed as
    // IMAGE_VISIBLE_AREA / IMAGE_ONPAGE_AREA.
    IMAGE_FRACTION_VISIBLE = 6;

    // The Euclidean distance from the center of the image to the center of
    // the viewport.
    IMAGE_DISTANCE_TO_VIEWPORT_CENTER = 13;

    // Scores of the shopping, sensitivity, natural world, and public figures
    // classifiers.
    SHOPPING_CLASSIFIER_SCORE = 7;
    SENS_CLASSIFIER_SCORE = 8;
    NAT_WORLD_CLASSIFIER_SCORE = 14;
    PUB_FIGURES_CLASSIFIER_SCORE = 15;
  }

  // Enum listing the ways in which we can normalize a feature.
  enum NormalizingOp {
    NORMALIZE_UNSPECIFIED = 0;

    // Normalize by the size of the viewport area.
    BY_VIEWPORT_AREA = 1;

    // Normalize by the max value of the feature.
    BY_MAX_VALUE = 2;
  }

  // Enum listing the types of thresholding operators.
  enum ThresholdingOp {
    THRESHOLDING_UNSPECIFIED = 0;

    // Greater than.
    GT = 1;

    // Less than.
    LT = 2;
  }

  // Enum listing the order in which the final list of eligible images is
  // sorted.
  enum SortingOrder {
    SORT_UNSPECIFIED = 0;
    SORT_ASCENDING = 1;
    SORT_DESCENDING = 2;
  }
}

// A rule that applies a threshold to a feature. The rule is satisfied if the
// value of the feature given by /feature_name/ passes the /threshold/
// according to the given /thresholding_op/. E.g. suppose
// feature_name = IMAGE_ORIGINAL_AREA, thresholding_op = GT, and
// threshold = 5. Then the rule is satisfied if the original area of the image
// is greater than 5.
message ThresholdingRule {
  optional FeatureLibrary.ImageLevelFeatureName feature_name = 1;
  // The normalizing_op is optional.
  optional FeatureLibrary.NormalizingOp normalizing_op = 2;
  optional FeatureLibrary.ThresholdingOp thresholding_op = 3;
  optional float threshold = 4;
}

// An OR-of-thresholding-rules is satisfied if any of the thresholding rules it
// contains are satisfied.
message OrOfThresholdingRules {
  repeated ThresholdingRule rules = 1;
}

// Specifies a feature by whose value the final list of eligible images is
// sorted, and the order in which to sort.
message SortingClause {
  optional FeatureLibrary.ImageLevelFeatureName feature_name = 1;
  optional FeatureLibrary.SortingOrder sorting_order = 2;
}

// Contains any additional options for cheap pruning that cannot be neatly
// expressed as an OrOfThresholdingRules.
message AdditionalCheapPruningOptions {
  // Providing a value for this setting enables filtering out overlapping
  // images by using the z-index value (when available). When the overlap
  // between two images is at least the provided fraction (e.g. 0.99) and
  // z-index values are present, we filter out the image with the lower
  // z-index value.
  optional float z_index_overlap_fraction = 1;
}

// Overall spec for the whole module.
message EligibilitySpec {
  // List of light-weight thresholding rules used in the first-cut pass to
  // cheaply prune ineligible images. All of these rule groups must be
  // satisfied for an image to be considered eligible.
  repeated OrOfThresholdingRules cheap_pruning_rules = 1;

  optional AdditionalCheapPruningOptions additional_cheap_pruning_options = 5;

  // Thresholding rules that depend on more expensive-to-compute inputs, such
  // as the sensitivity and intent classifications.
  repeated OrOfThresholdingRules classifier_score_rules = 2;

  // Before running these rules, the normalizing features are recomputed only
  // over the images that remain after the second pass.
  repeated OrOfThresholdingRules post_renormalization_rules = 3;

  // The final list of eligible images is sorted according to these clauses.
  // The clauses are applied in the order in which they are provided here;
  // e.g., we may choose to first sort by image area and then by shopping
  // score.
  repeated SortingClause sorting_clauses = 4;
}
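
// Illustrative EligibilitySpec sketch in textproto. The feature choices and
// numeric thresholds below are hypothetical, not a recommended configuration;
// it only shows how the pieces above fit together: cheaply prune images whose
// on-page area (normalized by the viewport area) is too small, then require a
// sufficiently high shopping score, and finally sort by on-page area in
// descending order:
//   cheap_pruning_rules {
//     rules {
//       feature_name: IMAGE_ONPAGE_AREA
//       normalizing_op: BY_VIEWPORT_AREA
//       thresholding_op: GT
//       threshold: 0.01
//     }
//   }
//   classifier_score_rules {
//     rules {
//       feature_name: SHOPPING_CLASSIFIER_SCORE
//       thresholding_op: GT
//       threshold: 0.5
//     }
//   }
//   sorting_clauses {
//     feature_name: IMAGE_ONPAGE_AREA
//     sorting_order: SORT_DESCENDING
//   }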

// This message contains the set of parameters used to configure visual search
// classification.
message VisualSearchModelMetadata {
  optional EligibilitySpec eligibility_spec = 1;

  // Keeps track of the version, to ensure that the client side has been
  // updated to handle new fields that may appear in the config protos.
  optional int64 metadata_version = 2;
}