File: ai_on_device_session.cc

package info (click to toggle)
chromium 138.0.7204.183-1~deb12u1
  • links: PTS, VCS
  • area: main
  • in suites: bookworm-proposed-updates
  • size: 6,080,960 kB
  • sloc: cpp: 34,937,079; ansic: 7,176,967; javascript: 4,110,704; python: 1,419,954; asm: 946,768; xml: 739,971; pascal: 187,324; sh: 89,623; perl: 88,663; objc: 79,944; sql: 50,304; cs: 41,786; fortran: 24,137; makefile: 21,811; php: 13,980; tcl: 13,166; yacc: 8,925; ruby: 7,485; awk: 3,720; lisp: 3,096; lex: 1,327; ada: 727; jsp: 228; sed: 36
file content (50 lines) | stat: -rw-r--r-- 1,794 bytes parent folder | download | duplicates (4)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/ai/ai_on_device_session.h"

// Takes ownership of an on-device model execution session. The session may
// be null; in that case queued requests are held but never executed (see
// MaybeRunNextExecutionRequest's null check).
AIOnDeviceSession::AIOnDeviceSession(
    std::unique_ptr<optimization_guide::OptimizationGuideModelExecutor::Session>
        session)
    : session_(std::move(session)) {}

AIOnDeviceSession::~AIOnDeviceSession() = default;

// Enqueues `request` (with its streaming `callback`) and starts executing it
// immediately if no other execution is in flight. Requests are serviced
// strictly in FIFO order, one at a time.
void AIOnDeviceSession::ExecuteModelOrQueue(
    optimization_guide::MultimodalMessage request,
    optimization_guide::OptimizationGuideModelExecutionResultStreamingCallback
        callback) {
  // Construct the pending-request pair in place at the back of the queue.
  requests_.emplace(std::move(request), std::move(callback));
  MaybeRunNextExecutionRequest();
}

// Dequeues the oldest pending request and hands it to the session for
// execution. No-op when an execution is already in flight, the session is
// null, or nothing is queued.
void AIOnDeviceSession::MaybeRunNextExecutionRequest() {
  // Guard clauses: serialize executions and require a live session.
  if (is_execution_in_progress_ || !session_ || requests_.empty()) {
    return;
  }

  auto [message, stream_callback] = std::move(requests_.front());
  requests_.pop();

  // Mark busy before kicking off the (asynchronous) execution.
  is_execution_in_progress_ = true;
  session_->ExecuteModel(
      message.BuildProtoMessage(),
      base::BindRepeating(&AIOnDeviceSession::ModelExecutionCallback,
                          weak_ptr_factory_.GetWeakPtr(),
                          std::move(stream_callback)));
}

// Receives one streaming result (or an error) from the session, forwards it
// to the original caller, and — once the current execution has finished —
// starts the next queued request.
void AIOnDeviceSession::ModelExecutionCallback(
    optimization_guide::OptimizationGuideModelExecutionResultStreamingCallback
        final_callback,
    optimization_guide::OptimizationGuideModelStreamingExecutionResult result) {
  // Execution is finished when there is no response value (the error case)
  // or the streaming response is flagged complete; otherwise more chunks
  // are still expected and we remain busy.
  const bool finished =
      !result.response.has_value() || result.response->is_complete;
  is_execution_in_progress_ = !finished;

  // Deliver this chunk (or error) before possibly starting the next
  // request, so callers observe results in submission order.
  std::move(final_callback).Run(std::move(result));

  MaybeRunNextExecutionRequest();
}