File: supervised_user_blacklist_downloader.cc

package info (click to toggle)
chromium-browser 41.0.2272.118-1
  • links: PTS, VCS
  • area: main
  • in suites: jessie-kfreebsd
  • size: 2,189,132 kB
  • sloc: cpp: 9,691,462; ansic: 3,341,451; python: 712,689; asm: 518,779; xml: 208,926; java: 169,820; sh: 119,353; perl: 68,907; makefile: 28,311; yacc: 13,305; objc: 11,385; tcl: 3,186; cs: 2,225; sql: 2,217; lex: 2,215; lisp: 1,349; pascal: 1,256; awk: 407; ruby: 155; sed: 53; php: 14; exp: 11
file content (79 lines) | stat: -rw-r--r-- 2,618 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/supervised_user/experimental/supervised_user_blacklist_downloader.h"

#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/logging.h"
#include "content/public/browser/browser_thread.h"
#include "net/base/load_flags.h"
#include "net/http/http_status_code.h"
#include "net/url_request/url_fetcher.h"
#include "url/gurl.h"

using content::BrowserThread;
using net::URLFetcher;

const int kNumRetries = 1;

// Sets up (but does not start) the fetch of the blacklist file at |url|,
// configured to stream the response body directly to |path| on the FILE
// thread. Before starting the network request, asynchronously checks on the
// blocking pool whether |path| already exists; the actual decision to fetch
// is made in OnFileExistsCheckDone().
//
// |request_context| must outlive this object; |callback| is invoked exactly
// once with true on success (file available) or false on failure.
SupervisedUserBlacklistDownloader::SupervisedUserBlacklistDownloader(
    const GURL& url,
    const base::FilePath& path,
    net::URLRequestContextGetter* request_context,
    const DownloadFinishedCallback& callback)
    : callback_(callback),
      fetcher_(URLFetcher::Create(url, URLFetcher::GET, this)),
      weak_ptr_factory_(this) {
  fetcher_->SetRequestContext(request_context);
  // The blacklist is static content; no cookies should be attached to or
  // stored from this request.
  fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                         net::LOAD_DO_NOT_SAVE_COOKIES |
                         net::LOAD_IS_DOWNLOAD);
  // Retry once if the network changes mid-download (e.g. wifi -> cellular).
  fetcher_->SetAutomaticallyRetryOnNetworkChanges(kNumRetries);
  // Write the response body straight to |path| instead of buffering it in
  // memory; file operations happen on the FILE thread.
  fetcher_->SaveResponseToFileAtPath(
      path,
      BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));

  // Check for an existing file off the UI thread; the weak pointer ensures
  // the reply is dropped if this downloader is destroyed first.
  base::PostTaskAndReplyWithResult(
      BrowserThread::GetBlockingPool(),
      FROM_HERE,
      base::Bind(&base::PathExists, path),
      base::Bind(&SupervisedUserBlacklistDownloader::OnFileExistsCheckDone,
                 weak_ptr_factory_.GetWeakPtr()));
}

SupervisedUserBlacklistDownloader::~SupervisedUserBlacklistDownloader() {}

// URLFetcherDelegate implementation. Invoked when the fetch started in
// OnFileExistsCheckDone() finishes (successfully or not). Reports the
// overall outcome through |callback_|:
//  - network-level failure or non-200 HTTP status -> callback_.Run(false)
//  - otherwise, takes ownership of the downloaded file and reports whether
//    that succeeded.
void SupervisedUserBlacklistDownloader::OnURLFetchComplete(
    const net::URLFetcher* source) {
  DCHECK_EQ(fetcher_.get(), source);

  const net::URLRequestStatus& status = source->GetStatus();
  if (!status.is_success()) {
    DLOG(WARNING) << "URLRequestStatus error " << status.error();
    callback_.Run(false);
    return;
  }

  int response_code = source->GetResponseCode();
  if (response_code != net::HTTP_OK) {
    DLOG(WARNING) << "HTTP error " << response_code;
    callback_.Run(false);
    return;
  }

  // Take ownership of the new file, so the fetcher does not delete it when
  // it is destroyed.
  base::FilePath response_path;
  bool success = source->GetResponseAsFilePath(true, &response_path);
  // Log on failure for consistency with the other failure paths above;
  // previously this path failed silently.
  if (!success)
    DLOG(WARNING) << "GetResponseAsFilePath failed";
  callback_.Run(success);
}

// Reply from the base::PathExists() check posted in the constructor.
// Starts the download only when no local copy of the blacklist exists;
// otherwise reports immediate success with the existing file.
void SupervisedUserBlacklistDownloader::OnFileExistsCheckDone(bool exists) {
  if (!exists) {
    // No local file yet; kick off the network fetch. The result arrives in
    // OnURLFetchComplete().
    fetcher_->Start();
    return;
  }
  // TODO(treib): Figure out a strategy for updating the file.
  callback_.Run(true);
}