File: image_file_cache.rb

package: mikutter 4.1.3+dfsg1-1 (Debian bullseye, main)
# -*- coding: utf-8 -*-

require 'moneta'
require 'set'

Plugin.create :image_file_cache do

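  # Cache defaults: keep an image for at most 32 days and use at most 128MB of
  # storage. Both values are user-configurable from the settings pane below.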
  UserConfig[:image_file_cache_expire]     ||= 32
  UserConfig[:image_file_cache_size_limit] ||= 128

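  # Housekeeping queue for the cache sweeper. :check_dirs jobs walk the
  # top-level cache directory and :check_subdirs jobs walk its two-hex-character
  # subdirectories; expire: 0.02 presumably caps how long a single run of this
  # queue may take.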
  @queue = Delayer.generate_class(priority: %i[none check_subdirs check_dirs],
                                  default: :none,
                                  expire: 0.02)
  @cache_directory = File.join(Environment::CACHE, 'image_file_cache').freeze
  @cache_journal_directory = File.join(Environment::CACHE, 'image_file_cache', 'journal').freeze
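  # Two Moneta file stores: @db keeps the raw image blobs, @journal_db keeps
  # marshalled bookkeeping data. The :md5/:spread key transformers hash each key
  # and spread it across subdirectories, which is presumably why check_dirs
  # scans two-hex-character directory names. The directory is passed through an
  # outer lambda so it stays visible as a local inside the instance_eval'd
  # builder block.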
  @db = ::Moneta.build(&->(dir){ ->(this){
                                   this.use :Transformer, key: %i[md5 spread]
                                   this.adapter(:File, dir: dir)
                                 } }.(@cache_directory))
  @journal_db = ::Moneta.build(&->(dir){ ->(this){
                                           this.use :Transformer, key: %i[md5 spread], value: :marshal
                                           this.adapter(:File, dir: dir)
                                         } }.(@cache_journal_directory))
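  # Journal keys used below:
  #   "all:urls"         - marshalled list of every cached URL
  #   "all:size"         - raw counter of the total cached bytes
  #   "<uri>:created"    - time the blob was stored
  #   "<uri>:size"       - blob size in bytes
  #   "<uri>:read_count" - raw counter of cache hits for that URL
  # @urls below memoizes the Set parsed from "all:urls".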
  @urls = nil

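  # Resolve a URL into Photo models through the :photo_filter filter and hand
  # the first match over to :image_file_cache_photo for caching.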
  on_image_file_cache_cache do |url|
    photos = Enumerator.new{|y|
      Plugin.filtering(:photo_filter, url, y)
    }
    Plugin.call(:image_file_cache_photo, photos.first)
  end

  on_image_file_cache_photo do |photo|
    cache_it(photo)
  end

  # Return the cached image if one exists
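  # A cache hit also bumps the URL's read counter ("<url>:read_count"), which
  # free_unused_cache later uses to decide which entries to evict.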
  filter_image_cache do |url, image, &stop|
    body = @db[url]
    if body
      @journal_db.increment("#{url}:read_count")
      stop.call([url, body])
    end
    [url, image]
  rescue => e
    error e
    [url, image]
  end

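  # Settings pane ("Image cache"): the two sliders correspond to
  # :image_file_cache_expire (maximum age in days) and
  # :image_file_cache_size_limit (maximum storage in MB). The msgids are kept
  # in Japanese because they are the gettext translation keys.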
  settings _('画像キャッシュ') do
    adjustment(_('画像をキャッシュする最大日数(日)'), :image_file_cache_expire, 1, 365)
      .tooltip(_('キャッシュされた画像は、この日数が経てば、無条件に削除されます。もしもう一度使われることがあれば、ネット上から再取得されるでしょう。'))
    adjustment(_('画像キャッシュを保存する最大容量(MB)'), :image_file_cache_size_limit, 1, 1024)
      .tooltip(_('この容量に達するまで、アイコンなどの、また使われる可能性のある画像をローカルに保存しておきます。例えば1024を指定すると、最大で1GBのストレージを消費する可能性があります。'))
  end

  # Returns the cache expiry in seconds
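  # For example, the default of 32 days works out to 32 * 24 * 60 * 60 =
  # 2_764_800 seconds.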
  def cache_expire
    [1, UserConfig[:image_file_cache_expire].to_i].max * 24 * 60 * 60
  end

  # Returns the cache size limit in bytes
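  # For example, the default of 128 becomes 128 << 20 = 134_217_728 bytes (128MiB).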
  def cache_size_limit
    [1, UserConfig[:image_file_cache_size_limit].to_i].max << 20
  end

  # Minimum number of images to release at once when the cache is over capacity
  def size_exceeded_minimum_photo_count_atonce
    128
  end

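  # Cache a Photo unless its URL is already cached: use the in-memory blob if
  # the photo already has one, otherwise download it first.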
  def cache_it(photo)
    unless urls.include?(photo.uri.to_s)
      if photo.blob
        cache_blob(photo.uri.to_s, photo.blob)
      else
        photo.download.next{|downloaded|
          cache_blob(photo.uri.to_s, downloaded.blob)
        }
      end
    end
  end

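  # Store a blob under its URI on the SerialThread worker. Blobs larger than
  # the whole cache budget are skipped outright; when the running total in
  # "all:size" exceeds the budget, free_unused_cache evicts entries before the
  # new blob is written.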
  def cache_blob(uri, blob)
    return if blob.bytesize >= cache_size_limit
    SerialThread.new do
      unless urls.include?(uri)
        all_size = @journal_db.increment("all:size", blob.bytesize)
        if all_size >= cache_size_limit
          free_unused_cache
        end
        urls << uri
        @db[uri.to_s] = blob
        @journal_db["#{uri}:created"] = Time.now
        @journal_db["#{uri}:size"] = blob.bytesize
        @journal_db["all:urls"] = urls
        notice "image file cache added #{uri} (#{blob.bytesize}B, all_size: #{all_size}B)"
      end
    end
  end

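  # In-memory Set of cached URLs, lazily loaded from the "all:urls" journal key.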
  def urls
    @urls ||= Set.new(@journal_db.fetch("all:urls", []))
  end

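  # Evict part of the cache when the size limit is hit. A random sample of
  # roughly 10% of the cached URLs (at least
  # size_exceeded_minimum_photo_count_atonce of them) is scored as
  # (read_count - average read_count) * size, so large, rarely-read images sort
  # first, and the lower-scoring half of the sample is deleted.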
  def free_unused_cache
    before_size = @journal_db.raw.fetch("all:size", 0).to_i
    notice "there are #{@urls.size} cache(s). #{[@urls.size/10, size_exceeded_minimum_photo_count_atonce].max.to_i} cache(s) will be deleted."
    target_urls = @urls.to_a.sample([@urls.size/10, size_exceeded_minimum_photo_count_atonce].max.to_i)
    read_count_sum = 0
    params = target_urls.map{|uri|
      count = @journal_db.raw.fetch("#{uri}:read_count", 0).to_i
      read_count_sum += count
      { uri: uri,
        size: @journal_db.fetch("#{uri}:size"){ @db.fetch(uri, ''.freeze).bytesize },
        count: count }
    }
    read_count_average = read_count_sum.to_f / params.size
    delete_items = params.sort_by{|param| (param[:count] - read_count_average) * param[:size] }.first(params.size/2)
    deleted_size = 0
    delete_items.each do |item|
      uri = item[:uri]
      notice "delete ((#{item[:count]} - #{read_count_average}) * #{item[:size]} = #{(item[:count] - read_count_average) * item[:size]}pts) #{uri}"
      urls.delete(uri)
      @db.delete(uri.to_s)
      @journal_db.delete("#{uri}:created")
      @journal_db.delete("#{uri}:size")
      @journal_db.raw.delete("#{uri}:read_count")
      deleted_size += item[:size]
    end
    @journal_db.decrement("all:size", deleted_size)
    @journal_db["all:urls"] = urls
    after_size = @journal_db.raw["all:size"].to_i
    notice "image file cache free. #{before_size} -> #{after_size} (#{before_size - after_size}B free)"
    activity :system, "image file cache free. #{before_size} -> #{after_size} (#{before_size - after_size}B free)"
  end

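  # For every cache file in dir, schedule a deletion Delayer at
  # (last access time, falling back to mtime) + cache_expire; the directory
  # itself is removed once it no longer contains any files.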
  def check_subdirs(dir)
    @queue.new(:check_subdirs) do
      Dir.foreach(dir)
        .map{|x| File.join(dir, x) }
        .select{|x| FileTest.file?(x) }
        .each do |x|
          Delayer.new(:destroy_cache, delay: (File.atime(x) rescue File.mtime(x)) + cache_expire) do
            notice "cache deleted #{x}"
            File.delete(x) if FileTest.file?(x)
            if Dir.foreach(dir).select{|y| File.file? File.join(dir, y) }.empty?
              Dir.delete(dir) rescue nil
            end
          end
        end
    end
  end

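  # Queue a check_subdirs job for every two-hex-character subdirectory of
  # target_dir, then reschedule itself after cache_expire seconds.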
  def check_dirs(target_dir)
    @queue.new(:check_dirs) do
      Dir.foreach(target_dir)
        .select{|x| x =~ %r<\A(?:[a-fA-F0-9]{2})\Z> }
        .shuffle
        .each{|subdir| check_subdirs(File.join(target_dir, subdir)) }
      Delayer.new(:destroy_cache, delay: cache_expire) do
        check_dirs(target_dir)
      end
    end
  end

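  # Pump the housekeeping queue once a minute; the loop stops once on_unload
  # clears @queue.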
  def _loop
    Delayer.new(:destroy_cache, delay: 60) do
      if @queue
        @queue.run
        _loop
      end
    end
  end

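  # Close both Moneta stores and stop the housekeeping loop on unload.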
  on_unload do
    @db.close
    @journal_db.close
    @journal_db = @db = @queue = nil
  end

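  # Start sweeping both the blob store and the journal store, then start the
  # queue pump.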
  check_dirs(@cache_directory)
  check_dirs(@cache_journal_directory)
  _loop

end
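
# Usage sketch (not part of the plugin): other plugins trigger caching by
# firing the :image_file_cache_cache event, and cached bodies come back through
# the :image_cache filter defined above. The URL is only an illustration, and
# the shape of the filter result is assumed here, not guaranteed.
#
#   Plugin.call(:image_file_cache_cache, 'https://example.com/icon.png')
#   url, body = Plugin.filtering(:image_cache, 'https://example.com/icon.png', nil)
#   # body should be the cached blob on a hit, otherwise nil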