File: bucket.rb

module Aws


  class S3::Bucket
    attr_reader :s3, :name, :owner, :creation_date

    # Create a Bucket instance.
    # If the bucket does not exist and +create+ is set, a new bucket
    # is created on S3. Calling this method with +create+ set to +true+ may
    # affect the bucket's ACL if the bucket already exists.
    # Returns a Bucket instance, or +nil+ if the bucket does not exist
    # and +create+ is not set.
    #
    #  s3 = Aws::S3.new(aws_access_key_id, aws_secret_access_key)
    #   ...
    #  bucket1 = Aws::S3::Bucket.create(s3, 'my_awesome_bucket_1')
    #  bucket1.keys  #=> exception here if the bucket does not exist
    #   ...
    #  bucket2 = Aws::S3::Bucket.create(s3, 'my_awesome_bucket_2', true)
    #  bucket2.keys  #=> list of keys
    #  # create a bucket at the European location with public read access
    #  bucket3 = Aws::S3::Bucket.create(s3, 'my-awesome-bucket-3', true, 'public-read', :location => :eu)
    #
    #  see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html
    #  (section: Canned Access Policies)
    #
    def self.create(s3, name, create=false, perms=nil, headers={})
      s3.bucket(name, create, perms, headers)
    end


    # Create a bucket instance. In normal use this method should
    # not be called directly.
    # Use Aws::S3::Bucket.create or Aws::S3.bucket instead.
    def initialize(s3, name, creation_date=nil, owner=nil)
      @s3 = s3
      @name = name
      @owner = owner
      @creation_date = creation_date
      if @creation_date && !@creation_date.is_a?(Time)
        @creation_date = Time.parse(@creation_date)
      end
    end

    # Return the bucket name as a String.
    #
    #  bucket = Aws::S3.bucket('my_awesome_bucket')
    #  puts bucket #=> 'my_awesome_bucket'
    #
    def to_s
      @name.to_s
    end

    alias_method :full_name, :to_s

    # Return a public link to the bucket.
    #
    #  bucket.public_link #=> 'https://s3.amazonaws.com:443/my_awesome_bucket'
    #
    def public_link
      params = @s3.interface.params
      "#{params[:protocol]}://#{params[:server]}:#{params[:port]}/#{full_name}"
    end

    # Returns the bucket location
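    #
    # Illustrative example (the value is whatever S3 reports as the bucket's
    # location constraint; 'EU' is only an assumed sample):
    #
    #  bucket.location #=> 'EU'
    #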
    def location
      @location ||= @s3.interface.bucket_location(@name)
    end

    # Retrieves the logging configuration for a bucket.
    # Returns a hash with the keys :enabled, :targetbucket, and :targetprefix.
    #
    #   bucket.logging_info()
    #   => {:enabled=>true, :targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/"}
    def logging_info
      @s3.interface.get_logging_parse(:bucket => @name)
    end

    # Enables S3 server access logging on a bucket.  The target bucket must have been properly configured to receive server
    # access logs.
    #  Params:
    #   :targetbucket - either the target bucket object or the name of the target bucket
    #   :targetprefix - the prefix under which all logs should be stored
    #
    #  bucket.enable_logging(:targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/")
    #    => true
    def enable_logging(params)
      Utils.mandatory_arguments([:targetbucket, :targetprefix], params)
      Utils.allow_only([:targetbucket, :targetprefix], params)
      xmldoc = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><BucketLoggingStatus xmlns=\"http://doc.s3.amazonaws.com/2006-03-01\"><LoggingEnabled><TargetBucket>#{params[:targetbucket]}</TargetBucket><TargetPrefix>#{params[:targetprefix]}</TargetPrefix></LoggingEnabled></BucketLoggingStatus>"
      @s3.interface.put_logging(:bucket => @name, :xmldoc => xmldoc)
    end

    # Disables S3 server access logging on a bucket.  Takes no arguments.
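    #
    # Illustrative example (the +true+ return value is assumed, mirroring the
    # enable_logging example above):
    #
    #  bucket.disable_logging #=> true
    #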
    def disable_logging
      xmldoc = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><BucketLoggingStatus xmlns=\"http://doc.s3.amazonaws.com/2006-03-01\"></BucketLoggingStatus>"
      @s3.interface.put_logging(:bucket => @name, :xmldoc => xmldoc)
    end

    # Retrieve a group of keys from Amazon.
    # +options+ is a hash: { 'prefix'=>'', 'marker'=>'', 'max-keys'=>5, 'delimiter'=>'' }.
    # Retrieves meta-header information if +head+ is +true+.
    # Returns an array of Key instances.
    #
    #  bucket.keys                     #=> # returns all keys from the bucket
    #  bucket.keys('prefix' => 'logs') #=> # returns all keys that start with 'logs'
    #
    def keys(options={}, head=false)
      keys_and_service(options, head)[0]
    end

    # Same as the +keys+ method but returns an array of [keys, service_data],
    # where +service_data+ is a hash with additional output information.
    #
    #  keys, service = bucket.keys_and_service({'max-keys'=> 2, 'prefix' => 'logs'})
    #  p keys    #=> # 2 keys array
    #  p service #=> {"max-keys"=>"2", "prefix"=>"logs", "name"=>"my_awesome_bucket", "marker"=>"", "is_truncated"=>true}
    #
    def keys_and_service(options={}, head=false)
      opt = {}; options.each { |key, value| opt[key.to_s] = value }
      service_data = {}
      service_list = {}
      list = []
      @s3.interface.incrementally_list_bucket(@name, opt) do |thislist|
        service_list = thislist
        thislist[:contents].each do |entry|
          owner = S3::Owner.new(entry[:owner_id], entry[:owner_display_name])
          key = S3::Key.new(self, entry[:key], nil, {}, {}, entry[:last_modified], entry[:e_tag], entry[:size], entry[:storage_class], owner)
          key.head if head
          list << key
        end
      end
      service_list.each_key do |key|
        service_data[key] = service_list[key] unless (key == :contents || key == :common_prefixes)
      end
      [list, service_data]
    end

    # Retrieve key information from Amazon.
    # The +key_name+ is a +String+ or Key instance.
    # Retrieves meta-header information if +head+ is +true+.
    # Returns a new Key instance.
    #
    #  key = bucket.key('logs/today/1.log', true) #=> #<Aws::S3::Key:0xb7b1e240 ... >
    #   # is the same as:
    #  key = Aws::S3::Key.create(bucket, 'logs/today/1.log')
    #  key.head
    #
    def key(key_name, head=false)
      raise 'Key name can not be empty.' if Aws::Utils.blank?(key_name)
      key_instance = nil
      # if this key exists - find it ....
      keys({'prefix'=>key_name}, head).each do |key|
        if key.name == key_name.to_s
          key_instance = key
          break
        end
      end
      # .... else this key is unknown
      unless key_instance
        key_instance = S3::Key.create(self, key_name.to_s)
      end
      key_instance
    end

    # Store object data.
    # The +key+ is a +String+ or Key instance.
    # Returns +true+.
    #
    #  bucket.put('logs/today/1.log', 'Olala!') #=> true
    #
    def put(key, data=nil, meta_headers={}, perms=nil, headers={})
      key = S3::Key.create(self, key.to_s, data, meta_headers) unless key.is_a?(S3::Key)
      key.put(data, perms, headers)
    end

    # Retrieve object data from Amazon.
    # The +key+ is a +String+ or Key.
    # Returns data.
    #
    #  data = bucket.get('logs/today/1.log') #=>
    #  puts data #=> 'sasfasfasdf'
    #
    def get(key, headers={}, &block)
      key = S3::Key.create(self, key.to_s) unless key.is_a?(S3::Key)
      key.get(headers, &block)
    end

    # Retrieve object data from Amazon.
    # The +key+ is a +String+ or Key.
    # Returns a Key instance.
    #
    #  key = bucket.get_key('logs/today/1.log') #=>
    #  puts key.data #=> 'sasfasfasdf'
    #
    def get_key(key, headers={})
      key = S3::Key.create(self, key.to_s, headers) unless key.is_a?(S3::Key)
      return key
    end

    # Rename an object. Returns an Aws::S3::Key instance.
    #
    #  new_key = bucket.rename_key('logs/today/1.log','logs/today/2.log')   #=> #<Aws::S3::Key:0xb7b1e240 ... >
    #  puts new_key.name   #=> 'logs/today/2.log'
    #  new_key.exists?     #=> true
    #
    def rename_key(old_key_or_name, new_name)
      old_key_or_name = S3::Key.create(self, old_key_or_name.to_s) unless old_key_or_name.is_a?(S3::Key)
      old_key_or_name.rename(new_name)
      old_key_or_name
    end

    # Create an object copy. Returns a destination Aws::S3::Key instance.
    #
    #  new_key = bucket.copy_key('logs/today/1.log','logs/today/2.log')   #=> #<Aws::S3::Key:0xb7b1e240 ... >
    #  puts new_key.name   #=> 'logs/today/2.log'
    #  new_key.exists?     #=> true
    #
    def copy_key(old_key_or_name, new_key_or_name)
      old_key_or_name = S3::Key.create(self, old_key_or_name.to_s) unless old_key_or_name.is_a?(S3::Key)
      old_key_or_name.copy(new_key_or_name)
    end

    # Move an object to another location. Returns a destination Aws::S3::Key instance.
    #
    #  new_key = bucket.move_key('logs/today/1.log','logs/today/2.log')   #=> #<Aws::S3::Key:0xb7b1e240 ... >
    #  puts new_key.name   #=> 'logs/today/2.log'
    #  new_key.exists?     #=> true
    #
    def move_key(old_key_or_name, new_key_or_name)
      old_key_or_name = S3::Key.create(self, old_key_or_name.to_s) unless old_key_or_name.is_a?(S3::Key)
      old_key_or_name.move(new_key_or_name)
    end

    # Remove all keys from a bucket.
    # Returns +true+.
    #
    #  bucket.clear #=> true
    #
    def clear
      @s3.interface.clear_bucket(@name)
    end

    # Delete all keys for which +folder+ can be interpreted as a 'folder'
    # name, i.e. the key itself and every key nested under it.
    # Returns an array of string keys that have been deleted.
    #
    #  bucket.keys.map{|key| key.name}.join(', ') #=> 'test, test/2/34, test/3, test1, test1/logs'
    #  bucket.delete_folder('test')               #=> ['test','test/2/34','test/3']
    #
    def delete_folder(folder, separator='/')
      @s3.interface.delete_folder(@name, folder, separator)
    end

    # Delete the bucket. The bucket must be empty unless +force+ is set,
    # in which case it is cleared first and then deleted.
    # Returns +true+.
    #
    #  bucket.delete(true) #=> true
    #
    def delete(force=false)
      force ? @s3.interface.force_delete_bucket(@name) : @s3.interface.delete_bucket(@name)
    end

    # Deletes an object from this bucket on S3.
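    #
    # Illustrative example (the key name is a placeholder; the call is passed
    # straight to the underlying interface):
    #
    #  bucket.delete_key('logs/today/1.log')
    #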
    def delete_key(key)
      @s3.interface.delete(name, key)
    end

    # Return a list of grantees.
    #
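    # Illustrative example (the actual contents depend on the bucket's ACL):
    #
    #  bucket.grantees #=> [#<Aws::S3::Grantee:0x... >, ...]
    #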
    def grantees
      S3::Grantee::grantees(self)
    end

  end

end
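
# A minimal end-to-end usage sketch, illustrative only: the credentials and
# bucket name are placeholders, `require 'aws'` assumes this file ships as part
# of the 'aws' gem, and each call mirrors the documented examples above.
if __FILE__ == $0
  require 'aws'

  s3     = Aws::S3.new('my_access_key_id', 'my_secret_access_key')
  bucket = Aws::S3::Bucket.create(s3, 'my_awesome_bucket', true)

  bucket.put('logs/today/1.log', 'Olala!')          # store an object
  puts bucket.get('logs/today/1.log')               # fetch its data back
  puts bucket.keys('prefix' => 'logs').map(&:name)  # list keys under the prefix
  bucket.delete(true)                               # clear and then delete the bucket
end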