File: cache_s3.Rd (r-cran-memoise 2.0.1-1)

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/cache_s3.R
\name{cache_s3}
\alias{cache_s3}
\title{Amazon Web Services S3 Cache}
\usage{
cache_s3(cache_name, algo = "sha512", compress = FALSE)
}
\arguments{
\item{cache_name}{Bucket name for storing cache files.}

\item{algo}{The hashing algorithm used for the cache, see
\code{\link[digest]{digest}} for available algorithms.}

\item{compress}{Argument passed to \code{saveRDS}. One of \code{FALSE},
\code{"gzip"}, \code{"bzip2"} or \code{"xz"}. Default: \code{FALSE}.}
}
\description{
Amazon Web Services S3 backed cache, for remote caching.
}
\examples{

\dontrun{
# Set AWS credentials.
Sys.setenv("AWS_ACCESS_KEY_ID" = "<access key>",
           "AWS_SECRET_ACCESS_KEY" = "<access secret>")

# Create a cache backed by a uniquely named S3 bucket.
s3 <- cache_s3("unique-bucket-name")
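
# The hashing algorithm and RDS compression can also be set explicitly
# via the arguments documented above; the values here are illustrative.
# s3 <- cache_s3("unique-bucket-name", algo = "sha512", compress = "gzip")

# Memoise a function so its results are stored in the S3-backed cache.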
mem_runif <- memoise(runif, cache = s3)
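
# The first call computes runif(10) and writes the result to the bucket;
# repeating the call with the same arguments returns the cached value
# instead of recomputing it.
mem_runif(10)
mem_runif(10)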
}


}