File: bigmax.R

package info (click to toggle)
r-cran-foreach 1.5.2-1
  • links: PTS, VCS
  • area: main
  • in suites: bookworm, forky, sid, trixie
  • size: 648 kB
  • sloc: makefile: 2
file content (43 lines) | stat: -rw-r--r-- 1,252 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
library(foreach)
library(RSQLite)

# Define a simple iterator over a query result. Each call to nextElem()
# fetches the next batch of up to `n` rows as a data frame; when the
# result set is exhausted it is cleared and 'StopIteration' is signalled,
# which is the protocol the iterators/foreach packages expect.
#
# con       - an open DBI connection
# statement - SQL text passed to dbSendQuery()
# ...       - forwarded to dbSendQuery()
# n         - batch size: rows fetched per call to nextElem()
iquery <- function(con, statement, ..., n=1) {
  rs <- dbSendQuery(con, statement, ...)
  nextEl <- function() {
    # dbFetch() is the supported DBI API; fetch() is deprecated in RSQLite.
    d <- dbFetch(rs, n = n)
    if (nrow(d) == 0) {
      # No rows left: release the result set before ending iteration.
      dbClearResult(rs)
      stop('StopIteration')
    }
    d
  }
  obj <- list(nextElem=nextEl)
  class(obj) <- c('abstractiter', 'iter')
  obj
}

# Create an SQLite instance.
drv <- dbDriver('SQLite')

# Initialize a fresh database in a temporary file, then load it with
# 100 copies of the USArrests data frame so there is enough data to
# process in batches.
tfile <- tempfile()
con <- dbConnect(drv, dbname=tfile)
data(USArrests)
dbWriteTable(con, 'USArrests', USArrests)
for (copy in seq_len(99)) {
  dbWriteTable(con, 'USArrests', USArrests, append=TRUE)
}

# Create an iterator to issue the query, selecting the fields of interest.
# We then compute the maximum of each of those fields, 100 records at a time.
qit <- iquery(con, 'select Murder, Assault, Rape from USArrests', n=100)

# %dopar% warns and falls back to sequential execution when no parallel
# backend has been registered. Register the sequential backend explicitly
# (only if the user hasn't already registered one of their own), so the
# script runs cleanly either way.
if (!getDoParRegistered())
  registerDoSEQ()

# Each task computes the per-column maxima of one 100-row batch; pmax
# combines the batch results into the overall per-column maximum.
r <- foreach(d=qit, .combine='pmax', .packages='foreach') %dopar% {
  foreach(x=iter(d, by='col'), .combine='c') %do% max(x)
}
print(r)

# Clean up: close the connection and remove the temporary database file.
dbDisconnect(con)
file.remove(tfile)