package scala.collection.parallel
package immutable
import org.scalacheck._
import org.scalacheck.Gen
import org.scalacheck.Gen._
import org.scalacheck.Prop._
import org.scalacheck.Properties
import org.scalacheck.Arbitrary._
import scala.collection._
import scala.collection.parallel.ops._
abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("immutable.ParHashMap[" + tp + "]") {
  // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
  // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)

  type CollType = ParHashMap[K, V]

  def isCheckingViews = false

  def hasStrictOrder = false

  def tasksupport: TaskSupport

  // Builds a sequential HashMap of `sz` entries by sampling one of the supplied generators.
  def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
    var hm = new immutable.HashMap[K, V]
    val gen = vals(rnd.nextInt(vals.size))
    for (i <- 0 until sz) hm += sample(gen)
    hm
  }

  // Builds a ParHashMap element by element and attaches the configured task support.
  def fromTraversable(t: Traversable[(K, V)]) = {
    var phm = new ParHashMap[K, V]
    phm.tasksupport = tasksupport
    var i = 0
    for (kv <- t.toList) {
      phm += kv
      i += 1
    }
    phm
  }
}
class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
  with PairOperators[Int, Int]
  with PairValues[Int, Int]
{
  def intvalues = new IntValues {}
  def kvalues = intvalues.values
  def vvalues = intvalues.values

  val intoperators = new IntOperators {}
  def voperators = intoperators
  def koperators = intoperators

  override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
    case pm: ParHashMap[k, v] =>
      pm.printDebugInfo
    case _ =>
      println("could not match data structure type: " + ds.getClass)
  }
}
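
// A minimal usage sketch (an assumption, not part of the original suite): since the check is a
// ScalaCheck Properties instance, it can be run on its own once a concrete TaskSupport is
// supplied, for example a ForkJoinTaskSupport over a fresh fork/join pool (assuming a ScalaCheck
// version that still exposes Properties#check):
//
//   val mapCheck = new IntIntParallelHashMapCheck(
//     new ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool)
//   )
//   mapCheck.check()  // runs every registered property and reports the results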
abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("immutable.ParHashSet[" + tp + "]") {
  // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
  // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)

  type CollType = ParHashSet[T]

  def isCheckingViews = false

  def hasStrictOrder = false

  def tasksupport: TaskSupport

  // Builds a sequential HashSet of `sz` elements by sampling one of the supplied generators.
  def ofSize(vals: Seq[Gen[T]], sz: Int) = {
    var hs = new immutable.HashSet[T]
    val gen = vals(rnd.nextInt(vals.size))
    for (i <- 0 until sz) hs += sample(gen)
    hs
  }

  // Builds a ParHashSet element by element and attaches the configured task support.
  def fromTraversable(t: Traversable[T]) = {
    var phs = new ParHashSet[T]
    phs.tasksupport = tasksupport
    var i = 0
    for (elem <- t.toList) {
      phs += elem
      i += 1
    }
    phs
  }

  override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
    case ps: ParHashSet[t] =>
      println("Parallel hash set")
    case _ =>
      println("could not match data structure type: " + ds.getClass)
  }
}
class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
  with IntOperators
  with IntValues
{
  def intvalues = new IntValues {}
  def kvalues = intvalues.values
  def vvalues = intvalues.values

  override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
    case ps: ParHashSet[t] =>
      println("Parallel hash set")
    case _ =>
      println("could not match data structure type: " + ds.getClass)
  }
}
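
// A hedged sketch (assumed object and pool names, not part of this file) of how these checks are
// typically aggregated into one ScalaCheck suite, with a shared pool and one TaskSupport per check:
//
//   object ParHashTrieProperties extends Properties("immutable parallel hash tries") {
//     val pool = new scala.concurrent.forkjoin.ForkJoinPool(Runtime.getRuntime.availableProcessors)
//     include(new IntIntParallelHashMapCheck(new ForkJoinTaskSupport(pool)))
//     include(new IntParallelHashSetCheck(new ForkJoinTaskSupport(pool)))
//   }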