diff --git a/core/src/main/scala/scala/collection/Parallel.scala b/core/src/main/scala/scala/collection/Parallel.scala new file mode 100644 index 00000000..174e3ab7 --- /dev/null +++ b/core/src/main/scala/scala/collection/Parallel.scala @@ -0,0 +1,17 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala +package collection + +/** A marker trait for collections which have their operations parallelised. + * + * @since 2.9 + * @author Aleksandar Prokopec + */ +trait Parallel diff --git a/core/src/main/scala/scala/collection/Parallelizable.scala b/core/src/main/scala/scala/collection/Parallelizable.scala index f419103d..462bf3f3 100644 --- a/core/src/main/scala/scala/collection/Parallelizable.scala +++ b/core/src/main/scala/scala/collection/Parallelizable.scala @@ -20,7 +20,7 @@ import parallel.Combiner */ trait Parallelizable[+A, +ParRepr <: Parallel] extends Any { - def seq: TraversableOnce[A] + def seq: IterableOnce[A] /** Returns a parallel implementation of this collection. * diff --git a/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala b/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala index ed262b54..c603126f 100644 --- a/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala +++ b/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala @@ -20,8 +20,13 @@ import scala.collection.parallel._ * @tparam To the type of the collection to be created. * @since 2.8 */ -trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel { +trait CanCombineFrom[-From, -Elem, +To] extends OldCanBuildFrom[From, Elem, To] with Parallel { def apply(from: From): Combiner[Elem, To] def apply(): Combiner[Elem, To] } +// TODO Remove +trait OldCanBuildFrom[-From, -Elem, +To] { + def apply(from: From): collection.mutable.Builder[Elem, To] + def apply(): collection.mutable.Builder[Elem, To] +} diff --git a/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala b/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala index 9a740118..98f830a6 100644 --- a/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala +++ b/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala @@ -12,7 +12,7 @@ package generic import scala.collection.parallel.Combiner import scala.collection.parallel.ParIterable -import scala.collection.parallel.ParMap +//import scala.collection.parallel.ParMap import scala.language.higherKinds /** A template class for companion objects of parallel collection classes. 
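For context on the shim above: a `Combiner` is a `Builder` that can additionally `combine` with a combiner filled by another worker, and `CanCombineFrom` is the parallel analogue of the old `CanBuildFrom`. A minimal sketch of that contract (the explicit two-chunk split and the name `fromTwoChunks` are hypothetical; in the library, splitting and scheduling are handled by a `TaskSupport`):

    import scala.collection.parallel.{Combiner, ParIterable}

    // Two workers fill independent combiners; the partial results are merged.
    def fromTwoChunks[A](left: Seq[A], right: Seq[A]): ParIterable[A] = {
      val c1: Combiner[A, ParIterable[A]] = ParIterable.newCombiner[A]
      val c2: Combiner[A, ParIterable[A]] = ParIterable.newCombiner[A]
      left.foreach(c1 += _)
      right.foreach(c2 += _)
      (c1 combine c2).result() // combine is expected to be cheap, roughly O(log n)
    }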
@@ -32,7 +32,7 @@ trait GenericParCompanion[+CC[X] <: ParIterable[X]] { def newCombiner[A]: Combiner[A, CC[A]] } -trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] { - def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] -} +//trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] { +// def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] +//} diff --git a/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala b/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala index e577be64..1bc5c8e9 100644 --- a/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala +++ b/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala @@ -12,7 +12,7 @@ package generic import scala.collection.parallel.Combiner import scala.collection.parallel.ParIterable -import scala.collection.parallel.ParMap +//import scala.collection.parallel.ParMap import scala.annotation.unchecked.uncheckedVariance import scala.language.higherKinds @@ -25,10 +25,10 @@ import scala.language.higherKinds * @since 2.8 */ trait GenericParTemplate[+A, +CC[X] <: ParIterable[X]] -extends GenericTraversableTemplate[A, CC] - with HasNewCombiner[A, CC[A] @uncheckedVariance] + extends GenericTraversableTemplate[A, CC] + with HasNewCombiner[A, CC[A] @uncheckedVariance] { - def companion: GenericCompanion[CC] with GenericParCompanion[CC] + def companion: /*GenericCompanion[CC] with*/ GenericParCompanion[CC] protected[this] override def newBuilder: scala.collection.mutable.Builder[A, CC[A]] = newCombiner @@ -47,18 +47,18 @@ extends GenericTraversableTemplate[A, CC] } -trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericParTemplate[(K, V), ParIterable] -{ - protected[this] override def newCombiner: Combiner[(K, V), CC[K, V]] = { - val cb = mapCompanion.newCombiner[K, V] - cb - } - - def mapCompanion: GenericParMapCompanion[CC] - - def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = { - val cb = mapCompanion.newCombiner[P, Q] - cb - } -} +//trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericParTemplate[(K, V), ParIterable] +//{ +// protected[this] override def newCombiner: Combiner[(K, V), CC[K, V]] = { +// val cb = mapCompanion.newCombiner[K, V] +// cb +// } +// +// def mapCompanion: GenericParMapCompanion[CC] +// +// def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = { +// val cb = mapCompanion.newCombiner[P, Q] +// cb +// } +//} diff --git a/core/src/main/scala/scala/collection/generic/GenericTraversableTemplate.scala b/core/src/main/scala/scala/collection/generic/GenericTraversableTemplate.scala new file mode 100644 index 00000000..41b481e3 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/GenericTraversableTemplate.scala @@ -0,0 +1,214 @@ +package scala.collection.generic + +import scala.language.higherKinds +import scala.annotation.migration +import scala.annotation.unchecked.uncheckedVariance +//import scala.collection.GenTraversableOnce +import scala.collection.mutable.Builder +import scala.collection.parallel.ParIterable + +// TODO inline in GenericParTemplate +trait GenericTraversableTemplate[+A, +CC[X] /*<: GenTraversable[X]*/] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { + + def seq: Iterable[A] + + /** Applies a function `f` to all elements of this $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + * + * @usecase def foreach(f: A => Unit): Unit + */ + def foreach[U](f: A => U): Unit + + /** Selects the first element of this $coll. + * + * @return the first element of this $coll. + * @throws NoSuchElementException if the $coll is empty. + */ + def head: A + + /** Tests whether this $coll is empty. + * + * @return `true` if the $coll contains no elements, `false` otherwise. + */ + def isEmpty: Boolean + + /** The factory companion object that builds instances of class $Coll. + * (or its `Iterable` superclass where class $Coll is not a `Seq`.) + */ +// def companion: GenericCompanion[CC] + + /** The builder that builds instances of type $Coll[A] + */ + protected[this] def newBuilder: Builder[A, CC[A]]/* = companion.newBuilder[A]*/ + + /** The generic builder that builds instances of $Coll + * at arbitrary element types. + */ + def genericBuilder[B]: Builder[B, CC[B]]/* = companion.newBuilder[B]*/ + + private def sequential: IterableOnce[A] = this.asInstanceOf[ParIterable[A]].seq /*this.asInstanceOf[GenTraversableOnce[A]].seq*/ + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. + * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + for (xy <- sequential.iterator) { + val (x, y) = asPair(xy) + b1 += x + b2 += y + } + (b1.result(), b2.result()) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + val b3 = genericBuilder[A3] + + for (xyz <- sequential.iterator) { + val (x, y, z) = asTriple(xyz) + b1 += x + b2 += y + b3 += z + } + (b1.result(), b2.result(), b3.result()) + } + + /** Converts this $coll of traversable collections into + * a $coll formed by the elements of these traversable + * collections. + * + * @tparam B the type of the elements of each traversable collection. + * @param asTraversable an implicit conversion which asserts that the element + * type of this $coll is a `GenTraversable`. + * @return a new $coll resulting from concatenating all element ${coll}s.
+ * + * @usecase def flatten[B]: $Coll[B] + * + * @inheritdoc + * + * The resulting collection's type will be guided by the + * static type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(1, 2, 3) + * ).flatten + * // xs == List(1, 2, 3, 1, 2, 3) + * + * val ys = Set( + * List(1, 2, 3), + * List(3, 2, 1) + * ).flatten + * // ys == Set(1, 2, 3) + * }}} + */ + def flatten[B](implicit asTraversable: A => /*<:<*/ IterableOnce[B]): CC[B] = { + val b = genericBuilder[B] + for (xs <- sequential.iterator) + b ++= asTraversable(xs) + b.result() + } + + /** Transposes this $coll of traversable collections into + * a $coll of ${coll}s. + * + * @tparam B the type of the elements of each traversable collection. + * @param asTraversable an implicit conversion which asserts that the element + * type of this $coll is a `GenTraversable`. + * @return a two-dimensional $coll of ${coll}s which has as ''n''th row + * the ''n''th column of this $coll. + * @throws IllegalArgumentException if all collections in this $coll + * are not of the same size. + */ + @migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0") + def transpose[B](implicit asTraversable: A => /*<:<*/ IterableOnce[B]): CC[CC[B] @uncheckedVariance] = { + if (isEmpty) + return genericBuilder[CC[B]].result() + + def fail = throw new IllegalArgumentException("transpose requires all collections have the same size") + + val headSize = asTraversable(head).size + val bs: IndexedSeq[Builder[B, CC[B]]] = IndexedSeq.fill(headSize)(genericBuilder[B]) + for (xs <- sequential.iterator) { + var i = 0 + for (x <- asTraversable(xs).iterator) { + if (i >= headSize) fail + bs(i) += x + i += 1 + } + if (i != headSize) + fail + } + val bb = genericBuilder[CC[B]] + for (b <- bs) bb += b.result + bb.result() + } +} diff --git a/core/src/main/scala/scala/collection/generic/HasNewBuilder.scala b/core/src/main/scala/scala/collection/generic/HasNewBuilder.scala new file mode 100644 index 00000000..aa0ce669 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/HasNewBuilder.scala @@ -0,0 +1,17 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ +package scala +package collection +package generic + +import mutable.Builder + +trait HasNewBuilder[+A, +Repr] extends Any { + /** The builder that builds instances of Repr */ + protected[this] def newBuilder: Builder[A, Repr] +} diff --git a/core/src/main/scala/scala/collection/generic/ParFactory.scala b/core/src/main/scala/scala/collection/generic/ParFactory.scala index 9ee9dd42..c841bca0 100644 --- a/core/src/main/scala/scala/collection/generic/ParFactory.scala +++ b/core/src/main/scala/scala/collection/generic/ParFactory.scala @@ -24,11 +24,11 @@ import scala.language.higherKinds * @since 2.8 */ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]] -extends GenTraversableFactory[CC] - with GenericParCompanion[CC] { +extends /*GenTraversableFactory[CC] + with*/ GenericParCompanion[CC] { //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C] - +/* /** A generic implementation of the `CanCombineFrom` trait, which forwards * all calls to `apply(from)` to the `genericParBuilder` method of the $coll * `from`, and calls to `apply()` to this factory. */ class GenericCanCombineFrom[A] extends GenericCanBuildFrom[A] with CanCombineFrom[CC[_], A, CC[A]] { override def apply(from: Coll) = from.genericCombiner override def apply() = newBuilder[A] - } + }*/ } diff --git a/core/src/main/scala/scala/collection/generic/ParMapFactory.scala b/core/src/main/scala/scala/collection/generic/ParMapFactory.scala index ff077c4a..b2aaf024 100644 --- a/core/src/main/scala/scala/collection/generic/ParMapFactory.scala +++ b/core/src/main/scala/scala/collection/generic/ParMapFactory.scala @@ -10,45 +10,45 @@ package scala package collection package generic -import scala.collection.parallel.ParMap -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner -import scala.collection.mutable.Builder -import scala.language.higherKinds - -/** A template class for companion objects of `ParMap` and subclasses thereof. - * This class extends `TraversableFactory` and provides a set of operations - * to create `$Coll` objects. - * - * @define coll parallel map - * @define Coll `ParMap` - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values.
- * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC[X, Y], _]] -extends GenMapFactory[CC] - with GenericParMapCompanion[CC] { - - type MapColl = CC[_, _] - - /** The default builder for $Coll objects. - * @tparam K the type of the keys - * @tparam V the type of the associated values - */ - override def newBuilder[K, V]: Builder[(K, V), CC[K, V]] = newCombiner[K, V] - - /** The default combiner for $Coll objects. - * @tparam K the type of the keys - * @tparam V the type of the associated values - */ - def newCombiner[K, V]: Combiner[(K, V), CC[K, V]] - - class CanCombineFromMap[K, V] extends CanCombineFrom[CC[_, _], (K, V), CC[K, V]] { - def apply(from: MapColl) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]] - def apply() = newCombiner[K, V] - } - -} +//import scala.collection.parallel.ParMap +//import scala.collection.parallel.ParMapLike +//import scala.collection.parallel.Combiner +//import scala.collection.mutable.Builder +//import scala.language.higherKinds +// +///** A template class for companion objects of `ParMap` and subclasses thereof. +// * This class extends `TraversableFactory` and provides a set of operations +// * to create `$Coll` objects. +// * +// * @define coll parallel map +// * @define Coll `ParMap` +// * @define factoryInfo +// * This object provides a set of operations needed to create `$Coll` values. +// * @author Aleksandar Prokopec +// * @since 2.8 +// */ +//abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC[X, Y], _]] +//extends GenMapFactory[CC] +// with GenericParMapCompanion[CC] { +// +// type MapColl = CC[_, _] +// +// /** The default builder for $Coll objects. +// * @tparam K the type of the keys +// * @tparam V the type of the associated values +// */ +// override def newBuilder[K, V]: Builder[(K, V), CC[K, V]] = newCombiner[K, V] +// +// /** The default combiner for $Coll objects. +// * @tparam K the type of the keys +// * @tparam V the type of the associated values +// */ +// def newCombiner[K, V]: Combiner[(K, V), CC[K, V]] +// +// class CanCombineFromMap[K, V] extends CanCombineFrom[CC[_, _], (K, V), CC[K, V]] { +// def apply(from: MapColl) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]] +// def apply() = newCombiner[K, V] +// } +// +//} diff --git a/core/src/main/scala/scala/collection/generic/ParSetFactory.scala b/core/src/main/scala/scala/collection/generic/ParSetFactory.scala index ea0dac4d..a79e302a 100644 --- a/core/src/main/scala/scala/collection/generic/ParSetFactory.scala +++ b/core/src/main/scala/scala/collection/generic/ParSetFactory.scala @@ -10,28 +10,28 @@ package scala package collection package generic -import scala.collection.parallel.Combiner -import scala.collection.parallel.ParSet -import scala.collection.parallel.ParSetLike -import scala.language.higherKinds - -/** - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values. 
- * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] with GenericParTemplate[X, CC]] - extends GenSetFactory[CC] - with GenericParCompanion[CC] -{ - def newBuilder[A]: Combiner[A, CC[A]] = newCombiner[A] - - def newCombiner[A]: Combiner[A, CC[A]] - - class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] { - override def apply(from: Coll) = from.genericCombiner[A] - override def apply() = newCombiner[A] - } -} +//import scala.collection.parallel.Combiner +//import scala.collection.parallel.ParSet +//import scala.collection.parallel.ParSetLike +//import scala.language.higherKinds +// +///** +// * @define factoryInfo +// * This object provides a set of operations needed to create `$Coll` values. +// * @author Aleksandar Prokopec +// * @since 2.8 +// */ +//abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] with GenericParTemplate[X, CC]] +// extends GenSetFactory[CC] +// with GenericParCompanion[CC] +//{ +// def newBuilder[A]: Combiner[A, CC[A]] = newCombiner[A] +// +// def newCombiner[A]: Combiner[A, CC[A]] +// +// class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] { +// override def apply(from: Coll) = from.genericCombiner[A] +// override def apply() = newCombiner[A] +// } +//} diff --git a/core/src/main/scala/scala/collection/generic/Signalling.scala b/core/src/main/scala/scala/collection/generic/Signalling.scala new file mode 100644 index 00000000..e05acaa9 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/Signalling.scala @@ -0,0 +1,176 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala +package collection +package generic + +import java.util.concurrent.atomic.AtomicInteger + +/** + * A message interface that serves as the unique channel through which + * the part of the collection being operated on can receive messages + * from a different task. + * + * One example is the `find` method, which can use the + * signalling interface to inform worker threads that an element has + * been found and no further search is necessary. + * + * @author prokopec + * + * @define abortflag + * Abort flag being true means that a worker can abort and produce whatever result, + * since its result will not affect the final result of computation. Examples + * of operations using this are the `find`, `forall` and `exists` methods. + * + * @define indexflag + * The index flag holds an integer which carries some operation-specific meaning. For + * instance, the `takeWhile` operation sets the index flag to the position of the element + * where the predicate fails. Other workers may check this index against the indices + * they are working on and return if this index is smaller than their index. Examples + * of operations using this are `takeWhile`, `dropWhile`, `span` and `indexOf`. + */ +trait Signalling { + /** + * Checks whether an abort signal has been issued. + * + * $abortflag + * @return the state of the abort flag + */ + def isAborted: Boolean + + /** + * Sends an abort signal to other workers. + * + * $abortflag + */ + def abort(): Unit + + /** + * Returns the value of the index flag. + * + * $indexflag + * @return the value of the index flag + */ + def indexFlag: Int + + /** + * Sets the value of the index flag. + * + * $indexflag + * @param f the value to which the index flag is set.
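+ * For example, a `takeWhile` worker whose predicate fails at position `i`
+ * publishes `i` through the conditional variant declared below
+ * (hypothetical sketch; `signals`, `pred`, `elem` and `i` are placeholder names):
+ * {{{
+ * if (!pred(elem)) signals.setIndexFlagIfLesser(i)
+ * }}}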
+ */ + def setIndexFlag(f: Int): Unit + + /** + * Sets the value of the index flag if the argument is greater than the current value. + * This method does so atomically. + * + * $indexflag + * @param f the value to which the index flag is set + */ + def setIndexFlagIfGreater(f: Int): Unit + + /** + * Sets the value of the index flag if the argument is less than the current value. + * This method does so atomically. + * + * $indexflag + * @param f the value to which the index flag is set + */ + def setIndexFlagIfLesser(f: Int): Unit + + /** + * A read-only tag specific to the signalling object. It is used to give + * specific workers information on the part of the collection being operated on. + */ + def tag: Int +} + +/** + * This signalling implementation returns default values and ignores received signals. + */ +class DefaultSignalling extends Signalling with VolatileAbort { + def indexFlag = -1 + def setIndexFlag(f: Int): Unit = () + def setIndexFlagIfGreater(f: Int): Unit = () + def setIndexFlagIfLesser(f: Int): Unit = () + + def tag = -1 +} + +/** + * An object that returns default values and ignores received signals. + */ +object IdleSignalling extends DefaultSignalling + +/** + * A mixin trait that implements abort flag behaviour using volatile variables. + */ +trait VolatileAbort extends Signalling { + @volatile private var abortflag = false + override def isAborted = abortflag + override def abort() = abortflag = true +} + +/** + * A mixin trait that implements index flag behaviour using atomic integers. + * The `setIndexFlag` operation is wait-free, while the conditional set operations `setIndexFlagIfGreater` + * and `setIndexFlagIfLesser` are lock-free and support only monotonic changes. + */ +trait AtomicIndexFlag extends Signalling { + private val intflag: AtomicInteger = new AtomicInteger(-1) + abstract override def indexFlag = intflag.get + abstract override def setIndexFlag(f: Int) = intflag.set(f) + abstract override def setIndexFlagIfGreater(f: Int) = { + var loop = true + do { + val old = intflag.get + if (f <= old) loop = false + else if (intflag.compareAndSet(old, f)) loop = false + } while (loop) + } + abstract override def setIndexFlagIfLesser(f: Int) = { + var loop = true + do { + val old = intflag.get + if (f >= old) loop = false + else if (intflag.compareAndSet(old, f)) loop = false + } while (loop) + } +} + +/** + * An implementation of the signalling interface using delegates. + */ +trait DelegatedSignalling extends Signalling { + /** + * The delegate to which method calls are redirected. + */ + var signalDelegate: Signalling + + def isAborted = signalDelegate.isAborted + def abort() = signalDelegate.abort() + + def indexFlag = signalDelegate.indexFlag + def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f) + def setIndexFlagIfGreater(f: Int) = signalDelegate.setIndexFlagIfGreater(f) + def setIndexFlagIfLesser(f: Int) = signalDelegate.setIndexFlagIfLesser(f) + + def tag = signalDelegate.tag +} + +/** + * Class implementing delegated signalling. + */ +class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling + +/** + * Class implementing delegated signalling, but having its own distinct `tag`.
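+ * For instance, workers on disjoint parts of a collection can share one
+ * delegate while carrying distinct tags (hypothetical sketch):
+ * {{{
+ * val root = new DefaultSignalling with AtomicIndexFlag
+ * val left = new TaggedDelegatedContext(root, 0)
+ * val right = new TaggedDelegatedContext(root, 1)
+ * }}}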
+ */ +class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg) diff --git a/core/src/main/scala/scala/collection/generic/Sizing.scala b/core/src/main/scala/scala/collection/generic/Sizing.scala new file mode 100644 index 00000000..73584ce8 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/Sizing.scala @@ -0,0 +1,17 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala +package collection +package generic + +/** A trait for objects which have a size. + */ +trait Sizing { + def size: Int +} diff --git a/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala b/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala index 21d94ba0..308b8975 100644 --- a/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala +++ b/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala @@ -8,221 +8,221 @@ package scala.collection.parallel -import scala.language.implicitConversions -import scala.{collection => sc} -import scala.collection.{mutable => scm, immutable => sci, concurrent => scc} - -import scala.collection._ - -/** Extension methods for `.par` on sequential collections. */ -object CollectionConverters extends CollectionConvertersLowPriority { - - // Traversable & Iterable - - implicit class TraversableIsParallelizable[A](private val coll: sc.Traversable[A]) extends AnyVal with CustomParallelizable[A, ParIterable[A]] { - def seq = coll - override def par = coll match { - case coll: sc.Set[_] => new SetIsParallelizable(coll.asInstanceOf[sc.Set[A]]).par - case coll: sc.Map[_, _] => new MapIsParallelizable(coll.asInstanceOf[sc.Map[_, _]]).par.asInstanceOf[ParIterable[A]] - case coll: sci.Iterable[_] => new ImmutableIterableIsParallelizable(coll.asInstanceOf[sci.Iterable[A]]).par - case coll: scm.Iterable[_] => new MutableIterableIsParallelizable(coll.asInstanceOf[scm.Iterable[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParIterable[A]]].par - case _ => ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray, same as for scm.Iterable - } - } - - implicit class MutableIterableIsParallelizable[A](private val coll: scm.Iterable[A]) extends AnyVal with CustomParallelizable[A, mutable.ParIterable[A]] { - def seq = coll - override def par = coll match { - case coll: scm.Seq[_] => new MutableSeqIsParallelizable(coll.asInstanceOf[scm.Seq[A]]).par - case coll: scm.Set[_] => new MutableSetIsParallelizable(coll.asInstanceOf[scm.Set[A]]).par - case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll.asInstanceOf[scm.Map[_, _]]).par.asInstanceOf[mutable.ParIterable[A]] - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParIterable[A]]].par - case _ => mutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray - } - } - - implicit class ImmutableIterableIsParallelizable[A](private val coll: sci.Iterable[A]) extends AnyVal with CustomParallelizable[A, immutable.ParIterable[A]] { - def seq = coll - override def par = coll match { - case coll: sci.Seq[_] => new ImmutableSeqIsParallelizable(coll.asInstanceOf[sci.Seq[A]]).par - case coll: sci.Set[_] => new ImmutableSetIsParallelizable(coll.asInstanceOf[sci.Set[A]]).par - case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll.asInstanceOf[sci.Map[_, 
_]]).par.asInstanceOf[immutable.ParIterable[A]] - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParIterable[A]]].par - case _ => immutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParVector - } - } - - // mutable.Seq - - implicit class MutableSeqIsParallelizable[A](private val coll: scm.Seq[A]) extends AnyVal with CustomParallelizable[A, mutable.ParSeq[A]] { - def seq = coll - override def par = coll match { - case coll: scm.WrappedArray[_] => new WrappedArrayIsParallelizable(coll.asInstanceOf[scm.WrappedArray[A]]).par - case coll: scm.ArraySeq[_] => new MutableArraySeqIsParallelizable(coll.asInstanceOf[scm.ArraySeq[A]]).par - case coll: scm.ArrayBuffer[_] => new MutableArrayBufferIsParallelizable(coll.asInstanceOf[scm.ArrayBuffer[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParSeq[A]]].par - case _ => mutable.ParSeq.newCombiner[A].fromSequential(seq) - } - } - - implicit class WrappedArrayIsParallelizable[T](private val coll: scm.WrappedArray[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { - def seq = coll - override def par = mutable.ParArray.handoff(coll.array) - } - - implicit class MutableArraySeqIsParallelizable[T](private val coll: scm.ArraySeq[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { - def seq = coll - override def par = mutable.ParArray.handoff(coll.array.asInstanceOf[Array[T]], coll.length) - } - - implicit class MutableArrayBufferIsParallelizable[T](private val coll: scm.ArrayBuffer[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { - def seq = coll - override def par = mutable.ParArray.handoff[T](coll.array.asInstanceOf[Array[T]], coll.size) - } - - // immutable.Seq - - implicit class ImmutableSeqIsParallelizable[A](private val coll: sci.Seq[A]) extends AnyVal with CustomParallelizable[A, immutable.ParSeq[A]] { - def seq = coll - override def par = coll match { - case coll: sci.Vector[_] => new VectorIsParallelizable(coll.asInstanceOf[sci.Vector[A]]).par - case coll: sci.Range => new RangeIsParallelizable(coll).par.asInstanceOf[immutable.ParSeq[A]] - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParSeq[A]]].par - case _ => immutable.ParSeq.newCombiner[A].fromSequential(seq) - } - } - - implicit class RangeIsParallelizable(private val coll: sci.Range) extends AnyVal with CustomParallelizable[Int, immutable.ParRange] { - def seq = coll - override def par = new immutable.ParRange(coll) - } - - implicit class VectorIsParallelizable[T](private val coll: sci.Vector[T]) extends AnyVal with CustomParallelizable[T, immutable.ParVector[T]] { - def seq = coll - override def par = new immutable.ParVector(coll) - } - - // Set - - implicit class SetIsParallelizable[A](private val coll: sc.Set[A]) extends AnyVal with CustomParallelizable[A, ParSet[A]] { - def seq = coll - override def par = coll match { - case coll: sci.Set[_] => new ImmutableSetIsParallelizable(coll.asInstanceOf[sci.Set[A]]).par - case coll: scm.Set[_] => new MutableSetIsParallelizable(coll.asInstanceOf[scm.Set[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSet[A]]].par - case _ => ParSet.newCombiner[A].fromSequential(seq) - } - } - - implicit class ImmutableSetIsParallelizable[A](private val coll: sci.Set[A]) extends AnyVal with CustomParallelizable[A, immutable.ParSet[A]] { - def seq = coll - override def par = coll match { - case coll: sci.HashSet[_] => new 
ImmutableHashSetIsParallelizable(coll.asInstanceOf[sci.HashSet[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParSet[A]]].par - case _ => immutable.ParSet.newCombiner[A].fromSequential(seq) - } - } - - implicit class MutableSetIsParallelizable[A](private val coll: scm.Set[A]) extends AnyVal with CustomParallelizable[A, mutable.ParSet[A]] { - def seq = coll - override def par = coll match { - case coll: scm.HashSet[_] => new MutableHashSetIsParallelizable(coll.asInstanceOf[scm.HashSet[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParSet[A]]].par - case _ => mutable.ParSet.newCombiner[A].fromSequential(seq) - } - } - - implicit class MutableHashSetIsParallelizable[T](private val coll: scm.HashSet[T]) extends AnyVal with CustomParallelizable[T, mutable.ParHashSet[T]] { - def seq = coll - override def par = new mutable.ParHashSet(coll.hashTableContents) - } - - implicit class ImmutableHashSetIsParallelizable[T](private val coll: sci.HashSet[T]) extends AnyVal with CustomParallelizable[T, immutable.ParHashSet[T]] { - def seq = coll - override def par = immutable.ParHashSet.fromTrie(coll) - } - - // Map - - implicit class MapIsParallelizable[K, V](private val coll: sc.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), ParMap[K, V]] { - def seq = coll - override def par = coll match { - case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll.asInstanceOf[sci.Map[K, V]]).par - case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll.asInstanceOf[scm.Map[K, V]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), ParMap[K, V]]].par - case _ => ParMap.newCombiner[K, V].fromSequential(seq) - } - } - - implicit class ImmutableMapIsParallelizable[K, V](private val coll: sci.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), immutable.ParMap[K, V]] { - def seq = coll - override def par = coll match { - case coll: sci.HashMap[_, _] => new ImmutableHashMapIsParallelizable(coll.asInstanceOf[sci.HashMap[K, V]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), immutable.ParMap[K, V]]].par - case _ => immutable.ParMap.newCombiner[K, V].fromSequential(seq) - } - } - - implicit class MutableMapIsParallelizable[K, V](private val coll: scm.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParMap[K, V]] { - def seq = coll - override def par = coll match { - case coll: scm.HashMap[_, _] => new MutableHashMapIsParallelizable(coll.asInstanceOf[scm.HashMap[K, V]]).par - case coll: scc.TrieMap[_, _] => new ConcurrentTrieMapIsParallelizable(coll.asInstanceOf[scc.TrieMap[K, V]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), mutable.ParMap[K, V]]].par - case _ => mutable.ParMap.newCombiner[K, V].fromSequential(seq) - } - } - - implicit class ImmutableHashMapIsParallelizable[K, V](private val coll: sci.HashMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), immutable.ParHashMap[K, V]] { - def seq = coll - override def par = immutable.ParHashMap.fromTrie(coll) - } - - implicit class MutableHashMapIsParallelizable[K, V](private val coll: scm.HashMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParHashMap[K, V]] { - def seq = coll - override def par = new mutable.ParHashMap[K, V](coll.hashTableContents) - } - - implicit class ConcurrentTrieMapIsParallelizable[K, V](private val coll: scc.TrieMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), 
mutable.ParTrieMap[K, V]] { - def seq = coll - override def par = new mutable.ParTrieMap(coll) - } - - // Other - - implicit class ArrayIsParallelizable[T](private val a: Array[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { - def seq = a // via ArrayOps - override def par = mutable.ParArray.handoff(a) - } -} - -trait CollectionConvertersLowPriority { self: CollectionConverters.type => - - // Generic - - implicit def genTraversableLikeIsParallelizable[A, Repr](coll: sc.GenTraversableLike[A, Repr]): Parallelizable[A, ParIterable[A]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParIterable[A]]].par - case coll: sc.Traversable[_] => new TraversableIsParallelizable(coll.asInstanceOf[sc.Traversable[A]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenTraversableLike must be Parallelizable or a scala.collection.Traversable") - } - - implicit def genSeqLikeIsParallelizable[A, Repr](coll: sc.GenSeqLike[A, Repr]): Parallelizable[A, ParSeq[A]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSeq[A]]].par - case it: scm.Seq[_] => new MutableSeqIsParallelizable(it.asInstanceOf[scm.Seq[A]]) - case it: sci.Seq[_] => new ImmutableSeqIsParallelizable(it.asInstanceOf[sci.Seq[A]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenSeqLike must be Parallelizable or a scala.collection.mutable.Seq or scala.collection.immutable.Seq") - } - - implicit def genSetLikeIsParallelizable[A, Repr](coll: sc.GenSetLike[A, Repr]): Parallelizable[A, ParSet[A]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSet[A]]].par - case it: sc.Set[_] => new SetIsParallelizable(it.asInstanceOf[sc.Set[A]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenSetLike must be Parallelizable or a scala.collection.Set") - } - - implicit def genMapLikeIsParallelizable[K, V, Repr](coll: sc.GenMapLike[K, V, Repr]): Parallelizable[(K, V), ParMap[K, V]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), ParMap[K, V]]].par - case it: sc.Map[_, _] => new MapIsParallelizable(it.asInstanceOf[sc.Map[K, V]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenMapLike must be Parallelizable or a scala.collection.Map") - } -} +//import scala.language.implicitConversions +//import scala.{collection => sc} +//import scala.collection.{mutable => scm, immutable => sci, concurrent => scc} +// +//import scala.collection._ +// +///** Extension methods for `.par` on sequential collections. 
*/ +//object CollectionConverters extends CollectionConvertersLowPriority { +// +// // Traversable & Iterable +// +// implicit class TraversableIsParallelizable[A](private val coll: sc.Traversable[A]) extends AnyVal with CustomParallelizable[A, ParIterable[A]] { +// def seq = coll +// override def par = coll match { +// case coll: sc.Set[_] => new SetIsParallelizable(coll.asInstanceOf[sc.Set[A]]).par +// case coll: sc.Map[_, _] => new MapIsParallelizable(coll.asInstanceOf[sc.Map[_, _]]).par.asInstanceOf[ParIterable[A]] +// case coll: sci.Iterable[_] => new ImmutableIterableIsParallelizable(coll.asInstanceOf[sci.Iterable[A]]).par +// case coll: scm.Iterable[_] => new MutableIterableIsParallelizable(coll.asInstanceOf[scm.Iterable[A]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParIterable[A]]].par +// case _ => ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray, same as for scm.Iterable +// } +// } +// +// implicit class MutableIterableIsParallelizable[A](private val coll: scm.Iterable[A]) extends AnyVal with CustomParallelizable[A, mutable.ParIterable[A]] { +// def seq = coll +// override def par = coll match { +// case coll: scm.Seq[_] => new MutableSeqIsParallelizable(coll.asInstanceOf[scm.Seq[A]]).par +// case coll: scm.Set[_] => new MutableSetIsParallelizable(coll.asInstanceOf[scm.Set[A]]).par +// case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll.asInstanceOf[scm.Map[_, _]]).par.asInstanceOf[mutable.ParIterable[A]] +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParIterable[A]]].par +// case _ => mutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray +// } +// } +// +// implicit class ImmutableIterableIsParallelizable[A](private val coll: sci.Iterable[A]) extends AnyVal with CustomParallelizable[A, immutable.ParIterable[A]] { +// def seq = coll +// override def par = coll match { +// case coll: sci.Seq[_] => new ImmutableSeqIsParallelizable(coll.asInstanceOf[sci.Seq[A]]).par +// case coll: sci.Set[_] => new ImmutableSetIsParallelizable(coll.asInstanceOf[sci.Set[A]]).par +// case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll.asInstanceOf[sci.Map[_, _]]).par.asInstanceOf[immutable.ParIterable[A]] +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParIterable[A]]].par +// case _ => immutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParVector +// } +// } +// +// // mutable.Seq +// +// implicit class MutableSeqIsParallelizable[A](private val coll: scm.Seq[A]) extends AnyVal with CustomParallelizable[A, mutable.ParSeq[A]] { +// def seq = coll +// override def par = coll match { +// case coll: scm.WrappedArray[_] => new WrappedArrayIsParallelizable(coll.asInstanceOf[scm.WrappedArray[A]]).par +// case coll: scm.ArraySeq[_] => new MutableArraySeqIsParallelizable(coll.asInstanceOf[scm.ArraySeq[A]]).par +// case coll: scm.ArrayBuffer[_] => new MutableArrayBufferIsParallelizable(coll.asInstanceOf[scm.ArrayBuffer[A]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParSeq[A]]].par +// case _ => mutable.ParSeq.newCombiner[A].fromSequential(seq) +// } +// } +// +// implicit class WrappedArrayIsParallelizable[T](private val coll: scm.WrappedArray[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { +// def seq = coll +// override def par = mutable.ParArray.handoff(coll.array) +// } +// +// implicit class MutableArraySeqIsParallelizable[T](private val coll: 
scm.ArraySeq[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { +// def seq = coll +// override def par = mutable.ParArray.handoff(coll.array.asInstanceOf[Array[T]], coll.length) +// } +// +// implicit class MutableArrayBufferIsParallelizable[T](private val coll: scm.ArrayBuffer[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { +// def seq = coll +// override def par = mutable.ParArray.handoff[T](coll.array.asInstanceOf[Array[T]], coll.size) +// } +// +// // immutable.Seq +// +// implicit class ImmutableSeqIsParallelizable[A](private val coll: sci.Seq[A]) extends AnyVal with CustomParallelizable[A, immutable.ParSeq[A]] { +// def seq = coll +// override def par = coll match { +// case coll: sci.Vector[_] => new VectorIsParallelizable(coll.asInstanceOf[sci.Vector[A]]).par +// case coll: sci.Range => new RangeIsParallelizable(coll).par.asInstanceOf[immutable.ParSeq[A]] +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParSeq[A]]].par +// case _ => immutable.ParSeq.newCombiner[A].fromSequential(seq) +// } +// } +// +// implicit class RangeIsParallelizable(private val coll: sci.Range) extends AnyVal with CustomParallelizable[Int, immutable.ParRange] { +// def seq = coll +// override def par = new immutable.ParRange(coll) +// } +// +// implicit class VectorIsParallelizable[T](private val coll: sci.Vector[T]) extends AnyVal with CustomParallelizable[T, immutable.ParVector[T]] { +// def seq = coll +// override def par = new immutable.ParVector(coll) +// } +// +// // Set +// +// implicit class SetIsParallelizable[A](private val coll: sc.Set[A]) extends AnyVal with CustomParallelizable[A, ParSet[A]] { +// def seq = coll +// override def par = coll match { +// case coll: sci.Set[_] => new ImmutableSetIsParallelizable(coll.asInstanceOf[sci.Set[A]]).par +// case coll: scm.Set[_] => new MutableSetIsParallelizable(coll.asInstanceOf[scm.Set[A]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSet[A]]].par +// case _ => ParSet.newCombiner[A].fromSequential(seq) +// } +// } +// +// implicit class ImmutableSetIsParallelizable[A](private val coll: sci.Set[A]) extends AnyVal with CustomParallelizable[A, immutable.ParSet[A]] { +// def seq = coll +// override def par = coll match { +// case coll: sci.HashSet[_] => new ImmutableHashSetIsParallelizable(coll.asInstanceOf[sci.HashSet[A]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParSet[A]]].par +// case _ => immutable.ParSet.newCombiner[A].fromSequential(seq) +// } +// } +// +// implicit class MutableSetIsParallelizable[A](private val coll: scm.Set[A]) extends AnyVal with CustomParallelizable[A, mutable.ParSet[A]] { +// def seq = coll +// override def par = coll match { +// case coll: scm.HashSet[_] => new MutableHashSetIsParallelizable(coll.asInstanceOf[scm.HashSet[A]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParSet[A]]].par +// case _ => mutable.ParSet.newCombiner[A].fromSequential(seq) +// } +// } +// +// implicit class MutableHashSetIsParallelizable[T](private val coll: scm.HashSet[T]) extends AnyVal with CustomParallelizable[T, mutable.ParHashSet[T]] { +// def seq = coll +// override def par = new mutable.ParHashSet(coll.hashTableContents) +// } +// +// implicit class ImmutableHashSetIsParallelizable[T](private val coll: sci.HashSet[T]) extends AnyVal with CustomParallelizable[T, immutable.ParHashSet[T]] { +// def seq = coll +// override def par = 
immutable.ParHashSet.fromTrie(coll) +// } +// +// // Map +// +// implicit class MapIsParallelizable[K, V](private val coll: sc.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), ParMap[K, V]] { +// def seq = coll +// override def par = coll match { +// case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll.asInstanceOf[sci.Map[K, V]]).par +// case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll.asInstanceOf[scm.Map[K, V]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), ParMap[K, V]]].par +// case _ => ParMap.newCombiner[K, V].fromSequential(seq) +// } +// } +// +// implicit class ImmutableMapIsParallelizable[K, V](private val coll: sci.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), immutable.ParMap[K, V]] { +// def seq = coll +// override def par = coll match { +// case coll: sci.HashMap[_, _] => new ImmutableHashMapIsParallelizable(coll.asInstanceOf[sci.HashMap[K, V]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), immutable.ParMap[K, V]]].par +// case _ => immutable.ParMap.newCombiner[K, V].fromSequential(seq) +// } +// } +// +// implicit class MutableMapIsParallelizable[K, V](private val coll: scm.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParMap[K, V]] { +// def seq = coll +// override def par = coll match { +// case coll: scm.HashMap[_, _] => new MutableHashMapIsParallelizable(coll.asInstanceOf[scm.HashMap[K, V]]).par +// case coll: scc.TrieMap[_, _] => new ConcurrentTrieMapIsParallelizable(coll.asInstanceOf[scc.TrieMap[K, V]]).par +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), mutable.ParMap[K, V]]].par +// case _ => mutable.ParMap.newCombiner[K, V].fromSequential(seq) +// } +// } +// +// implicit class ImmutableHashMapIsParallelizable[K, V](private val coll: sci.HashMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), immutable.ParHashMap[K, V]] { +// def seq = coll +// override def par = immutable.ParHashMap.fromTrie(coll) +// } +// +// implicit class MutableHashMapIsParallelizable[K, V](private val coll: scm.HashMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParHashMap[K, V]] { +// def seq = coll +// override def par = new mutable.ParHashMap[K, V](coll.hashTableContents) +// } +// +// implicit class ConcurrentTrieMapIsParallelizable[K, V](private val coll: scc.TrieMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParTrieMap[K, V]] { +// def seq = coll +// override def par = new mutable.ParTrieMap(coll) +// } +// +// // Other +// +// implicit class ArrayIsParallelizable[T](private val a: Array[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { +// def seq = a // via ArrayOps +// override def par = mutable.ParArray.handoff(a) +// } +//} +// +//trait CollectionConvertersLowPriority { self: CollectionConverters.type => +// +// // Generic +// +// implicit def genTraversableLikeIsParallelizable[A, Repr](coll: sc.GenTraversableLike[A, Repr]): Parallelizable[A, ParIterable[A]] = coll match { +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParIterable[A]]].par +// case coll: sc.Traversable[_] => new TraversableIsParallelizable(coll.asInstanceOf[sc.Traversable[A]]) +// case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenTraversableLike must be Parallelizable or a scala.collection.Traversable") +// } +// +// implicit def genSeqLikeIsParallelizable[A, Repr](coll: 
sc.GenSeqLike[A, Repr]): Parallelizable[A, ParSeq[A]] = coll match { +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSeq[A]]].par +// case it: scm.Seq[_] => new MutableSeqIsParallelizable(it.asInstanceOf[scm.Seq[A]]) +// case it: sci.Seq[_] => new ImmutableSeqIsParallelizable(it.asInstanceOf[sci.Seq[A]]) +// case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenSeqLike must be Parallelizable or a scala.collection.mutable.Seq or scala.collection.immutable.Seq") +// } +// +// implicit def genSetLikeIsParallelizable[A, Repr](coll: sc.GenSetLike[A, Repr]): Parallelizable[A, ParSet[A]] = coll match { +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSet[A]]].par +// case it: sc.Set[_] => new SetIsParallelizable(it.asInstanceOf[sc.Set[A]]) +// case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenSetLike must be Parallelizable or a scala.collection.Set") +// } +// +// implicit def genMapLikeIsParallelizable[K, V, Repr](coll: sc.GenMapLike[K, V, Repr]): Parallelizable[(K, V), ParMap[K, V]] = coll match { +// case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), ParMap[K, V]]].par +// case it: sc.Map[_, _] => new MapIsParallelizable(it.asInstanceOf[sc.Map[K, V]]) +// case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenMapLike must be Parallelizable or a scala.collection.Map") +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/Combiner.scala b/core/src/main/scala/scala/collection/parallel/Combiner.scala index a9d81ef9..dd5ac00a 100644 --- a/core/src/main/scala/scala/collection/parallel/Combiner.scala +++ b/core/src/main/scala/scala/collection/parallel/Combiner.scala @@ -89,8 +89,8 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { /** Add all elements from a sequential collection and return the result. */ - def fromSequential(seq: TraversableOnce[Elem]): To = { - for (x <- seq) this += x + def fromSequential(seq: IterableOnce[Elem]): To = { + for (x <- seq.iterator) this += x result() } } diff --git a/core/src/main/scala/scala/collection/parallel/ParIterable.scala b/core/src/main/scala/scala/collection/parallel/ParIterable.scala index ca6d805b..95e950ee 100644 --- a/core/src/main/scala/scala/collection/parallel/ParIterable.scala +++ b/core/src/main/scala/scala/collection/parallel/ParIterable.scala @@ -9,9 +9,9 @@ package scala package collection.parallel -import scala.collection.GenIterable +//import scala.collection.GenIterable import scala.collection.generic._ -import scala.collection.parallel.mutable.ParArrayCombiner +//import scala.collection.parallel.mutable.ParArrayCombiner /** A template trait for parallel iterable collections. 
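 * A `ParIterable` can be seeded from a sequential source through its
 * combiner, using the `fromSequential` helper changed in the `Combiner`
 * hunk above (hypothetical sketch):
 * {{{
 * val pi: ParIterable[Int] = ParIterable.newCombiner[Int].fromSequential(List(1, 2, 3))
 * }}}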
* @@ -25,11 +25,10 @@ import scala.collection.parallel.mutable.ParArrayCombiner * @since 2.9 */ trait ParIterable[+T] -extends GenIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - //protected[this] override def newBuilder = ParIterable.newBuilder[T] + extends /*GenIterable[T] + with*/ GenericParTemplate[T, ParIterable] + with ParIterableLike[T, ParIterable, ParIterable[T], Iterable[T]] { + override def companion: GenericParCompanion[ParIterable] = ParIterable def stringPrefix = "ParIterable" } @@ -37,10 +36,9 @@ extends GenIterable[T] /** $factoryInfo */ object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] + def newBuilder[T]: Combiner[T, ParIterable[T]] = immutable.ParVector.newBuilder /*ParArrayCombiner[T]*/ - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] + def newCombiner[T]: Combiner[T, ParIterable[T]] = immutable.ParVector.newCombiner /*ParArrayCombiner[T]*/ } diff --git a/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala b/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala index 1e64f231..5798b89d 100644 --- a/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala +++ b/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala @@ -10,23 +10,14 @@ package scala package collection.parallel import scala.language.{ higherKinds, implicitConversions } - import scala.collection.mutable.Builder import scala.collection.mutable.ArrayBuffer -import scala.collection.IterableLike -import scala.collection.Parallel -import scala.collection.CustomParallelizable +import scala.collection.{CustomParallelizable, /*GenIterable, GenTraversable, GenTraversableOnce,*/ IterableOps, Parallel} import scala.collection.generic._ -import scala.collection.GenIterableLike -import scala.collection.GenIterable -import scala.collection.GenTraversableOnce -import scala.collection.GenTraversable -import immutable.HashMapCombiner +//import immutable.HashMapCombiner import scala.reflect.ClassTag - -import scala.annotation.unchecked.uncheckedVariance - -import scala.collection.parallel.ParallelCollectionImplicits._ +//import scala.annotation.unchecked.uncheckedVariance +//import scala.collection.parallel.ParallelCollectionImplicits._ /** A template trait for parallel collections of type `ParIterable[T]`. 
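The next hunk re-parameterises `ParIterableLike` with the constructor type `CC[X]` so that transformers can return `CC[S]` built from the companion's combiner, instead of being driven by an implicit `CanBuildFrom`. A sketch of the resulting user-facing signatures (a hypothetical session; the exact static result types depend on the concrete collection's `companion`):

    import scala.collection.parallel.immutable.ParVector

    val v = ParVector(1, 2, 3)
    v.map(_ * 2)                                 // ParVector(2, 4, 6), no CanBuildFrom involved
    v.collect { case x if x % 2 == 1 => x * 10 } // ParVector(10, 30)
    v.flatMap(x => ParVector(x, -x))             // note: flatMap now takes T => ParIterable[S]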
@@ -154,13 +145,17 @@ import scala.collection.parallel.ParallelCollectionImplicits._ * @define Coll `ParIterable` * @define coll parallel iterable */ -trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]] -extends GenIterableLike[T, Repr] - with CustomParallelizable[T, Repr] +trait ParIterableLike[+T, +CC[X] <: ParIterable[X], +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableOps[T, Iterable /* TODO */, Sequential]] +extends /*GenIterableLike[T, Repr] + with*/ CustomParallelizable[T, Repr] with Parallel with HasNewCombiner[T, Repr] { -self: ParIterableLike[T, Repr, Sequential] => +self => + + def size: Int + def stringPrefix: String + def companion: GenericParCompanion[CC] @transient @volatile @@ -339,7 +334,7 @@ self: ParIterableLike[T, Repr, Sequential] => def asCombiner = cb.asInstanceOf[Combiner[Elem, To]] } - protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] { + protected[this] def bf2seq[S, That](bf: OldCanBuildFrom[Repr, S, That]) = new OldCanBuildFrom[Sequential, S, That] { def apply(from: Sequential) = bf.apply(newCombiner.fromSequential(from)) def apply() = bf.apply() } @@ -496,26 +491,17 @@ self: ParIterableLike[T, Repr, Sequential] => reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y) } - def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.map(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.map(f)(bf2seq(bf))*/ + def map[S](f: T => S): CC[S] = { + tasksupport.executeAndWaitResult(new Map[S, CC[S]](f, combinerFactory(() => companion.newCombiner[S]), splitter) mapResult { _.resultWithTaskSupport }) + } - def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.collect(pf)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result }) - } otherwise seq.collect(pf)(bf2seq(bf))*/ + def collect[S](pf: PartialFunction[T, S]): CC[S] = { + tasksupport.executeAndWaitResult(new Collect[S, CC[S]](pf, combinerFactory(() => companion.newCombiner[S]), splitter) mapResult { _.resultWithTaskSupport }) + } - def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.flatMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.flatMap(f)(bf2seq(bf))*/ + def flatMap[S](f: T => ParIterable[S] /*GenTraversableOnce[S]*/): CC[S] = { + tasksupport.executeAndWaitResult(new FlatMap[S, CC[S]](f, combinerFactory(() => companion.newCombiner[S]), splitter) mapResult { _.resultWithTaskSupport }) + } /** Tests whether a predicate holds for all 
elements of this $coll. * @@ -600,8 +586,8 @@ self: ParIterableLike[T, Repr, Sequential] => def filterNot(pred: T => Boolean): Repr = { tasksupport.executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) } - - def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = { +/* + def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: OldCanBuildFrom[Repr, U, That]): That = { if (that.isParallel && bf.isParallel) { // println("case both are parallel") val other = that.asParIterable @@ -633,7 +619,7 @@ self: ParIterableLike[T, Repr, Sequential] => setTaskSupport(b.result(), tasksupport) } } - +*/ def partition(pred: T => Boolean): (Repr, Repr) = { tasksupport.executeAndWaitResult( new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult { @@ -642,12 +628,12 @@ self: ParIterableLike[T, Repr, Sequential] => ) } - def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = { +/* def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = { val r = tasksupport.executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult { rcb => rcb.groupByKey(() => combinerFactory()) }) setTaskSupport(r, tasksupport) - } + }*/ def take(n: Int): Repr = { val actualn = if (size > n) n else size @@ -683,7 +669,7 @@ self: ParIterableLike[T, Repr, Sequential] => cb.resultWithTaskSupport } - override def slice(unc_from: Int, unc_until: Int): Repr = { + /*override*/ def slice(unc_from: Int, unc_until: Int): Repr = { val from = unc_from min size max 0 val until = unc_until min size max from if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until) @@ -714,31 +700,22 @@ self: ParIterableLike[T, Repr, Sequential] => * Note: The neutral element `z` may be applied more than once. 
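 * For example (hypothetical sketch):
 * {{{
 * immutable.ParVector(1, 2, 3, 4).scan(0)(_ + _)
 * // ParVector(0, 1, 3, 6, 10)
 * }}}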
* * @tparam U element type of the resulting collection - * @tparam That type of the resulting collection * @param z neutral element for the operator `op` * @param op the associative operator for the scan - * @param bf $pbfinfo - * @return a collection containing the prefix scan of the elements in the original collection - * - * @usecase def scan(z: T)(op: (T, T) => T): $Coll[T] - * @inheritdoc - * - * @return a new $coll containing the prefix scan of the elements in this $coll + * @return a new $coll containing the prefix scan of the elements in this $coll */ - def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { - if (tasksupport.parallelismLevel > 1) { - if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { - tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult { - cb => cb.resultWithTaskSupport - }) - }) else setTaskSupport((bf(repr) += z).result(), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) - + def scan[U >: T](z: U)(op: (U, U) => U): CC[U] = { + if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { + tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => companion.newCombiner[U])) mapResult { + cb => cb.resultWithTaskSupport + }) + }) else setTaskSupport((companion.newCombiner[U] += z).result(), tasksupport) + } +/* def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanLeft(z)(op)(bf2seq(bf)), tasksupport) def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanRight(z)(op)(bf2seq(bf)), tasksupport) - +*/ /** Takes the longest prefix of elements that satisfy the predicate. 
* * $indexsignalling @@ -811,24 +788,24 @@ self: ParIterableLike[T, Repr, Sequential] => ) } - def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0) + def copyToArray[U >: T](xs: Array[U]): Unit = copyToArray(xs, 0) - def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start) + def copyToArray[U >: T](xs: Array[U], start: Int): Unit = copyToArray(xs, start, xs.length - start) - def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) { + def copyToArray[U >: T](xs: Array[U], start: Int, len: Int): Unit = if (len > 0) { tasksupport.executeAndWaitResult(new CopyToArray(start, len, xs, splitter)) } - def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that) - - def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { + def sameElements[U >: T](that: Iterable[U]) = seq.iterator.sameElements(that) +/* + def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: OldCanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { val thatseq = that.asParSeq tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }) } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport) - def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false) + def zipWithIndex[U >: T, That](implicit bf: OldCanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false) - def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { + def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: OldCanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { val thatseq = that.asParSeq tasksupport.executeAndWaitResult( new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { @@ -836,7 +813,7 @@ self: ParIterableLike[T, Repr, Sequential] => } ) } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport) - +*/ protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = { tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport }) } @@ -845,40 +822,40 @@ self: ParIterableLike[T, Repr, Sequential] => tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport }) } - override def toArray[U >: T: ClassTag]: Array[U] = { + /*override*/ def toArray[U >: T: ClassTag]: Array[U] = { val arr = new Array[U](size) copyToArray(arr) arr } - override def toList: List[T] = seq.toList + /*override*/ def toList: List[T] = seq.toList - override def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq + /*override*/ def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq - override def toStream: Stream[T] = seq.toStream +// override def toStream: Stream[T] = seq.toStream - override def toIterator: Iterator[T] = splitter + /*override*/ def toIterator: Iterator[T] = splitter // the methods below are overridden - override def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // 
have additional, parallel buffers? - + /*override*/ def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers? +/* override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] +*/ + /*override*/ def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] - override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] - - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) - + /*override*/ def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => immutable.ParVector/*ParSeq*/.newCombiner[T]) +/* override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U]) override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V]) override def toVector: Vector[T] = to[Vector] - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) { + override def to[Col[_]](implicit cbf: OldCanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) { toParCollection[T, Col[T]](() => cbf().asCombiner) } else seq.to(cbf) - +*/ /* tasks */ protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] { @@ -1064,7 +1041,7 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class FlatMap[S, That] - (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) + (f: T => ParIterable[S] /*GenTraversableOnce[S]*/, pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], FlatMap[S, That]] { @volatile var result: Combiner[S, That] = null def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf()) @@ -1137,7 +1114,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) } - protected[this] class GroupBy[K, U >: T]( +/* protected[this] class GroupBy[K, U >: T]( f: U => K, mcf: () => HashMapCombiner[K, U], protected[this] val pit: IterableSplitter[T] @@ -1158,7 +1135,7 @@ self: ParIterableLike[T, Repr, Sequential] => // --> we know we're not dropping any mappings result = (result combine that.result).asInstanceOf[HashMapCombiner[K, U]] } - } + }*/ protected[this] class Take[U >: T, This >: Repr] (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) @@ -1307,7 +1284,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else { val opits = othpit.psplitWithSignalling(pit.remaining) val diff = len - pit.remaining - Seq( + scala.collection.immutable.Seq( new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, diff).splitter.asInstanceOf[IterableSplitter[T]], opits(1)) ) @@ -1490,11 +1467,11 @@ self: ParIterableLike[T, Repr, Sequential] => debugBuffer += s } - import scala.collection.DebugUtils._ +/* import scala.collection.DebugUtils._ private[parallel] def printDebugBuffer() = println(buildString { append => for (s <- debugBuffer) { append(s) } - }) + })*/ } diff --git a/core/src/main/scala/scala/collection/parallel/ParMap.scala 
b/core/src/main/scala/scala/collection/parallel/ParMap.scala index bcab9e84..4b9dfe62 100644 --- a/core/src/main/scala/scala/collection/parallel/ParMap.scala +++ b/core/src/main/scala/scala/collection/parallel/ParMap.scala @@ -9,60 +9,60 @@ package scala package collection.parallel -import scala.collection.Map -import scala.collection.GenMap -import scala.collection.generic.ParMapFactory -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.generic.CanCombineFrom - -/** A template trait for parallel maps. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, +V] -extends GenMap[K, V] - with GenericParMapTemplate[K, V, ParMap] - with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], Map[K, V]] -{ -self => - - def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - //protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] - - def empty: ParMap[K, V] = new mutable.ParHashMap[K, V] - - override def stringPrefix = "ParMap" - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - def + [U >: V](kv: (K, U)): ParMap[K, U] -} - - - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new mutable.ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = mutable.ParHashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map - * because of variance issues. - */ - abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] { - override def size = underlying.size - def get(key: A) = underlying.get(key) - def splitter = underlying.splitter - override def default(key: A): B = d(key) - } -} +//import scala.collection.Map +//import scala.collection.GenMap +//import scala.collection.generic.ParMapFactory +//import scala.collection.generic.GenericParMapTemplate +//import scala.collection.generic.GenericParMapCompanion +//import scala.collection.generic.CanCombineFrom +// +///** A template trait for parallel maps. 
+// * +// * $sideeffects +// * +// * @tparam K the key type of the map +// * @tparam V the value type of the map +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParMap[K, +V] +//extends GenMap[K, V] +// with GenericParMapTemplate[K, V, ParMap] +// with ParIterable[(K, V)] +// with ParMapLike[K, V, ParMap[K, V], Map[K, V]] +//{ +//self => +// +// def mapCompanion: GenericParMapCompanion[ParMap] = ParMap +// +// //protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] +// +// def empty: ParMap[K, V] = new mutable.ParHashMap[K, V] +// +// override def stringPrefix = "ParMap" +// +// override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) +// +// def + [U >: V](kv: (K, U)): ParMap[K, U] +//} +// +// +// +//object ParMap extends ParMapFactory[ParMap] { +// def empty[K, V]: ParMap[K, V] = new mutable.ParHashMap[K, V] +// +// def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = mutable.ParHashMapCombiner[K, V] +// +// implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] +// +// /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map +// * because of variance issues. +// */ +// abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] { +// override def size = underlying.size +// def get(key: A) = underlying.get(key) +// def splitter = underlying.splitter +// override def default(key: A): B = d(key) +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/ParMapLike.scala b/core/src/main/scala/scala/collection/parallel/ParMapLike.scala index e3361642..a3d5ee13 100644 --- a/core/src/main/scala/scala/collection/parallel/ParMapLike.scala +++ b/core/src/main/scala/scala/collection/parallel/ParMapLike.scala @@ -9,133 +9,133 @@ package scala package collection.parallel -import scala.collection.MapLike -import scala.collection.GenMapLike -import scala.collection.Map - -import scala.annotation.unchecked.uncheckedVariance - -/** A template trait for mutable parallel maps. This trait is to be mixed in - * with concrete parallel maps to override the representation type. 
- * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * @define Coll `ParMap` - * @define coll parallel map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMapLike[K, - +V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: Map[K, V] with MapLike[K, V, Sequential]] -extends GenMapLike[K, V, Repr] - with ParIterableLike[(K, V), Repr, Sequential] -{ -self => - - def default(key: K): V = throw new NoSuchElementException("key not found: " + key) - - def empty: Repr - - def apply(key: K) = get(key) match { - case Some(v) => v - case None => default(key) - } - - def getOrElse[U >: V](key: K, default: => U): U = get(key) match { - case Some(v) => v - case None => default - } - - def contains(key: K): Boolean = get(key).isDefined - - def isDefinedAt(key: K): Boolean = contains(key) - - private[this] def keysIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[K] = - new IterableSplitter[K] { - i => - val iter = s - def hasNext = iter.hasNext - def next() = iter.next()._1 - def split = { - val ss = iter.split.map(keysIterator(_)) - ss.foreach { _.signalDelegate = i.signalDelegate } - ss - } - def remaining = iter.remaining - def dup = keysIterator(iter.dup) - } - - def keysIterator: IterableSplitter[K] = keysIterator(splitter) - - private[this] def valuesIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[V] = - new IterableSplitter[V] { - i => - val iter = s - def hasNext = iter.hasNext - def next() = iter.next()._2 - def split = { - val ss = iter.split.map(valuesIterator(_)) - ss.foreach { _.signalDelegate = i.signalDelegate } - ss - } - def remaining = iter.remaining - def dup = valuesIterator(iter.dup) - } - - def valuesIterator: IterableSplitter[V] = valuesIterator(splitter) - - protected class DefaultKeySet extends ParSet[K] { - def contains(key : K) = self.contains(key) - def splitter = keysIterator(self.splitter) - def + (elem: K): ParSet[K] = - (ParSet[K]() ++ this + elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem - def - (elem: K): ParSet[K] = - (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! 
concrete overrides abstract problem - override def size = self.size - override def foreach[U](f: K => U) = for ((k, v) <- self) f(k) - override def seq = self.seq.keySet - } - - protected class DefaultValuesIterable extends ParIterable[V] { - def splitter = valuesIterator(self.splitter) - override def size = self.size - override def foreach[U](f: V => U) = for ((k, v) <- self) f(v) - def seq = self.seq.values - } - - def keySet: ParSet[K] = new DefaultKeySet - - def keys: ParIterable[K] = keySet - - def values: ParIterable[V] = new DefaultValuesIterable - - def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] { - lazy val filtered = self.filter(kv => p(kv._1)) - override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) - def splitter = filtered.splitter - override def contains(key: K) = self.contains(key) && p(key) - def get(key: K) = if (!p(key)) None else self.get(key) - def seq = self.seq.filterKeys(p) - def size = filtered.size - def + [U >: V](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv - def - (key: K): ParMap[K, V] = ParMap[K, V]() ++ this - key - } - - def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] { - override def foreach[U](g: ((K, S)) => U): Unit = for ((k, v) <- self) g((k, f(v))) - def splitter = self.splitter.map(kv => (kv._1, f(kv._2))) - override def size = self.size - override def contains(key: K) = self.contains(key) - def get(key: K) = self.get(key).map(f) - def seq = self.seq.mapValues(f) - def + [U >: S](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv - def - (key: K): ParMap[K, S] = ParMap[K, S]() ++ this - key - } - - // note - should not override toMap (could be mutable) -} +//import scala.collection.MapLike +//import scala.collection.GenMapLike +//import scala.collection.Map +// +//import scala.annotation.unchecked.uncheckedVariance +// +///** A template trait for mutable parallel maps. This trait is to be mixed in +// * with concrete parallel maps to override the representation type. 
+// * +// * $sideeffects +// * +// * @tparam K the key type of the map +// * @tparam V the value type of the map +// * @define Coll `ParMap` +// * @define coll parallel map +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParMapLike[K, +// +V, +// +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], +// +Sequential <: Map[K, V] with MapLike[K, V, Sequential]] +//extends GenMapLike[K, V, Repr] +// with ParIterableLike[(K, V), Repr, Sequential] +//{ +//self => +// +// def default(key: K): V = throw new NoSuchElementException("key not found: " + key) +// +// def empty: Repr +// +// def apply(key: K) = get(key) match { +// case Some(v) => v +// case None => default(key) +// } +// +// def getOrElse[U >: V](key: K, default: => U): U = get(key) match { +// case Some(v) => v +// case None => default +// } +// +// def contains(key: K): Boolean = get(key).isDefined +// +// def isDefinedAt(key: K): Boolean = contains(key) +// +// private[this] def keysIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[K] = +// new IterableSplitter[K] { +// i => +// val iter = s +// def hasNext = iter.hasNext +// def next() = iter.next()._1 +// def split = { +// val ss = iter.split.map(keysIterator(_)) +// ss.foreach { _.signalDelegate = i.signalDelegate } +// ss +// } +// def remaining = iter.remaining +// def dup = keysIterator(iter.dup) +// } +// +// def keysIterator: IterableSplitter[K] = keysIterator(splitter) +// +// private[this] def valuesIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[V] = +// new IterableSplitter[V] { +// i => +// val iter = s +// def hasNext = iter.hasNext +// def next() = iter.next()._2 +// def split = { +// val ss = iter.split.map(valuesIterator(_)) +// ss.foreach { _.signalDelegate = i.signalDelegate } +// ss +// } +// def remaining = iter.remaining +// def dup = valuesIterator(iter.dup) +// } +// +// def valuesIterator: IterableSplitter[V] = valuesIterator(splitter) +// +// protected class DefaultKeySet extends ParSet[K] { +// def contains(key : K) = self.contains(key) +// def splitter = keysIterator(self.splitter) +// def + (elem: K): ParSet[K] = +// (ParSet[K]() ++ this + elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem +// def - (elem: K): ParSet[K] = +// (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! 
concrete overrides abstract problem +// override def size = self.size +// override def foreach[U](f: K => U) = for ((k, v) <- self) f(k) +// override def seq = self.seq.keySet +// } +// +// protected class DefaultValuesIterable extends ParIterable[V] { +// def splitter = valuesIterator(self.splitter) +// override def size = self.size +// override def foreach[U](f: V => U) = for ((k, v) <- self) f(v) +// def seq = self.seq.values +// } +// +// def keySet: ParSet[K] = new DefaultKeySet +// +// def keys: ParIterable[K] = keySet +// +// def values: ParIterable[V] = new DefaultValuesIterable +// +// def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] { +// lazy val filtered = self.filter(kv => p(kv._1)) +// override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) +// def splitter = filtered.splitter +// override def contains(key: K) = self.contains(key) && p(key) +// def get(key: K) = if (!p(key)) None else self.get(key) +// def seq = self.seq.filterKeys(p) +// def size = filtered.size +// def + [U >: V](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv +// def - (key: K): ParMap[K, V] = ParMap[K, V]() ++ this - key +// } +// +// def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] { +// override def foreach[U](g: ((K, S)) => U): Unit = for ((k, v) <- self) g((k, f(v))) +// def splitter = self.splitter.map(kv => (kv._1, f(kv._2))) +// override def size = self.size +// override def contains(key: K) = self.contains(key) +// def get(key: K) = self.get(key).map(f) +// def seq = self.seq.mapValues(f) +// def + [U >: S](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv +// def - (key: K): ParMap[K, S] = ParMap[K, S]() ++ this - key +// } +// +// // note - should not override toMap (could be mutable) +//} diff --git a/core/src/main/scala/scala/collection/parallel/ParSeq.scala b/core/src/main/scala/scala/collection/parallel/ParSeq.scala index aa179df4..c75fc927 100644 --- a/core/src/main/scala/scala/collection/parallel/ParSeq.scala +++ b/core/src/main/scala/scala/collection/parallel/ParSeq.scala @@ -9,13 +9,13 @@ package scala package collection.parallel -import scala.collection.generic.GenericCompanion +//import scala.collection.generic.GenericCompanion import scala.collection.generic.GenericParCompanion import scala.collection.generic.GenericParTemplate import scala.collection.generic.ParFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.GenSeq -import scala.collection.parallel.mutable.ParArrayCombiner +//import scala.collection.generic.CanCombineFrom +//import scala.collection.GenSeq +//import scala.collection.parallel.mutable.ParArrayCombiner /** A template trait for parallel sequences. 
* @@ -27,12 +27,12 @@ import scala.collection.parallel.mutable.ParArrayCombiner * * @author Aleksandar Prokopec */ -trait ParSeq[+T] extends GenSeq[T] - with ParIterable[T] +trait ParSeq[+T] extends /*GenSeq[T] + with*/ ParIterable[T] with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], Seq[T]] + with ParIterableLike[T, ParSeq, ParSeq[T], Seq[T]] { - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq + override def companion: /*GenericCompanion[ParSeq] with*/ GenericParCompanion[ParSeq] = ParSeq //protected[this] override def newBuilder = ParSeq.newBuilder[T] def apply(i: Int): T @@ -40,11 +40,15 @@ trait ParSeq[+T] extends GenSeq[T] override def toString = super[ParIterable].toString override def stringPrefix = getClass.getSimpleName + + // TODO remove these definitions inlined from ParSeqLike + final def size: Int = length + def length: Int } object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] + def newBuilder[T]: Combiner[T, ParSeq[T]] = immutable.ParVector.newBuilder /*ParArrayCombiner[T]*/ + def newCombiner[T]: Combiner[T, ParSeq[T]] = immutable.ParVector.newCombiner /*ParArrayCombiner[T]*/ } diff --git a/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala b/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala index 3b460549..88198a68 100644 --- a/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala +++ b/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala @@ -9,473 +9,473 @@ package scala package collection.parallel -import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator } -import scala.collection.generic.DefaultSignalling -import scala.collection.generic.AtomicIndexFlag -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.VolatileAbort - -import scala.collection.parallel.ParallelCollectionImplicits._ - -/** A template trait for sequences of type `ParSeq[T]`, representing - * parallel sequences with element type `T`. - * - * $parallelseqinfo - * - * @tparam T the type of the elements contained in this collection - * @tparam Repr the type of the actual collection containing the elements - * @tparam Sequential the type of the sequential version of this parallel collection - * - * @define parallelseqinfo - * Parallel sequences inherit the `Seq` trait. Their indexing and length computations - * are defined to be efficient. Like their sequential counterparts - * they always have a defined order of elements. This means they will produce resulting - * parallel sequences in the same way sequential sequences do. However, the order - * in which they perform bulk operations on elements to produce results is not defined and is generally - * nondeterministic. If the higher-order functions given to them produce no sideeffects, - * then this won't be noticeable. - * - * @define mayNotTerminateInf - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * Note: will not terminate for infinite-sized collections. - * - * This trait defines a new, more general `split` operation and reimplements the `split` - * operation of `ParallelIterable` trait using the new `split` operation. 
- * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, Sequential]] -extends scala.collection.GenSeqLike[T, Repr] - with ParIterableLike[T, Repr, Sequential] { -self => - - protected[this] type SuperParIterator = IterableSplitter[T] - - /** A more refined version of the iterator found in the `ParallelIterable` trait, - * this iterator can be split into arbitrary subsets of iterators. - * - * @return an iterator that can be split into subsets of precise size - */ - protected[parallel] def splitter: SeqSplitter[T] - - override def iterator: PreciseSplitter[T] = splitter - - override def size = length - - /** Used to iterate elements using indices */ - protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] { - private var i = start - - def hasNext = i < end - - def next(): T = if (i < end) { - val x = self(i) - i += 1 - x - } else Iterator.empty.next() - - def head = self(i) - - final def remaining = end - i - - def dup = new Elements(i, end) {} - - def split = psplit(remaining / 2, remaining - remaining / 2) - - def psplit(sizes: Int*) = { - val incr = sizes.scanLeft(0)(_ + _) - for ((from, until) <- incr.init zip incr.tail) yield { - new Elements(start + from, (start + until) min end) {} - } - } - - override def toString = "Elements(" + start + ", " + end + ")" - } - - /* ParallelSeq methods */ - - /** Returns the length of the longest segment of elements starting at - * a given position satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to maximum integer value. - * - * @param p the predicate used to test the elements - * @param from the starting offset for the search - * @return the length of the longest segment of elements starting at `from` and - * satisfying the predicate - */ - def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else { - val realfrom = if (from < 0) 0 else from - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1 - } - - /** Finds the first element satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to maximum integer value. - * - * @param p the predicate used to test the elements - * @param from the starting offset for the search - * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists - */ - def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else { - val realfrom = if (from < 0) 0 else from - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx)) - } - - /** Finds the last element satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to minimum integer value. 
- * - * @param p the predicate used to test the elements - * @param end the maximum offset for the search - * @return the index `<= end` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists - */ - def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else { - val until = if (end >= length) length else end + 1 - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MinValue) - tasksupport.executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx)) - } - - def reverse: Repr = { - tasksupport.executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.resultWithTaskSupport }) - } - - def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult( - new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.resultWithTaskSupport } - ) - } else setTaskSupport(seq.reverseMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.reverseMap(f)(bf2seq(bf))*/ - - /** Tests whether this $coll contains the given sequence at a given index. - * - * $abortsignalling - * - * @tparam S the element type of `that` parallel sequence - * @param that the parallel sequence this sequence is being searched for - * @param offset the starting offset for the search - * @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise - */ - def startsWith[S](that: GenSeq[S], offset: Int): Boolean = that ifParSeq { pthat => - if (offset < 0 || offset >= length) offset == length && pthat.length == 0 - else if (pthat.length == 0) true - else if (pthat.length > length - offset) false - else { - val ctx = new DefaultSignalling with VolatileAbort - tasksupport.executeAndWaitResult( - new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter) - ) - } - } otherwise seq.startsWith(that, offset) - - override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat => - val ctx = new DefaultSignalling with VolatileAbort - length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter)) - } otherwise seq.sameElements(that) - - /** Tests whether this $coll ends with the given parallel sequence. 
- * - * $abortsignalling - * - * @tparam S the type of the elements of `that` sequence - * @param that the sequence to test - * @return `true` if this $coll has `that` as a suffix, `false` otherwise - */ - def endsWith[S](that: GenSeq[S]): Boolean = that ifParSeq { pthat => - if (that.length == 0) true - else if (that.length > length) false - else { - val ctx = new DefaultSignalling with VolatileAbort - val tlen = that.length - tasksupport.executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter)) - } - } otherwise seq.endsWith(that) - - def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - val realreplaced = replaced min (length - from) - if (patch.isParSeq && bf(repr).isCombiner && (size - realreplaced + patch.size) > MIN_FOR_COPY) { - val that = patch.asParSeq - val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced) - val cfactory = combinerFactory(() => bf(repr).asCombiner) - val copystart = new Copy[U, That](cfactory, pits(0)) - val copymiddle = wrap { - val tsk = new that.Copy[U, That](cfactory, that.splitter) - tasksupport.executeAndWaitResult(tsk) - } - val copyend = new Copy[U, That](cfactory, pits(2)) - tasksupport.executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult { - _.resultWithTaskSupport - }) - } else patch_sequential(from, patch.seq, replaced) - } - - private def patch_sequential[U >: T, That](fromarg: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - val from = 0 max fromarg - val b = bf(repr) - val repl = (r min (length - from)) max 0 - val pits = splitter.psplitWithSignalling(from, repl, length - from - repl) - b ++= pits(0) - b ++= patch - b ++= pits(2) - setTaskSupport(b.result(), tasksupport) - } - - def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult( - new Updated(index, elem, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else setTaskSupport(seq.updated(index, elem)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result }) - } otherwise seq.updated(index, elem)(bf2seq(bf))*/ - - def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - patch(0, mutable.ParArray(elem), 0) - } - - def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - patch(length, mutable.ParArray(elem), 0) - } - - def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) { - patch(length, new immutable.Repetition(elem, len - length), 0) - } else patch(length, Nil, 0) - - override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult( - new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else super.zip(that)(bf) - - /** Tests whether every element of this $coll relates to the - * corresponding element of another parallel sequence by satisfying a test predicate. 
- * - * $abortsignalling - * - * @param that the other parallel sequence - * @param p the test predicate, which relates elements from both sequences - * @tparam S the type of the elements of `that` - * @return `true` if both parallel sequences have the same length and - * `p(x, y)` is `true` for all corresponding elements `x` of this $coll - * and `y` of `that`, otherwise `false` - */ - def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat => - val ctx = new DefaultSignalling with VolatileAbort - length == pthat.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter)) - } otherwise seq.corresponds(that)(p) - - def diff[U >: T](that: GenSeq[U]): Repr = sequentially { - _ diff that - } - - /** Computes the multiset intersection between this $coll and another sequence. - * - * @param that the sequence of elements to intersect with. - * @tparam U the element type of `that` parallel sequence - * @return a new collection of type `That` which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - * - * @usecase def intersect(that: Seq[T]): $Coll[T] - * @inheritdoc - * - * $mayNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[U >: T](that: GenSeq[U]) = sequentially { - _ intersect that - } - - /** Builds a new $coll from this $coll without any duplicate elements. - * $willNotTerminateInf - * - * @return A new $coll which contains the first occurrence of every element of this $coll. 
- */ - def distinct: Repr = sequentially { - _.distinct - } - - override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - - override def toSeq = this.asInstanceOf[ParSeq[T]] - - /* tasks */ - - protected[this] def down(p: IterableSplitter[_]) = p.asInstanceOf[SeqSplitter[T]] - - protected trait Accessor[R, Tp] extends super.Accessor[R, Tp] { - protected[this] val pit: SeqSplitter[T] - } - - protected trait Transformer[R, Tp] extends Accessor[R, Tp] with super.Transformer[R, Tp] - - protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) - extends Accessor[(Int, Boolean), SegmentLength] { - @volatile var result: (Int, Boolean) = null - def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) { - val itsize = pit.remaining - val seglen = pit.prefixLength(pred) - result = (seglen, itsize == seglen) - if (!result._2) pit.setIndexFlagIfLesser(from) - } else result = (0, false) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p) - } - override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2) - override def requiresStrictSplitters = true - } - - protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) - extends Accessor[Int, IndexWhere] { - @volatile var result: Int = -1 - def leaf(prev: Option[Int]) = if (from < pit.indexFlag) { - val r = pit.indexWhere(pred) - if (r != -1) { - result = from + r - pit.setIndexFlagIfLesser(from) - } - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p) - } - override def merge(that: IndexWhere) = result = if (result == -1) that.result else { - if (that.result != -1) result min that.result else result - } - override def requiresStrictSplitters = true - } - - protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: SeqSplitter[T]) - extends Accessor[Int, LastIndexWhere] { - @volatile var result: Int = -1 - def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) { - val r = pit.lastIndexWhere(pred) - if (r != -1) { - result = pos + r - pit.setIndexFlagIfGreater(pos) - } - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p) - } - override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else { - if (that.result != -1) result max that.result else result - } - override def requiresStrictSplitters = true - } - - protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: SeqSplitter[T]) - extends Transformer[Combiner[U, This], Reverse[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf())) - protected[this] def newSubtask(p: SuperParIterator) = new Reverse(cbf, down(p)) - override def merge(that: Reverse[U, This]) = result = that.result combine result - } 
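// These accessor/transformer tasks all share one fork/join shape: `split`
// partitions the splitter into subtasks, `leaf` folds one chunk into a partial
// result, and `merge` combines the partials. A sketch of that shape, reusing
// the names above (illustrative only):
//
//   override def split =
//     pit.splitWithSignalling.map(p => new Reverse(cbf, p))
//   override def merge(that: Reverse[U, This]) =
//     result = that.result combine result  // right chunk first: order reversed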
- - protected[this] class ReverseMap[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: SeqSplitter[T]) - extends Transformer[Combiner[S, That], ReverseMap[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf()) - protected[this] def newSubtask(p: SuperParIterator) = new ReverseMap(f, pbf, down(p)) - override def merge(that: ReverseMap[S, That]) = result = that.result combine result - } - - protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U]) - extends Accessor[Boolean, SameElements[U]] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { - result = pit.sameElements(otherpit) - if (!result) pit.abort() - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = pit.remaining / 2 - val sp = pit.remaining - fp - for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op) - } - override def merge(that: SameElements[U]) = result = result && that.result - override def requiresStrictSplitters = true - } - - protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CombinerFactory[U, That], protected[this] val pit: SeqSplitter[T]) - extends Transformer[Combiner[U, That], Updated[U, That]] { - @volatile var result: Combiner[U, That] = null - def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf()) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p) - } - override def merge(that: Updated[U, That]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class Zip[U >: T, S, That](len: Int, cf: CombinerFactory[(U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) - extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf()) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = len / 2 - val sp = len - len / 2 - val pits = pit.psplitWithSignalling(fp, sp) - val opits = otherpit.psplitWithSignalling(fp, sp) - Seq( - new Zip(fp, cf, pits(0), opits(0)), - new Zip(sp, cf, pits(1), opits(1)) - ) - } - override def merge(that: Zip[U, S, That]) = result = result combine that.result - } - - protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) - extends Accessor[Boolean, Corresponds[S]] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { - result = pit.corresponds(corr)(otherpit) - if (!result) pit.abort() - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = pit.remaining / 2 - val sp = pit.remaining - fp - for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op) - } - override def merge(that: Corresponds[S]) = result = result && 
that.result - override def requiresStrictSplitters = true - } -} +//import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator } +//import scala.collection.generic.DefaultSignalling +//import scala.collection.generic.AtomicIndexFlag +//import scala.collection.generic.CanBuildFrom +//import scala.collection.generic.VolatileAbort +// +//import scala.collection.parallel.ParallelCollectionImplicits._ +// +///** A template trait for sequences of type `ParSeq[T]`, representing +// * parallel sequences with element type `T`. +// * +// * $parallelseqinfo +// * +// * @tparam T the type of the elements contained in this collection +// * @tparam Repr the type of the actual collection containing the elements +// * @tparam Sequential the type of the sequential version of this parallel collection +// * +// * @define parallelseqinfo +// * Parallel sequences inherit the `Seq` trait. Their indexing and length computations +// * are defined to be efficient. Like their sequential counterparts +// * they always have a defined order of elements. This means they will produce resulting +// * parallel sequences in the same way sequential sequences do. However, the order +// * in which they perform bulk operations on elements to produce results is not defined and is generally +// * nondeterministic. If the higher-order functions given to them produce no sideeffects, +// * then this won't be noticeable. +// * +// * @define mayNotTerminateInf +// * Note: may not terminate for infinite-sized collections. +// * @define willNotTerminateInf +// * Note: will not terminate for infinite-sized collections. +// * +// * This trait defines a new, more general `split` operation and reimplements the `split` +// * operation of `ParallelIterable` trait using the new `split` operation. +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, Sequential]] +//extends scala.collection.GenSeqLike[T, Repr] +// with ParIterableLike[T, Repr, Sequential] { +//self => +// +// protected[this] type SuperParIterator = IterableSplitter[T] +// +// /** A more refined version of the iterator found in the `ParallelIterable` trait, +// * this iterator can be split into arbitrary subsets of iterators. +// * +// * @return an iterator that can be split into subsets of precise size +// */ +// protected[parallel] def splitter: SeqSplitter[T] +// +// override def iterator: PreciseSplitter[T] = splitter +// +// override def size = length +// +// /** Used to iterate elements using indices */ +// protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] { +// private var i = start +// +// def hasNext = i < end +// +// def next(): T = if (i < end) { +// val x = self(i) +// i += 1 +// x +// } else Iterator.empty.next() +// +// def head = self(i) +// +// final def remaining = end - i +// +// def dup = new Elements(i, end) {} +// +// def split = psplit(remaining / 2, remaining - remaining / 2) +// +// def psplit(sizes: Int*) = { +// val incr = sizes.scanLeft(0)(_ + _) +// for ((from, until) <- incr.init zip incr.tail) yield { +// new Elements(start + from, (start + until) min end) {} +// } +// } +// +// override def toString = "Elements(" + start + ", " + end + ")" +// } +// +// /* ParallelSeq methods */ +// +// /** Returns the length of the longest segment of elements starting at +// * a given position satisfying some predicate. 
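// * For example (a sketch): `ParVector(1, 1, 2).segmentLength(_ < 2, 0)` is `2`.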
+// * +// * $indexsignalling +// * +// * The index flag is initially set to maximum integer value. +// * +// * @param p the predicate used to test the elements +// * @param from the starting offset for the search +// * @return the length of the longest segment of elements starting at `from` and +// * satisfying the predicate +// */ +// def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else { +// val realfrom = if (from < 0) 0 else from +// val ctx = new DefaultSignalling with AtomicIndexFlag +// ctx.setIndexFlag(Int.MaxValue) +// tasksupport.executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1 +// } +// +// /** Finds the first element satisfying some predicate. +// * +// * $indexsignalling +// * +// * The index flag is initially set to maximum integer value. +// * +// * @param p the predicate used to test the elements +// * @param from the starting offset for the search +// * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, +// * or `-1`, if none exists +// */ +// def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else { +// val realfrom = if (from < 0) 0 else from +// val ctx = new DefaultSignalling with AtomicIndexFlag +// ctx.setIndexFlag(Int.MaxValue) +// tasksupport.executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx)) +// } +// +// /** Finds the last element satisfying some predicate. +// * +// * $indexsignalling +// * +// * The index flag is initially set to minimum integer value. +// * +// * @param p the predicate used to test the elements +// * @param end the maximum offset for the search +// * @return the index `<= end` of the first element of this $coll that satisfies the predicate `p`, +// * or `-1`, if none exists +// */ +// def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else { +// val until = if (end >= length) length else end + 1 +// val ctx = new DefaultSignalling with AtomicIndexFlag +// ctx.setIndexFlag(Int.MinValue) +// tasksupport.executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx)) +// } +// +// def reverse: Repr = { +// tasksupport.executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.resultWithTaskSupport }) +// } +// +// def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { +// tasksupport.executeAndWaitResult( +// new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.resultWithTaskSupport } +// ) +// } else setTaskSupport(seq.reverseMap(f)(bf2seq(bf)), tasksupport) +// /*bf ifParallel { pbf => +// tasksupport.executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result }) +// } otherwise seq.reverseMap(f)(bf2seq(bf))*/ +// +// /** Tests whether this $coll contains the given sequence at a given index. 
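// * For example (a sketch): `ParVector(1, 2, 3).startsWith(ParVector(2, 3), 1)` is `true`.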
+// * +// * $abortsignalling +// * +// * @tparam S the element type of `that` parallel sequence +// * @param that the parallel sequence this sequence is being searched for +// * @param offset the starting offset for the search +// * @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise +// */ +// def startsWith[S](that: GenSeq[S], offset: Int): Boolean = that ifParSeq { pthat => +// if (offset < 0 || offset >= length) offset == length && pthat.length == 0 +// else if (pthat.length == 0) true +// else if (pthat.length > length - offset) false +// else { +// val ctx = new DefaultSignalling with VolatileAbort +// tasksupport.executeAndWaitResult( +// new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter) +// ) +// } +// } otherwise seq.startsWith(that, offset) +// +// override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat => +// val ctx = new DefaultSignalling with VolatileAbort +// length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter)) +// } otherwise seq.sameElements(that) +// +// /** Tests whether this $coll ends with the given parallel sequence. +// * +// * $abortsignalling +// * +// * @tparam S the type of the elements of `that` sequence +// * @param that the sequence to test +// * @return `true` if this $coll has `that` as a suffix, `false` otherwise +// */ +// def endsWith[S](that: GenSeq[S]): Boolean = that ifParSeq { pthat => +// if (that.length == 0) true +// else if (that.length > length) false +// else { +// val ctx = new DefaultSignalling with VolatileAbort +// val tlen = that.length +// tasksupport.executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter)) +// } +// } otherwise seq.endsWith(that) +// +// def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { +// val realreplaced = replaced min (length - from) +// if (patch.isParSeq && bf(repr).isCombiner && (size - realreplaced + patch.size) > MIN_FOR_COPY) { +// val that = patch.asParSeq +// val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced) +// val cfactory = combinerFactory(() => bf(repr).asCombiner) +// val copystart = new Copy[U, That](cfactory, pits(0)) +// val copymiddle = wrap { +// val tsk = new that.Copy[U, That](cfactory, that.splitter) +// tasksupport.executeAndWaitResult(tsk) +// } +// val copyend = new Copy[U, That](cfactory, pits(2)) +// tasksupport.executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult { +// _.resultWithTaskSupport +// }) +// } else patch_sequential(from, patch.seq, replaced) +// } +// +// private def patch_sequential[U >: T, That](fromarg: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { +// val from = 0 max fromarg +// val b = bf(repr) +// val repl = (r min (length - from)) max 0 +// val pits = splitter.psplitWithSignalling(from, repl, length - from - repl) +// b ++= pits(0) +// b ++= patch +// b ++= pits(2) +// setTaskSupport(b.result(), tasksupport) +// } +// +// def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { +// tasksupport.executeAndWaitResult( +// new Updated(index, elem, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { +// _.resultWithTaskSupport +// } +// ) +// } 
else setTaskSupport(seq.updated(index, elem)(bf2seq(bf)), tasksupport) +// /*bf ifParallel { pbf => +// tasksupport.executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result }) +// } otherwise seq.updated(index, elem)(bf2seq(bf))*/ +// +// def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { +// patch(0, mutable.ParArray(elem), 0) +// } +// +// def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { +// patch(length, mutable.ParArray(elem), 0) +// } +// +// def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) { +// patch(length, new immutable.Repetition(elem, len - length), 0) +// } else patch(length, Nil, 0) +// +// override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { +// val thatseq = that.asParSeq +// tasksupport.executeAndWaitResult( +// new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { +// _.resultWithTaskSupport +// } +// ) +// } else super.zip(that)(bf) +// +// /** Tests whether every element of this $coll relates to the +// * corresponding element of another parallel sequence by satisfying a test predicate. +// * +// * $abortsignalling +// * +// * @param that the other parallel sequence +// * @param p the test predicate, which relates elements from both sequences +// * @tparam S the type of the elements of `that` +// * @return `true` if both parallel sequences have the same length and +// * `p(x, y)` is `true` for all corresponding elements `x` of this $coll +// * and `y` of `that`, otherwise `false` +// */ +// def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat => +// val ctx = new DefaultSignalling with VolatileAbort +// length == pthat.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter)) +// } otherwise seq.corresponds(that)(p) +// +// def diff[U >: T](that: GenSeq[U]): Repr = sequentially { +// _ diff that +// } +// +// /** Computes the multiset intersection between this $coll and another sequence. +// * +// * @param that the sequence of elements to intersect with. +// * @tparam U the element type of `that` parallel sequence +// * @return a new collection of type `That` which contains all elements of this $coll +// * which also appear in `that`. +// * If an element value `x` appears +// * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained +// * in the result, but any following occurrences will be omitted. +// * +// * @usecase def intersect(that: Seq[T]): $Coll[T] +// * @inheritdoc +// * +// * $mayNotTerminateInf +// * +// * @return a new $coll which contains all elements of this $coll +// * which also appear in `that`. +// * If an element value `x` appears +// * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained +// * in the result, but any following occurrences will be omitted. +// */ +// def intersect[U >: T](that: GenSeq[U]) = sequentially { +// _ intersect that +// } +// +// /** Builds a new $coll from this $coll without any duplicate elements. +// * $willNotTerminateInf +// * +// * @return A new $coll which contains the first occurrence of every element of this $coll. 
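// * For example (a sketch): `ParVector(1, 2, 1).distinct` is `ParVector(1, 2)`.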
+// */ +// def distinct: Repr = sequentially { +// _.distinct +// } +// +// override def toString = seq.mkString(stringPrefix + "(", ", ", ")") +// +// override def toSeq = this.asInstanceOf[ParSeq[T]] +// +// /* tasks */ +// +// protected[this] def down(p: IterableSplitter[_]) = p.asInstanceOf[SeqSplitter[T]] +// +// protected trait Accessor[R, Tp] extends super.Accessor[R, Tp] { +// protected[this] val pit: SeqSplitter[T] +// } +// +// protected trait Transformer[R, Tp] extends Accessor[R, Tp] with super.Transformer[R, Tp] +// +// protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) +// extends Accessor[(Int, Boolean), SegmentLength] { +// @volatile var result: (Int, Boolean) = null +// def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) { +// val itsize = pit.remaining +// val seglen = pit.prefixLength(pred) +// result = (seglen, itsize == seglen) +// if (!result._2) pit.setIndexFlagIfLesser(from) +// } else result = (0, false) +// protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException +// override def split = { +// val pits = pit.splitWithSignalling +// for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p) +// } +// override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2) +// override def requiresStrictSplitters = true +// } +// +// protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) +// extends Accessor[Int, IndexWhere] { +// @volatile var result: Int = -1 +// def leaf(prev: Option[Int]) = if (from < pit.indexFlag) { +// val r = pit.indexWhere(pred) +// if (r != -1) { +// result = from + r +// pit.setIndexFlagIfLesser(from) +// } +// } +// protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException +// override def split = { +// val pits = pit.splitWithSignalling +// for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p) +// } +// override def merge(that: IndexWhere) = result = if (result == -1) that.result else { +// if (that.result != -1) result min that.result else result +// } +// override def requiresStrictSplitters = true +// } +// +// protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: SeqSplitter[T]) +// extends Accessor[Int, LastIndexWhere] { +// @volatile var result: Int = -1 +// def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) { +// val r = pit.lastIndexWhere(pred) +// if (r != -1) { +// result = pos + r +// pit.setIndexFlagIfGreater(pos) +// } +// } +// protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException +// override def split = { +// val pits = pit.splitWithSignalling +// for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p) +// } +// override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else { +// if (that.result != -1) result max that.result else result +// } +// override def requiresStrictSplitters = true +// } +// +// protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: SeqSplitter[T]) +// extends Transformer[Combiner[U, This], Reverse[U, This]] { +// @volatile var result: Combiner[U, This] = null +// def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf())) +// 
protected[this] def newSubtask(p: SuperParIterator) = new Reverse(cbf, down(p)) +// override def merge(that: Reverse[U, This]) = result = that.result combine result +// } +// +// protected[this] class ReverseMap[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: SeqSplitter[T]) +// extends Transformer[Combiner[S, That], ReverseMap[S, That]] { +// @volatile var result: Combiner[S, That] = null +// def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf()) +// protected[this] def newSubtask(p: SuperParIterator) = new ReverseMap(f, pbf, down(p)) +// override def merge(that: ReverseMap[S, That]) = result = that.result combine result +// } +// +// protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U]) +// extends Accessor[Boolean, SameElements[U]] { +// @volatile var result: Boolean = true +// def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { +// result = pit.sameElements(otherpit) +// if (!result) pit.abort() +// } +// protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException +// override def split = { +// val fp = pit.remaining / 2 +// val sp = pit.remaining - fp +// for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op) +// } +// override def merge(that: SameElements[U]) = result = result && that.result +// override def requiresStrictSplitters = true +// } +// +// protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CombinerFactory[U, That], protected[this] val pit: SeqSplitter[T]) +// extends Transformer[Combiner[U, That], Updated[U, That]] { +// @volatile var result: Combiner[U, That] = null +// def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf()) +// protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException +// override def split = { +// val pits = pit.splitWithSignalling +// for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p) +// } +// override def merge(that: Updated[U, That]) = result = result combine that.result +// override def requiresStrictSplitters = true +// } +// +// protected[this] class Zip[U >: T, S, That](len: Int, cf: CombinerFactory[(U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) +// extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] { +// @volatile var result: Result = null +// def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf()) +// protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException +// override def split = { +// val fp = len / 2 +// val sp = len - len / 2 +// val pits = pit.psplitWithSignalling(fp, sp) +// val opits = otherpit.psplitWithSignalling(fp, sp) +// Seq( +// new Zip(fp, cf, pits(0), opits(0)), +// new Zip(sp, cf, pits(1), opits(1)) +// ) +// } +// override def merge(that: Zip[U, S, That]) = result = result combine that.result +// } +// +// protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) +// extends Accessor[Boolean, Corresponds[S]] { +// @volatile var result: Boolean = true +// def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { +// result = pit.corresponds(corr)(otherpit) +// if (!result) pit.abort() +// } +// protected[this] def newSubtask(p: SuperParIterator) = throw new 
UnsupportedOperationException +// override def split = { +// val fp = pit.remaining / 2 +// val sp = pit.remaining - fp +// for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op) +// } +// override def merge(that: Corresponds[S]) = result = result && that.result +// override def requiresStrictSplitters = true +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/ParSet.scala b/core/src/main/scala/scala/collection/parallel/ParSet.scala index 135af3d5..8582e5e7 100644 --- a/core/src/main/scala/scala/collection/parallel/ParSet.scala +++ b/core/src/main/scala/scala/collection/parallel/ParSet.scala @@ -10,35 +10,35 @@ package scala package collection package parallel -import scala.collection.generic._ - -/** A template trait for parallel sets. - * - * $sideeffects - * - * @tparam T the element type of the set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSet[T] - extends GenSet[T] - with GenericParTemplate[T, ParSet] - with ParIterable[T] - with ParSetLike[T, ParSet[T], Set[T]] -{ self => - - override def empty: ParSet[T] = mutable.ParHashSet[T]() - - //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T] - - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - - override def stringPrefix = "ParSet" -} - -object ParSet extends ParSetFactory[ParSet] { - def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] -} +//import scala.collection.generic._ +// +///** A template trait for parallel sets. +// * +// * $sideeffects +// * +// * @tparam T the element type of the set +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParSet[T] +// extends GenSet[T] +// with GenericParTemplate[T, ParSet] +// with ParIterable[T] +// with ParSetLike[T, ParSet[T], Set[T]] +//{ self => +// +// override def empty: ParSet[T] = mutable.ParHashSet[T]() +// +// //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T] +// +// override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet +// +// override def stringPrefix = "ParSet" +//} +// +//object ParSet extends ParSetFactory[ParSet] { +// def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T] +// +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] +//} diff --git a/core/src/main/scala/scala/collection/parallel/ParSetLike.scala b/core/src/main/scala/scala/collection/parallel/ParSetLike.scala index 55797600..cb336658 100644 --- a/core/src/main/scala/scala/collection/parallel/ParSetLike.scala +++ b/core/src/main/scala/scala/collection/parallel/ParSetLike.scala @@ -9,39 +9,39 @@ package scala package collection.parallel -import scala.collection.SetLike -import scala.collection.GenSetLike -import scala.collection.GenSet -import scala.collection.Set - -/** A template trait for parallel sets. This trait is mixed in with concrete - * parallel sets to override the representation type. 
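// Sketch of the volatile-abort signalling used by the SameElements and
// Corresponds tasks above: each worker polls a shared @volatile flag and
// raises it on the first mismatch so that sibling chunks can stop early.
// `AbortFlag` is a simplified stand-in for DefaultSignalling with
// VolatileAbort, and the chunks run sequentially here for brevity.
final class AbortFlag { @volatile var aborted = false }

def chunkedSameElements[A](xs: Seq[A], ys: Seq[A], chunkSize: Int): Boolean = {
  if (xs.length != ys.length) false
  else {
    val flag = new AbortFlag
    (xs.grouped(chunkSize) zip ys.grouped(chunkSize)).forall { case (cx, cy) =>
      !flag.aborted && {
        val ok = cx == cy
        if (!ok) flag.aborted = true // signal every other chunk to bail out
        ok
      }
    }
  }
}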
- * - * $sideeffects - * - * @tparam T the element type of the set - * @define Coll `ParSet` - * @define coll parallel set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSetLike[T, - +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], - +Sequential <: Set[T] with SetLike[T, Sequential]] -extends GenSetLike[T, Repr] - with ParIterableLike[T, Repr, Sequential] -{ self => - - def empty: Repr - - // note: should not override toSet (could be mutable) - - def union(that: GenSet[T]): Repr = sequentially { - _ union that - } - - def diff(that: GenSet[T]): Repr = sequentially { - _ diff that - } -} +//import scala.collection.SetLike +//import scala.collection.GenSetLike +//import scala.collection.GenSet +//import scala.collection.Set +// +///** A template trait for parallel sets. This trait is mixed in with concrete +// * parallel sets to override the representation type. +// * +// * $sideeffects +// * +// * @tparam T the element type of the set +// * @define Coll `ParSet` +// * @define coll parallel set +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParSetLike[T, +// +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], +// +Sequential <: Set[T] with SetLike[T, Sequential]] +//extends GenSetLike[T, Repr] +// with ParIterableLike[T, Repr, Sequential] +//{ self => +// +// def empty: Repr +// +// // note: should not override toSet (could be mutable) +// +// def union(that: GenSet[T]): Repr = sequentially { +// _ union that +// } +// +// def diff(that: GenSet[T]): Repr = sequentially { +// _ diff that +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala b/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala index 6b05287f..5b88a52a 100644 --- a/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala +++ b/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala @@ -13,7 +13,7 @@ import scala.collection.generic.Signalling import scala.collection.generic.DelegatedSignalling import scala.collection.generic.IdleSignalling import scala.collection.mutable.Builder -import scala.collection.GenTraversableOnce +//import scala.collection.GenTraversableOnce import scala.collection.parallel.immutable.repetition private[collection] trait RemainsIterator[+T] extends Iterator[T] { @@ -85,14 +85,14 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ r } - override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = { - var i = from - val until = from + len - while (i < until && hasNext) { - array(i) = next() - i += 1 - } - } +// override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = { +// var i = from +// val until = from + len +// while (i < until && hasNext) { +// array(i) = next() +// i += 1 +// } +// } def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = { var i = howmany - 1 @@ -123,12 +123,12 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ cb } - def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { + def flatmap2combiner[S, That](f: T => /*GenTraversableOnce[S]*/ ParIterable[S], cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) while (hasNext) { val traversable = f(next()).seq - if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator - else cb ++= traversable + /*if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator + 
else*/ cb ++= traversable } cb } @@ -297,7 +297,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter total } - override def indexWhere(pred: T => Boolean): Int = { + /*override*/ def indexWhere(pred: T => Boolean): Int = { var i = 0 var loop = true while (hasNext && loop) { @@ -651,12 +651,12 @@ self => override def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem) - def reverse: SeqSplitter[T] = { - val pa = mutable.ParArray.fromTraversables(self).reverse - new pa.ParArrayIterator { - override def reverse = self - } - } +// def reverse: SeqSplitter[T] = { +// val pa = mutable.ParArray.fromTraversables(self).reverse +// new pa.ParArrayIterator { +// override def reverse = self +// } +// } class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] { signalDelegate = self.signalDelegate diff --git a/core/src/main/scala/scala/collection/parallel/Tasks.scala b/core/src/main/scala/scala/collection/parallel/Tasks.scala index d59e58eb..b74b89ba 100644 --- a/core/src/main/scala/scala/collection/parallel/Tasks.scala +++ b/core/src/main/scala/scala/collection/parallel/Tasks.scala @@ -23,7 +23,7 @@ trait Task[R, +Tp] { * Optionally is provided with the result from the previous completed task * or `None` if there was no previous task (or the previous task is uncompleted or unknown). */ - def leaf(result: Option[R]) + def leaf(result: Option[R]): Unit /** A result that can be accessed once the task is completed. */ var result: R @@ -93,11 +93,11 @@ trait Tasks { def split: Seq[WrappedTask[R, Tp]] /** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */ - def compute() + def compute(): Unit /** Start task. */ - def start() + def start(): Unit /** Wait for task to finish. */ - def sync() + def sync(): Unit /** Try to cancel the task. * @return `true` if cancellation is successful. */ @@ -193,7 +193,7 @@ trait AdaptiveWorkStealingTasks extends Tasks { var curr = this var chain = "chain: " while (curr != null) { - chain += curr + " ---> " + chain += curr.toString + " ---> " curr = curr.next } println(chain) diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala index 41b9da24..dfccaacc 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala @@ -9,326 +9,326 @@ package scala package collection.parallel.immutable -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.mutable.UnrolledBuffer -import scala.collection.generic.ParMapFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.immutable.{ HashMap, TrieIterator } -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.parallel.Task - -/** Immutable parallel hash map, based on hash tries. 
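// The explicit `: Unit` result types added in Tasks.scala above replace
// Scala 2's procedure syntax, which is deprecated and no longer supported in
// Scala 3; both forms declare an abstract method returning Unit:
trait Before { def sync() }       // procedure syntax (deprecated)
trait After  { def sync(): Unit } // explicit result type (preferred)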
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tries for more information. - * - * @define Coll `immutable.ParHashMap` - * @define coll immutable parallel hash map - */ -@SerialVersionUID(1L) -class ParHashMap[K, +V] private[immutable] (private[this] val trie: HashMap[K, V]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParHashMap] - with ParMapLike[K, V, ParHashMap[K, V], HashMap[K, V]] - with Serializable -{ -self => - - def this() = this(HashMap.empty[K, V]) - - override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap - - override def empty: ParHashMap[K, V] = new ParHashMap[K, V] - - protected[this] override def newCombiner = HashMapCombiner[K, V] - - def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) - - override def seq = trie - - def -(k: K) = new ParHashMap(trie - k) - - def +[U >: V](kv: (K, U)) = new ParHashMap(trie + kv) - - def get(k: K) = trie.get(k) - - override def size = trie.size - - protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { - case Some(old) => old - case None => newc - } - - class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int) - extends IterableSplitter[(K, V)] { - var i = 0 - def dup = triter match { - case t: TrieIterator[_] => - dupFromIterator(t.dupIterator) - case _ => - val buff = triter.toBuffer - triter = buff.iterator - dupFromIterator(buff.iterator) - } - private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = { - val phit = new ParHashMapIterator(it, sz) - phit.i = i - phit - } - def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match { - case t: TrieIterator[_] => - val previousRemaining = remaining - val ((fst, fstlength), snd) = t.split - val sndlength = previousRemaining - fstlength - Seq( - new ParHashMapIterator(fst, fstlength), - new ParHashMapIterator(snd, sndlength) - ) - case _ => - // iterator of the collision map case - val buff = triter.toBuffer - val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) } - } - def next(): (K, V) = { - i += 1 - val r = triter.next() - r - } - def hasNext: Boolean = { - i < sz - } - def remaining = sz - i - override def toString = "HashTrieIterator(" + sz + ")" - } - - /* debug */ - - private[parallel] def printDebugInfo(): Unit = { - println("Parallel hash trie") - println("Top level inner trie type: " + trie.getClass) - trie match { - case hm: HashMap.HashMap1[k, v] => - println("single node type") - println("key stored: " + hm.getKey) - println("hash of key: " + hm.getHash) - println("computed hash of " + hm.getKey + ": " + hm.computeHashFor(hm.getKey)) - println("trie.get(key): " + hm.get(hm.getKey)) - case _ => - println("other kind of node") - } - } -} - -/** $factoryInfo - * @define Coll `immutable.ParHashMap` - * @define coll immutable parallel hash map - */ -object ParHashMap extends ParMapFactory[ParHashMap] { - def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = HashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: 
CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = { - new CanCombineFromMap[K, V] - } - - def fromTrie[K, V](t: HashMap[K, V]) = new ParHashMap(t) - - var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0) -} - -private[parallel] abstract class HashMapCombiner[K, V] -extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) { -//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] => - import HashMapCombiner._ - val emptyTrie = HashMap.empty[K, V] - - def +=(elem: (K, V)) = { - sz += 1 - val hc = emptyTrie.computeHash(elem._1) - val pos = hc & 0x1f - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[(K, V)] - } - // add to bucket - buckets(pos) += elem - this - } - - def result = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashMap[K, V]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashMap[K, V] - else if (sz == 1) new ParHashMap[K, V](root(0)) - else { - val trie = new HashMap.HashTrieMap(bitmap, root, sz) - new ParHashMap[K, V](trie) - } - } - - def groupByKey[Repr](cbf: () => Combiner[V, Repr]): ParHashMap[K, Repr] = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashMap[K, AnyRef]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashMap[K, Repr] - else if (sz == 1) new ParHashMap[K, Repr](root(0).asInstanceOf[HashMap[K, Repr]]) - else { - val trie = new HashMap.HashTrieMap(bitmap, root.asInstanceOf[Array[HashMap[K, Repr]]], sz) - new ParHashMap[K, Repr](trie) - } - } - - override def toString = { - "HashTrieCombiner(sz: " + size + ")" - //"HashTrieCombiner(buckets:\n\t" + buckets.filter(_ != null).mkString("\n\t") + ")\n" - } - - /* tasks */ - - class CreateTrie(bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, V]], offset: Int, howmany: Int) - extends Task[Unit, CreateTrie] { - @volatile var result = () - def leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createTrie(bucks(i)) - i += 1 - } - result = result - } - private def createTrie(elems: Unrolled[(K, V)]): HashMap[K, V] = { - var trie = new HashMap[K, V] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val kv = chunkarr(i) - val hc = trie.computeHash(kv._1) - trie = trie.updated0(kv._1, hc, rootbits, kv._2, kv, null) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - trie - } - def split = { - val fp = howmany / 2 - List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } - - class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int) - extends Task[Unit, CreateGroupedTrie[Repr]] { - @volatile var result = () - def 
leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createGroupedTrie(bucks(i)).asInstanceOf[HashMap[K, AnyRef]] - i += 1 - } - result = result - } - private def createGroupedTrie(elems: Unrolled[(K, V)]): HashMap[K, Repr] = { - var trie = new HashMap[K, Combiner[V, Repr]] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val kv = chunkarr(i) - val hc = trie.computeHash(kv._1) - - // check to see if already present - val cmb: Combiner[V, Repr] = trie.get0(kv._1, hc, rootbits) match { - case Some(cmb) => cmb - case None => - val cmb: Combiner[V, Repr] = cbf() - trie = trie.updated0[Combiner[V, Repr]](kv._1, hc, rootbits, cmb, null, null) - cmb - } - cmb += kv._2 - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]] - } - private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match { - case hm1: HashMap.HashMap1[_, _] => - val evaledvalue = hm1.value.result - new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null) - case hmc: HashMap.HashMapCollision1[_, _] => - val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) } - new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs) - case htm: HashMap.HashTrieMap[k, v] => - var i = 0 - while (i < htm.elems.length) { - htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]] - i += 1 - } - htm.asInstanceOf[HashMap[K, Repr]] - case empty => empty.asInstanceOf[HashMap[K, Repr]] - } - def split = { - val fp = howmany / 2 - List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } -} - -private[parallel] object HashMapCombiner { - def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] - - private[immutable] val rootbits = 5 - private[immutable] val rootsize = 1 << 5 -} +//import scala.collection.parallel.ParMapLike +//import scala.collection.parallel.Combiner +//import scala.collection.parallel.IterableSplitter +//import scala.collection.mutable.UnrolledBuffer.Unrolled +//import scala.collection.mutable.UnrolledBuffer +//import scala.collection.generic.ParMapFactory +//import scala.collection.generic.CanCombineFrom +//import scala.collection.generic.GenericParMapTemplate +//import scala.collection.generic.GenericParMapCompanion +//import scala.collection.immutable.{ HashMap, TrieIterator } +//import scala.annotation.unchecked.uncheckedVariance +//import scala.collection.parallel.Task +// +///** Immutable parallel hash map, based on hash tries. +// * +// * $paralleliterableinfo +// * +// * $sideeffects +// * +// * @tparam K the key type of the map +// * @tparam V the value type of the map +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] +// * section on Parallel Hash Tries for more information. 
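// How the HashMapCombiner below bins its input: the low 5 bits of the hash
// pick one of 32 root buckets (rootsize = 1 << 5), and each non-empty bucket
// later becomes one root subtrie, built in parallel. Simplified sketch that
// uses plain hashCode instead of the trie's improved hash; `bucketOf` and
// `binByBucket` are illustrative names only.
def bucketOf(key: Any): Int = key.hashCode & 0x1f // 0 to 31

def binByBucket[K, V](kvs: Seq[(K, V)]): Map[Int, Seq[(K, V)]] =
  kvs.groupBy { case (k, _) => bucketOf(k) }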
+// * +// * @define Coll `immutable.ParHashMap` +// * @define coll immutable parallel hash map +// */ +//@SerialVersionUID(1L) +//class ParHashMap[K, +V] private[immutable] (private[this] val trie: HashMap[K, V]) +//extends ParMap[K, V] +// with GenericParMapTemplate[K, V, ParHashMap] +// with ParMapLike[K, V, ParHashMap[K, V], HashMap[K, V]] +// with Serializable +//{ +//self => +// +// def this() = this(HashMap.empty[K, V]) +// +// override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap +// +// override def empty: ParHashMap[K, V] = new ParHashMap[K, V] +// +// protected[this] override def newCombiner = HashMapCombiner[K, V] +// +// def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) +// +// override def seq = trie +// +// def -(k: K) = new ParHashMap(trie - k) +// +// def +[U >: V](kv: (K, U)) = new ParHashMap(trie + kv) +// +// def get(k: K) = trie.get(k) +// +// override def size = trie.size +// +// protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { +// case Some(old) => old +// case None => newc +// } +// +// class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int) +// extends IterableSplitter[(K, V)] { +// var i = 0 +// def dup = triter match { +// case t: TrieIterator[_] => +// dupFromIterator(t.dupIterator) +// case _ => +// val buff = triter.toBuffer +// triter = buff.iterator +// dupFromIterator(buff.iterator) +// } +// private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = { +// val phit = new ParHashMapIterator(it, sz) +// phit.i = i +// phit +// } +// def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match { +// case t: TrieIterator[_] => +// val previousRemaining = remaining +// val ((fst, fstlength), snd) = t.split +// val sndlength = previousRemaining - fstlength +// Seq( +// new ParHashMapIterator(fst, fstlength), +// new ParHashMapIterator(snd, sndlength) +// ) +// case _ => +// // iterator of the collision map case +// val buff = triter.toBuffer +// val (fp, sp) = buff.splitAt(buff.length / 2) +// Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) } +// } +// def next(): (K, V) = { +// i += 1 +// val r = triter.next() +// r +// } +// def hasNext: Boolean = { +// i < sz +// } +// def remaining = sz - i +// override def toString = "HashTrieIterator(" + sz + ")" +// } +// +// /* debug */ +// +// private[parallel] def printDebugInfo(): Unit = { +// println("Parallel hash trie") +// println("Top level inner trie type: " + trie.getClass) +// trie match { +// case hm: HashMap.HashMap1[k, v] => +// println("single node type") +// println("key stored: " + hm.getKey) +// println("hash of key: " + hm.getHash) +// println("computed hash of " + hm.getKey + ": " + hm.computeHashFor(hm.getKey)) +// println("trie.get(key): " + hm.get(hm.getKey)) +// case _ => +// println("other kind of node") +// } +// } +//} +// +///** $factoryInfo +// * @define Coll `immutable.ParHashMap` +// * @define coll immutable parallel hash map +// */ +//object ParHashMap extends ParMapFactory[ParHashMap] { +// def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] +// +// def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = HashMapCombiner[K, V] +// +// implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = { +// new CanCombineFromMap[K, V] +// } +// +// def fromTrie[K, V](t: HashMap[K, V]) = new ParHashMap(t) +// +// var totalcombines = new 
java.util.concurrent.atomic.AtomicInteger(0) +//} +// +//private[parallel] abstract class HashMapCombiner[K, V] +//extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) { +////self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] => +// import HashMapCombiner._ +// val emptyTrie = HashMap.empty[K, V] +// +// def +=(elem: (K, V)) = { +// sz += 1 +// val hc = emptyTrie.computeHash(elem._1) +// val pos = hc & 0x1f +// if (buckets(pos) eq null) { +// // initialize bucket +// buckets(pos) = new UnrolledBuffer[(K, V)] +// } +// // add to bucket +// buckets(pos) += elem +// this +// } +// +// def result = { +// val bucks = buckets.filter(_ != null).map(_.headPtr) +// val root = new Array[HashMap[K, V]](bucks.length) +// +// combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) +// +// var bitmap = 0 +// var i = 0 +// while (i < rootsize) { +// if (buckets(i) ne null) bitmap |= 1 << i +// i += 1 +// } +// val sz = root.foldLeft(0)(_ + _.size) +// +// if (sz == 0) new ParHashMap[K, V] +// else if (sz == 1) new ParHashMap[K, V](root(0)) +// else { +// val trie = new HashMap.HashTrieMap(bitmap, root, sz) +// new ParHashMap[K, V](trie) +// } +// } +// +// def groupByKey[Repr](cbf: () => Combiner[V, Repr]): ParHashMap[K, Repr] = { +// val bucks = buckets.filter(_ != null).map(_.headPtr) +// val root = new Array[HashMap[K, AnyRef]](bucks.length) +// +// combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length)) +// +// var bitmap = 0 +// var i = 0 +// while (i < rootsize) { +// if (buckets(i) ne null) bitmap |= 1 << i +// i += 1 +// } +// val sz = root.foldLeft(0)(_ + _.size) +// +// if (sz == 0) new ParHashMap[K, Repr] +// else if (sz == 1) new ParHashMap[K, Repr](root(0).asInstanceOf[HashMap[K, Repr]]) +// else { +// val trie = new HashMap.HashTrieMap(bitmap, root.asInstanceOf[Array[HashMap[K, Repr]]], sz) +// new ParHashMap[K, Repr](trie) +// } +// } +// +// override def toString = { +// "HashTrieCombiner(sz: " + size + ")" +// //"HashTrieCombiner(buckets:\n\t" + buckets.filter(_ != null).mkString("\n\t") + ")\n" +// } +// +// /* tasks */ +// +// class CreateTrie(bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, V]], offset: Int, howmany: Int) +// extends Task[Unit, CreateTrie] { +// @volatile var result = () +// def leaf(prev: Option[Unit]) = { +// var i = offset +// val until = offset + howmany +// while (i < until) { +// root(i) = createTrie(bucks(i)) +// i += 1 +// } +// result = result +// } +// private def createTrie(elems: Unrolled[(K, V)]): HashMap[K, V] = { +// var trie = new HashMap[K, V] +// +// var unrolled = elems +// var i = 0 +// while (unrolled ne null) { +// val chunkarr = unrolled.array +// val chunksz = unrolled.size +// while (i < chunksz) { +// val kv = chunkarr(i) +// val hc = trie.computeHash(kv._1) +// trie = trie.updated0(kv._1, hc, rootbits, kv._2, kv, null) +// i += 1 +// } +// i = 0 +// unrolled = unrolled.next +// } +// +// trie +// } +// def split = { +// val fp = howmany / 2 +// List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) +// } +// +// class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int) +// extends Task[Unit, 
CreateGroupedTrie[Repr]] { +// @volatile var result = () +// def leaf(prev: Option[Unit]) = { +// var i = offset +// val until = offset + howmany +// while (i < until) { +// root(i) = createGroupedTrie(bucks(i)).asInstanceOf[HashMap[K, AnyRef]] +// i += 1 +// } +// result = result +// } +// private def createGroupedTrie(elems: Unrolled[(K, V)]): HashMap[K, Repr] = { +// var trie = new HashMap[K, Combiner[V, Repr]] +// +// var unrolled = elems +// var i = 0 +// while (unrolled ne null) { +// val chunkarr = unrolled.array +// val chunksz = unrolled.size +// while (i < chunksz) { +// val kv = chunkarr(i) +// val hc = trie.computeHash(kv._1) +// +// // check to see if already present +// val cmb: Combiner[V, Repr] = trie.get0(kv._1, hc, rootbits) match { +// case Some(cmb) => cmb +// case None => +// val cmb: Combiner[V, Repr] = cbf() +// trie = trie.updated0[Combiner[V, Repr]](kv._1, hc, rootbits, cmb, null, null) +// cmb +// } +// cmb += kv._2 +// i += 1 +// } +// i = 0 +// unrolled = unrolled.next +// } +// +// evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]] +// } +// private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match { +// case hm1: HashMap.HashMap1[_, _] => +// val evaledvalue = hm1.value.result +// new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null) +// case hmc: HashMap.HashMapCollision1[_, _] => +// val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) } +// new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs) +// case htm: HashMap.HashTrieMap[k, v] => +// var i = 0 +// while (i < htm.elems.length) { +// htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]] +// i += 1 +// } +// htm.asInstanceOf[HashMap[K, Repr]] +// case empty => empty.asInstanceOf[HashMap[K, Repr]] +// } +// def split = { +// val fp = howmany / 2 +// List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp)) +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) +// } +//} +// +//private[parallel] object HashMapCombiner { +// def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] +// +// private[immutable] val rootbits = 5 +// private[immutable] val rootsize = 1 << 5 +//} diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala index 2d209a10..2bd344e1 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala @@ -11,213 +11,213 @@ package collection.parallel.immutable -import scala.collection.parallel.ParSetLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.mutable.UnrolledBuffer -import scala.collection.generic.ParSetFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.GenericCompanion -import scala.collection.immutable.{ HashSet, TrieIterator } -import scala.collection.parallel.Task - - - -/** Immutable parallel hash set, based on hash tries. 
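// Minimal model of how the splitters in this file divide work: when more
// than one element remains, split the remainder roughly in half and report
// exact sizes so the scheduler can balance load. `MiniSplitter` is an
// illustrative stand-in for the real trie-backed IterableSplitter.
final case class MiniSplitter[A](elems: Vector[A]) {
  def remaining: Int = elems.length
  def split: Seq[MiniSplitter[A]] =
    if (remaining < 2) Seq(this)
    else {
      val (fst, snd) = elems.splitAt(remaining / 2)
      Seq(MiniSplitter(fst), MiniSplitter(snd))
    }
}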
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the set - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tries for more information. - * - * @define Coll `immutable.ParHashSet` - * @define coll immutable parallel hash set - */ -@SerialVersionUID(1L) -class ParHashSet[T] private[immutable] (private[this] val trie: HashSet[T]) -extends ParSet[T] - with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], HashSet[T]] - with Serializable -{ -self => - - def this() = this(HashSet.empty[T]) - - override def companion: GenericCompanion[ParHashSet] with GenericParCompanion[ParHashSet] = ParHashSet - - override def empty: ParHashSet[T] = new ParHashSet[T] - - def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) - - override def seq = trie - - def -(e: T) = new ParHashSet(trie - e) - - def +(e: T) = new ParHashSet(trie + e) - - def contains(e: T): Boolean = trie.contains(e) - - override def size = trie.size - - protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { - case Some(old) => old - case None => newc - } - - class ParHashSetIterator(var triter: Iterator[T], val sz: Int) - extends IterableSplitter[T] { - var i = 0 - def dup = triter match { - case t: TrieIterator[_] => - dupFromIterator(t.dupIterator) - case _ => - val buff = triter.toBuffer - triter = buff.iterator - dupFromIterator(buff.iterator) - } - private def dupFromIterator(it: Iterator[T]) = { - val phit = new ParHashSetIterator(it, sz) - phit.i = i - phit - } - def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match { - case t: TrieIterator[_] => - val previousRemaining = remaining - val ((fst, fstlength), snd) = t.split - val sndlength = previousRemaining - fstlength - Seq( - new ParHashSetIterator(fst, fstlength), - new ParHashSetIterator(snd, sndlength) - ) - case _ => - // iterator of the collision map case - val buff = triter.toBuffer - val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) } - } - def next(): T = { - i += 1 - triter.next() - } - def hasNext: Boolean = { - i < sz - } - def remaining = sz - i - } - -} - - -/** $factoryInfo - * @define Coll `immutable.ParHashSet` - * @define coll immutable parallel hash set - */ -object ParHashSet extends ParSetFactory[ParHashSet] { - def newCombiner[T]: Combiner[T, ParHashSet[T]] = HashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = - new GenericCanCombineFrom[T] - - def fromTrie[T](t: HashSet[T]) = new ParHashSet(t) -} - - -private[immutable] abstract class HashSetCombiner[T] -extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) { -//self: EnvironmentPassingCombiner[T, ParHashSet[T]] => - import HashSetCombiner._ - val emptyTrie = HashSet.empty[T] - - def +=(elem: T) = { - sz += 1 - val hc = emptyTrie.computeHash(elem) - val pos = hc & 0x1f - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[Any] - } - // add to bucket - buckets(pos) += elem - this - } - - def result = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashSet[T]](bucks.length) - - 
combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashSet[T] - else if (sz == 1) new ParHashSet[T](root(0)) - else { - val trie = new HashSet.HashTrieSet(bitmap, root, sz) - new ParHashSet[T](trie) - } - } - - /* tasks */ - - class CreateTrie(bucks: Array[Unrolled[Any]], root: Array[HashSet[T]], offset: Int, howmany: Int) - extends Task[Unit, CreateTrie] { - var result = () - def leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createTrie(bucks(i)) - i += 1 - } - } - private def createTrie(elems: Unrolled[Any]): HashSet[T] = { - var trie = new HashSet[T] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val v = chunkarr(i).asInstanceOf[T] - val hc = trie.computeHash(v) - trie = trie.updated0(v, hc, rootbits) // internal API, private[collection] - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - trie - } - def split = { - val fp = howmany / 2 - List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } -} - - -object HashSetCombiner { - def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {} - - private[immutable] val rootbits = 5 - private[immutable] val rootsize = 1 << 5 -} +//import scala.collection.parallel.ParSetLike +//import scala.collection.parallel.Combiner +//import scala.collection.parallel.IterableSplitter +//import scala.collection.mutable.UnrolledBuffer.Unrolled +//import scala.collection.mutable.UnrolledBuffer +//import scala.collection.generic.ParSetFactory +//import scala.collection.generic.CanCombineFrom +//import scala.collection.generic.GenericParTemplate +//import scala.collection.generic.GenericParCompanion +//import scala.collection.generic.GenericCompanion +//import scala.collection.immutable.{ HashSet, TrieIterator } +//import scala.collection.parallel.Task +// +// +// +///** Immutable parallel hash set, based on hash tries. +// * +// * $paralleliterableinfo +// * +// * $sideeffects +// * +// * @tparam T the element type of the set +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] +// * section on Parallel Hash Tries for more information. 
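// Sketch of the root-bitmap step in HashSetCombiner.result below: bit i is
// set exactly when bucket i is non-empty, mirroring how a HashTrieSet root
// records which of its 32 slots are occupied. Assumes at most 32 buckets.
def rootBitmap(buckets: Array[AnyRef]): Int = {
  var bitmap = 0
  var i = 0
  while (i < buckets.length) {
    if (buckets(i) ne null) bitmap |= 1 << i
    i += 1
  }
  bitmap
}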
+// * +// * @define Coll `immutable.ParHashSet` +// * @define coll immutable parallel hash set +// */ +//@SerialVersionUID(1L) +//class ParHashSet[T] private[immutable] (private[this] val trie: HashSet[T]) +//extends ParSet[T] +// with GenericParTemplate[T, ParHashSet] +// with ParSetLike[T, ParHashSet[T], HashSet[T]] +// with Serializable +//{ +//self => +// +// def this() = this(HashSet.empty[T]) +// +// override def companion: GenericCompanion[ParHashSet] with GenericParCompanion[ParHashSet] = ParHashSet +// +// override def empty: ParHashSet[T] = new ParHashSet[T] +// +// def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) +// +// override def seq = trie +// +// def -(e: T) = new ParHashSet(trie - e) +// +// def +(e: T) = new ParHashSet(trie + e) +// +// def contains(e: T): Boolean = trie.contains(e) +// +// override def size = trie.size +// +// protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { +// case Some(old) => old +// case None => newc +// } +// +// class ParHashSetIterator(var triter: Iterator[T], val sz: Int) +// extends IterableSplitter[T] { +// var i = 0 +// def dup = triter match { +// case t: TrieIterator[_] => +// dupFromIterator(t.dupIterator) +// case _ => +// val buff = triter.toBuffer +// triter = buff.iterator +// dupFromIterator(buff.iterator) +// } +// private def dupFromIterator(it: Iterator[T]) = { +// val phit = new ParHashSetIterator(it, sz) +// phit.i = i +// phit +// } +// def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match { +// case t: TrieIterator[_] => +// val previousRemaining = remaining +// val ((fst, fstlength), snd) = t.split +// val sndlength = previousRemaining - fstlength +// Seq( +// new ParHashSetIterator(fst, fstlength), +// new ParHashSetIterator(snd, sndlength) +// ) +// case _ => +// // iterator of the collision map case +// val buff = triter.toBuffer +// val (fp, sp) = buff.splitAt(buff.length / 2) +// Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) } +// } +// def next(): T = { +// i += 1 +// triter.next() +// } +// def hasNext: Boolean = { +// i < sz +// } +// def remaining = sz - i +// } +// +//} +// +// +///** $factoryInfo +// * @define Coll `immutable.ParHashSet` +// * @define coll immutable parallel hash set +// */ +//object ParHashSet extends ParSetFactory[ParHashSet] { +// def newCombiner[T]: Combiner[T, ParHashSet[T]] = HashSetCombiner[T] +// +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = +// new GenericCanCombineFrom[T] +// +// def fromTrie[T](t: HashSet[T]) = new ParHashSet(t) +//} +// +// +//private[immutable] abstract class HashSetCombiner[T] +//extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) { +////self: EnvironmentPassingCombiner[T, ParHashSet[T]] => +// import HashSetCombiner._ +// val emptyTrie = HashSet.empty[T] +// +// def +=(elem: T) = { +// sz += 1 +// val hc = emptyTrie.computeHash(elem) +// val pos = hc & 0x1f +// if (buckets(pos) eq null) { +// // initialize bucket +// buckets(pos) = new UnrolledBuffer[Any] +// } +// // add to bucket +// buckets(pos) += elem +// this +// } +// +// def result = { +// val bucks = buckets.filter(_ != null).map(_.headPtr) +// val root = new Array[HashSet[T]](bucks.length) +// +// combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) +// +// var bitmap = 0 +// var i = 0 +// while (i < rootsize) { +// if (buckets(i) 
ne null) bitmap |= 1 << i +// i += 1 +// } +// val sz = root.foldLeft(0)(_ + _.size) +// +// if (sz == 0) new ParHashSet[T] +// else if (sz == 1) new ParHashSet[T](root(0)) +// else { +// val trie = new HashSet.HashTrieSet(bitmap, root, sz) +// new ParHashSet[T](trie) +// } +// } +// +// /* tasks */ +// +// class CreateTrie(bucks: Array[Unrolled[Any]], root: Array[HashSet[T]], offset: Int, howmany: Int) +// extends Task[Unit, CreateTrie] { +// var result = () +// def leaf(prev: Option[Unit]) = { +// var i = offset +// val until = offset + howmany +// while (i < until) { +// root(i) = createTrie(bucks(i)) +// i += 1 +// } +// } +// private def createTrie(elems: Unrolled[Any]): HashSet[T] = { +// var trie = new HashSet[T] +// +// var unrolled = elems +// var i = 0 +// while (unrolled ne null) { +// val chunkarr = unrolled.array +// val chunksz = unrolled.size +// while (i < chunksz) { +// val v = chunkarr(i).asInstanceOf[T] +// val hc = trie.computeHash(v) +// trie = trie.updated0(v, hc, rootbits) // internal API, private[collection] +// i += 1 +// } +// i = 0 +// unrolled = unrolled.next +// } +// +// trie +// } +// def split = { +// val fp = howmany / 2 +// List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) +// } +//} +// +// +//object HashSetCombiner { +// def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {} +// +// private[immutable] val rootbits = 5 +// private[immutable] val rootsize = 1 << 5 +//} diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala index a0e94345..dabb74db 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala @@ -10,40 +10,40 @@ package scala package collection package parallel.immutable -import scala.collection.generic._ -import scala.collection.parallel.ParIterableLike -import scala.collection.parallel.Combiner - -/** A template trait for immutable parallel iterable collections. 
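// Typical use of the immutable parallel iterables this file defines, via the
// converter syntax the library ends up exposing for 2.13 (kept as comments
// since the trait is disabled at this point in the migration):
// import scala.collection.parallel.CollectionConverters._
// val doubled = Vector(1, 2, 3).par.map(_ * 2) // an immutable ParSeq[Int]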
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[+T] -extends scala.collection.GenIterable[T] - with scala.collection.parallel.ParIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]] - with Immutable -{ - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - // if `immutable.ParIterableLike` is introduced, please move these 4 methods there - override def toIterable: ParIterable[T] = this - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = - new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T] - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T] -} +//import scala.collection.generic._ +//import scala.collection.parallel.ParIterableLike +//import scala.collection.parallel.Combiner +// +///** A template trait for immutable parallel iterable collections. +// * +// * $paralleliterableinfo +// * +// * $sideeffects +// * +// * @tparam T the element type of the collection +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParIterable[+T] +//extends scala.collection.GenIterable[T] +// with scala.collection.parallel.ParIterable[T] +// with GenericParTemplate[T, ParIterable] +// with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]] +// with Immutable +//{ +// override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable +// // if `immutable.ParIterableLike` is introduced, please move these 4 methods there +// override def toIterable: ParIterable[T] = this +// override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) +//} +// +///** $factoryInfo +// */ +//object ParIterable extends ParFactory[ParIterable] { +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = +// new GenericCanCombineFrom[T] +// +// def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T] +// def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T] +//} diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala index 206efdcf..37a2a8a5 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala @@ -10,83 +10,83 @@ package scala package collection package parallel.immutable -import scala.collection.generic.ParMapFactory -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner - -/** A template trait for immutable parallel maps. 
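// The withDefault contract documented in this file mirrors the sequential
// Map API: apply falls back to the default function, while get / contains /
// iterator ignore it. REPL-style example on a plain immutable.Map:
val m = Map("a" -> 1).withDefault(_.length)
m("bcd")     // 3: the default function is applied to the missing key
m.get("bcd") // None: lookup methods are unaffected by the default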
- * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, +V] -extends scala.collection/*.immutable*/.GenMap[K, V] - with GenericParMapTemplate[K, V, ParMap] - with parallel.ParMap[K, V] - with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]] -{ -self => - - override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - override def empty: ParMap[K, V] = new ParHashMap[K, V] - - override def stringPrefix = "ParMap" - - override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]] - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - def + [U >: V](kv: (K, U)): ParMap[K, U] - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d) - - /** The same map with a given default value. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d) - -} - - - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = HashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V) - extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] { - override def empty = new WithDefault(underlying.empty, d) - override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) - override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def withDefault[U >: V](d: K => U): ParMap[K, U] = new WithDefault[K, U](underlying, d) - override def withDefaultValue[U >: V](d: U): ParMap[K, U] = new WithDefault[K, U](underlying, x => d) - override def seq = underlying.seq.withDefault(d) - } - -} +//import scala.collection.generic.ParMapFactory +//import scala.collection.generic.GenericParMapTemplate +//import scala.collection.generic.GenericParMapCompanion +//import scala.collection.generic.CanCombineFrom +//import scala.collection.parallel.ParMapLike +//import scala.collection.parallel.Combiner +// +///** A template trait for immutable parallel maps. 
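// Why `+` and `updated` in this file widen the value type: ParMap[K, +V] is
// covariant in V, so adding a value of a wider type U >: V must yield a
// ParMap[K, U]. The sequential immutable.Map behaves the same way:
val prices: Map[String, Int] = Map("a" -> 1)
val mixed: Map[String, AnyVal] = prices + ("b" -> 2.5) // value type widens to AnyVal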
+// * +// * $sideeffects +// * +// * @tparam K the key type of the map +// * @tparam V the value type of the map +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParMap[K, +V] +//extends scala.collection/*.immutable*/.GenMap[K, V] +// with GenericParMapTemplate[K, V, ParMap] +// with parallel.ParMap[K, V] +// with ParIterable[(K, V)] +// with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]] +//{ +//self => +// +// override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap +// +// override def empty: ParMap[K, V] = new ParHashMap[K, V] +// +// override def stringPrefix = "ParMap" +// +// override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]] +// +// override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) +// +// def + [U >: V](kv: (K, U)): ParMap[K, U] +// +// /** The same map with a given default function. +// * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. +// * +// * Invoking transformer methods (e.g. `map`) will not preserve the default value. +// * +// * @param d the function mapping keys to values, used for non-present keys +// * @return a wrapper of the map with a default value +// */ +// def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d) +// +// /** The same map with a given default value. +// * +// * Invoking transformer methods (e.g. `map`) will not preserve the default value. +// * +// * @param d default value used for non-present keys +// * @return a wrapper of the map with a default value +// */ +// def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d) +// +//} +// +// +// +//object ParMap extends ParMapFactory[ParMap] { +// def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] +// +// def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = HashMapCombiner[K, V] +// +// implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] +// +// class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V) +// extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] { +// override def empty = new WithDefault(underlying.empty, d) +// override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) +// override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) +// override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) +// override def withDefault[U >: V](d: K => U): ParMap[K, U] = new WithDefault[K, U](underlying, d) +// override def withDefaultValue[U >: V](d: U): ParMap[K, U] = new WithDefault[K, U](underlying, x => d) +// override def seq = underlying.seq.withDefault(d) +// } +// +//} diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala index ee3c3201..452b4703 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala @@ -9,110 +9,110 @@ package scala package collection.parallel.immutable -import scala.collection.immutable.Range -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import scala.collection.Iterator - -/** Parallel ranges. 
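// Model of ParRangeIterator.psplit(sizes: Int*) from this file: carve what
// remains of the range into consecutive chunks of the requested sizes.
// `psplitModel` is an illustrative name.
def psplitModel(r: Range, sizes: Int*): Seq[Range] = {
  var left = r
  sizes.map { sz =>
    val front = left.take(sz)
    left = left.drop(sz)
    front
  }
}
// psplitModel(1 to 10, 3, 3, 4) == Seq(1 to 3, 4 to 6, 7 to 10)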
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @param range the sequential range this parallel range was obtained from - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_range Scala's Parallel Collections Library overview]] - * section on `ParRange` for more information. - * - * @define Coll `immutable.ParRange` - * @define coll immutable parallel range - */ -@SerialVersionUID(1L) -class ParRange(val range: Range) -extends ParSeq[Int] - with Serializable -{ -self => - - override def seq = range - - @inline final def length = range.length - - @inline final def apply(idx: Int) = range.apply(idx) - - def splitter = new ParRangeIterator - - class ParRangeIterator(range: Range = self.range) - extends SeqSplitter[Int] { - override def toString = "ParRangeIterator(over: " + range + ")" - private var ind = 0 - private val len = range.length - - final def remaining = len - ind - - final def hasNext = ind < len - - final def next = if (hasNext) { - val r = range.apply(ind) - ind += 1 - r - } else Iterator.empty.next() - - private def rangeleft = range.drop(ind) - - def dup = new ParRangeIterator(rangeleft) - - def split = { - val rleft = rangeleft - val elemleft = rleft.length - if (elemleft < 2) Seq(new ParRangeIterator(rleft)) - else Seq( - new ParRangeIterator(rleft.take(elemleft / 2)), - new ParRangeIterator(rleft.drop(elemleft / 2)) - ) - } - - def psplit(sizes: Int*) = { - var rleft = rangeleft - for (sz <- sizes) yield { - val fronttaken = rleft.take(sz) - rleft = rleft.drop(sz) - new ParRangeIterator(fronttaken) - } - } - - /* accessors */ - - override def foreach[U](f: Int => U): Unit = { - rangeleft.foreach(f.asInstanceOf[Int => Unit]) - ind = len - } - - override def reduce[U >: Int](op: (U, U) => U): U = { - val r = rangeleft.reduceLeft(op) - ind = len - r - } - - /* transformers */ - - override def map2combiner[S, That](f: Int => S, cb: Combiner[S, That]): Combiner[S, That] = { - while (hasNext) { - cb += f(next) - } - cb - } - } - - override def toString = s"Par$range" -} - -object ParRange { - def apply(start: Int, end: Int, step: Int, inclusive: Boolean) = new ParRange( - if (inclusive) new Range.Inclusive(start, end, step) - else new Range(start, end, step) - ) -} +//import scala.collection.immutable.Range +//import scala.collection.parallel.Combiner +//import scala.collection.parallel.SeqSplitter +//import scala.collection.Iterator +// +///** Parallel ranges. +// * +// * $paralleliterableinfo +// * +// * $sideeffects +// * +// * @param range the sequential range this parallel range was obtained from +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_range Scala's Parallel Collections Library overview]] +// * section on `ParRange` for more information. 
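// NOTE (editor): a minimal sketch of the binary splitting performed by
// `ParRangeIterator.split` above -- halve whatever part of the range is left:
def splitRange(r: Range): Seq[Range] =
  if (r.length < 2) Seq(r)
  else Seq(r.take(r.length / 2), r.drop(r.length / 2))
splitRange(1 to 10)  // Seq(1 to 5, 6 to 10)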
+// * +// * @define Coll `immutable.ParRange` +// * @define coll immutable parallel range +// */ +//@SerialVersionUID(1L) +//class ParRange(val range: Range) +//extends ParSeq[Int] +// with Serializable +//{ +//self => +// +// override def seq = range +// +// @inline final def length = range.length +// +// @inline final def apply(idx: Int) = range.apply(idx) +// +// def splitter = new ParRangeIterator +// +// class ParRangeIterator(range: Range = self.range) +// extends SeqSplitter[Int] { +// override def toString = "ParRangeIterator(over: " + range + ")" +// private var ind = 0 +// private val len = range.length +// +// final def remaining = len - ind +// +// final def hasNext = ind < len +// +// final def next = if (hasNext) { +// val r = range.apply(ind) +// ind += 1 +// r +// } else Iterator.empty.next() +// +// private def rangeleft = range.drop(ind) +// +// def dup = new ParRangeIterator(rangeleft) +// +// def split = { +// val rleft = rangeleft +// val elemleft = rleft.length +// if (elemleft < 2) Seq(new ParRangeIterator(rleft)) +// else Seq( +// new ParRangeIterator(rleft.take(elemleft / 2)), +// new ParRangeIterator(rleft.drop(elemleft / 2)) +// ) +// } +// +// def psplit(sizes: Int*) = { +// var rleft = rangeleft +// for (sz <- sizes) yield { +// val fronttaken = rleft.take(sz) +// rleft = rleft.drop(sz) +// new ParRangeIterator(fronttaken) +// } +// } +// +// /* accessors */ +// +// override def foreach[U](f: Int => U): Unit = { +// rangeleft.foreach(f.asInstanceOf[Int => Unit]) +// ind = len +// } +// +// override def reduce[U >: Int](op: (U, U) => U): U = { +// val r = rangeleft.reduceLeft(op) +// ind = len +// r +// } +// +// /* transformers */ +// +// override def map2combiner[S, That](f: Int => S, cb: Combiner[S, That]): Combiner[S, That] = { +// while (hasNext) { +// cb += f(next) +// } +// cb +// } +// } +// +// override def toString = s"Par$range" +//} +// +//object ParRange { +// def apply(start: Int, end: Int, step: Int, inclusive: Boolean) = new ParRange( +// if (inclusive) new Range.Inclusive(start, end, step) +// else new Range(start, end, step) +// ) +//} diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala index a61fb209..fbfc32f5 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala @@ -10,37 +10,37 @@ package scala package collection package parallel.immutable -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner - -/** An immutable variant of `ParSeq`. 
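// NOTE (editor): `psplit` above carves the remaining range into fronts of the
// requested sizes; the same take/drop walk, sketched on a plain Range:
def psplitRange(r: Range, sizes: Int*): Seq[Range] = {
  var rest = r
  sizes.map { sz =>
    val front = rest.take(sz)
    rest = rest.drop(sz)
    front
  }
}
psplitRange(1 to 10, 3, 3, 4)  // Seq(1 to 3, 4 to 6, 7 to 10)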
- * - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -trait ParSeq[+T] -extends scala.collection/*.immutable*/.GenSeq[T] - with scala.collection.parallel.ParSeq[T] - with ParIterable[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]] -{ - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - override def toSeq: ParSeq[T] = this -} - -/** $factoryInfo - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T] -} +//import scala.collection.generic.GenericParTemplate +//import scala.collection.generic.GenericCompanion +//import scala.collection.generic.GenericParCompanion +//import scala.collection.generic.CanCombineFrom +//import scala.collection.generic.ParFactory +//import scala.collection.parallel.ParSeqLike +//import scala.collection.parallel.Combiner +// +///** An immutable variant of `ParSeq`. +// * +// * @define Coll `mutable.ParSeq` +// * @define coll mutable parallel sequence +// */ +//trait ParSeq[+T] +//extends scala.collection/*.immutable*/.GenSeq[T] +// with scala.collection.parallel.ParSeq[T] +// with ParIterable[T] +// with GenericParTemplate[T, ParSeq] +// with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]] +//{ +// override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq +// override def toSeq: ParSeq[T] = this +//} +// +///** $factoryInfo +// * @define Coll `mutable.ParSeq` +// * @define coll mutable parallel sequence +// */ +//object ParSeq extends ParFactory[ParSeq] { +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] +// +// def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T] +// def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T] +//} diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala index e4603b22..4b7b44e9 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala @@ -10,39 +10,39 @@ package scala package collection package parallel.immutable -import scala.collection.generic._ -import scala.collection.parallel.ParSetLike -import scala.collection.parallel.Combiner - -/** An immutable variant of `ParSet`. 
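// NOTE (editor): the companion objects in these hunks all answer `newCombiner`.
// Conceptually a combiner is a builder that can also merge with another
// combiner, so each worker fills its own and the fragments are merged pairwise.
// A toy model of that contract (names are illustrative, not the library's):
final class VecCombiner[A](private var buf: Vector[A] = Vector.empty) {
  def addOne(a: A): this.type = { buf = buf :+ a; this }
  def combine(that: VecCombiner[A]): VecCombiner[A] =
    new VecCombiner(buf ++ that.buf)  // left fragment's elements stay first
  def result(): Vector[A] = buf
}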
- * - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -trait ParSet[T] -extends scala.collection/*.immutable*/.GenSet[T] - with GenericParTemplate[T, ParSet] - with parallel.ParSet[T] - with ParIterable[T] - with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]] -{ -self => - override def empty: ParSet[T] = ParHashSet[T]() - - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - - override def stringPrefix = "ParSet" - - // ok, because this could only violate `apply` and we can live with that - override def toSet[U >: T]: ParSet[U] = this.asInstanceOf[ParSet[U]] -} - -/** $factoryInfo - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -object ParSet extends ParSetFactory[ParSet] { - def newCombiner[T]: Combiner[T, ParSet[T]] = HashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] -} +//import scala.collection.generic._ +//import scala.collection.parallel.ParSetLike +//import scala.collection.parallel.Combiner +// +///** An immutable variant of `ParSet`. +// * +// * @define Coll `mutable.ParSet` +// * @define coll mutable parallel set +// */ +//trait ParSet[T] +//extends scala.collection/*.immutable*/.GenSet[T] +// with GenericParTemplate[T, ParSet] +// with parallel.ParSet[T] +// with ParIterable[T] +// with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]] +//{ +//self => +// override def empty: ParSet[T] = ParHashSet[T]() +// +// override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet +// +// override def stringPrefix = "ParSet" +// +// // ok, because this could only violate `apply` and we can live with that +// override def toSet[U >: T]: ParSet[U] = this.asInstanceOf[ParSet[U]] +//} +// +///** $factoryInfo +// * @define Coll `mutable.ParSet` +// * @define coll mutable parallel set +// */ +//object ParSet extends ParSetFactory[ParSet] { +// def newCombiner[T]: Combiner[T, ParSet[T]] = HashSetCombiner[T] +// +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] +//} diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala index ab1fa173..e6ae0b26 100644 --- a/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala +++ b/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala @@ -10,8 +10,8 @@ package scala package collection package parallel.immutable -import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory} -import scala.collection.parallel.ParSeqLike +import scala.collection.generic.{GenericParTemplate, /*CanCombineFrom,*/ ParFactory} +//import scala.collection.parallel.ParSeqLike import scala.collection.parallel.Combiner import scala.collection.parallel.SeqSplitter import mutable.ArrayBuffer @@ -36,9 +36,10 @@ import immutable.VectorIterator * @define coll immutable parallel vector */ class ParVector[+T](private[this] val vector: Vector[T]) -extends ParSeq[T] +extends scala.collection.parallel.ParSeq[T] with GenericParTemplate[T, ParVector] - with ParSeqLike[T, ParVector[T], Vector[T]] + with scala.collection.parallel.ParIterableLike[T, ParVector, ParVector[T], Vector[T]] + /*with ParSeqLike[T, ParVector[T], Vector[T]]*/ with Serializable { override def companion = ParVector @@ -57,24 +58,24 @@ extends ParSeq[T] override def seq: Vector[T] = vector - override def toVector: 
Vector[T] = vector + /*override*/ def toVector: Vector[T] = vector class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] { def remaining: Int = remainingElementCount def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter - def split: Seq[ParVectorIterator] = { + def split: scala.collection.immutable.Seq[ParVectorIterator] = { val rem = remaining if (rem >= 2) psplit(rem / 2, rem - rem / 2) - else Seq(this) + else scala.collection.immutable.Seq(this) } - def psplit(sizes: Int*): Seq[ParVectorIterator] = { + def psplit(sizes: Int*): scala.Seq[ParVectorIterator] = { var remvector = remainingVector - val splitted = new ArrayBuffer[Vector[T]] + val splitted = List.newBuilder[Vector[T]] for (sz <- sizes) { splitted += remvector.take(sz) remvector = remvector.drop(sz) } - splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator]) + splitted.result().map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator]) } } } @@ -84,8 +85,8 @@ extends ParSeq[T] * @define coll immutable parallel vector */ object ParVector extends ParFactory[ParVector] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] = - new GenericCanCombineFrom[T] + /*implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] = + new GenericCanCombineFrom[T]*/ def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T] @@ -99,7 +100,7 @@ private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[ def size: Int = sz - def +=(elem: T): this.type = { + def addOne(elem: T): this.type = { vectors.last += elem sz += 1 this diff --git a/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala b/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala index 28482758..492d4133 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala @@ -9,7 +9,7 @@ package scala package collection.parallel.mutable -import scala.collection.generic.Growable +import scala.collection.mutable.Growable import scala.collection.generic.Sizing import scala.collection.mutable.ArrayBuffer import scala.collection.parallel.Combiner @@ -26,7 +26,7 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin //self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] => val chain: ArrayBuffer[Buff] val lastbuff = chain.last - def +=(elem: Elem) = { lastbuff += elem; this } + def addOne(elem: Elem) = { lastbuff += elem; this } def result: To = allocateAndCopy def clear() = { chain.clear() } def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) { diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala index 0dad4708..f8e39033 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala @@ -11,708 +11,708 @@ package collection.parallel.mutable -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import 
scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Task -import scala.collection.parallel.CHECK_RATE -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.Builder -import scala.collection.GenTraversableOnce -import scala.reflect.ClassTag - -/** Parallel sequence holding elements in a linear array. - * - * `ParArray` is a parallel sequence with a predefined size. The size of the array - * cannot be changed after it's been created. - * - * `ParArray` internally keeps an array containing the elements. This means that - * bulk operations based on traversal ensure fast access to elements. `ParArray` uses lazy builders that - * create the internal data array only after the size of the array is known. In the meantime, they keep - * the result set fragmented. The fragments - * are copied into the resulting data array in parallel using fast array copy operations once all the combiners - * are populated in parallel. - * - * @tparam T type of the elements in the array - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_array Scala's Parallel Collections Library overview]] - * section on `ParArray` for more information. - * - * @define Coll `ParArray` - * @define coll parallel array - * - */ -@SerialVersionUID(1L) -class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T]) -extends ParSeq[T] - with GenericParTemplate[T, ParArray] - with ParSeqLike[T, ParArray[T], ArraySeq[T]] - with Serializable -{ -self => - - @transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]] - - override def companion: GenericCompanion[ParArray] with GenericParCompanion[ParArray] = ParArray - - def this(sz: Int) = this { - require(sz >= 0) - new ArraySeq[T](sz) - } - - def apply(i: Int) = array(i).asInstanceOf[T] - - def update(i: Int, elem: T) = array(i) = elem - - def length = arrayseq.length - - override def seq = arrayseq - - protected[parallel] def splitter: ParArrayIterator = { - val pit = new ParArrayIterator - pit - } - - class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array) - extends SeqSplitter[T] { - def hasNext = i < until - - def next = { - val elem = arr(i) - i += 1 - elem.asInstanceOf[T] - } - - def remaining = until - i - - def dup = new ParArrayIterator(i, until, arr) - - def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = { - var traversed = i - val total = sizesIncomplete.reduceLeft(_ + _) - val left = remaining - val sizes = if (total >= left) sizesIncomplete else sizesIncomplete :+ (left - total) - for (sz <- sizes) yield if (traversed < until) { - val start = traversed - val end = (traversed + sz) min until - traversed = end - new ParArrayIterator(start, end, arr) - } else { - new ParArrayIterator(traversed, traversed, arr) - } - } - - override def split: Seq[ParArrayIterator] = { - val left = remaining - if (left >= 2) { - val splitpoint = left / 2 - val sq = Seq( - new ParArrayIterator(i, i + splitpoint, arr), - new ParArrayIterator(i + splitpoint, until, arr)) - i = until - sq - } else { - Seq(this) - } - } - - override def toString = "ParArrayIterator(" + i + ", " + until + ")" - - /* overrides for efficiency */ - - /* accessors */ - - override def foreach[U](f: T => U) = { - foreach_quick(f, arr, until, i) - i = until - } - - private def foreach_quick[U](f: T => U, a: Array[Any], ntil: Int, from: Int) = { - var j = from - while (j < ntil) { - f(a(j).asInstanceOf[T]) - j += 1 
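// NOTE (editor): the forall/exists overrides a little further below batch the
// scan into CHECK_RATE-sized chunks, so an abort() signalled by another worker
// is noticed between chunks rather than being polled per element. A minimal
// sketch of that pattern, with an explicit flag standing in for the library's
// signalling (assumed names, not the real API):
def batchedForall[T](a: Array[T], p: T => Boolean, checkRate: Int)(isAborted: () => Boolean): Boolean = {
  var i = 0
  while (i < a.length) {
    val next = math.min(i + checkRate, a.length)
    while (i < next) {
      if (!p(a(i))) return false
      i += 1
    }
    if (isAborted()) return false  // another worker already falsified the predicate
  }
  true
}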
- } - } - - override def count(p: T => Boolean) = { - val c = count_quick(p, arr, until, i) - i = until - c - } - - private def count_quick(p: T => Boolean, a: Array[Any], ntil: Int, from: Int) = { - var cnt = 0 - var j = from - while (j < ntil) { - if (p(a(j).asInstanceOf[T])) cnt += 1 - j += 1 - } - cnt - } - - override def foldLeft[S](z: S)(op: (S, T) => S): S = { - val r = foldLeft_quick(arr, until, op, z) - i = until - r - } - - private def foldLeft_quick[S](a: Array[Any], ntil: Int, op: (S, T) => S, z: S): S = { - var j = i - var sum = z - while (j < ntil) { - sum = op(sum, a(j).asInstanceOf[T]) - j += 1 - } - sum - } - - override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op) - - override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop) - - override def sum[U >: T](implicit num: Numeric[U]): U = { - val s = sum_quick(num, arr, until, i, num.zero) - i = until - s - } - - private def sum_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, zero: U): U = { - var j = from - var sum = zero - while (j < ntil) { - sum = num.plus(sum, a(j).asInstanceOf[T]) - j += 1 - } - sum - } - - override def product[U >: T](implicit num: Numeric[U]): U = { - val p = product_quick(num, arr, until, i, num.one) - i = until - p - } - - private def product_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, one: U): U = { - var j = from - var prod = one - while (j < ntil) { - prod = num.times(prod, a(j).asInstanceOf[T]) - j += 1 - } - prod - } - - override def forall(p: T => Boolean): Boolean = { - if (isAborted) return false - - var all = true - while (i < until) { - val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE - - all = forall_quick(p, array, nextuntil, i) - if (all) i = nextuntil - else { - i = until - abort() - } - - if (isAborted) return false - } - all - } - - // it's faster to use a separate small method - private def forall_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { - var j = start - while (j < nextuntil) { - if (p(a(j).asInstanceOf[T])) j += 1 - else return false - } - true - } - - override def exists(p: T => Boolean): Boolean = { - if (isAborted) return true - - var some = false - while (i < until) { - val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE - - some = exists_quick(p, array, nextuntil, i) - if (some) { - i = until - abort() - } else i = nextuntil - - if (isAborted) return true - } - some - } - - // faster to use separate small method - private def exists_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { - var j = start - while (j < nextuntil) { - if (p(a(j).asInstanceOf[T])) return true - else j += 1 - } - false - } - - override def find(p: T => Boolean): Option[T] = { - if (isAborted) return None - - var r: Option[T] = None - while (i < until) { - val nextuntil = if ((i + CHECK_RATE) < until) (i + CHECK_RATE) else until - - r = find_quick(p, array, nextuntil, i) - - if (r != None) { - i = until - abort() - } else i = nextuntil - - if (isAborted) return r - } - r - } - - private def find_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Option[T] = { - var j = start - while (j < nextuntil) { - val elem = a(j).asInstanceOf[T] - if (p(elem)) return Some(elem) - else j += 1 - } - None - } - - override def drop(n: Int): ParArrayIterator = { - i += n - this - } - - override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = { - val totallen = (self.length - i) 
min len min (array.length - from) - Array.copy(arr, i, array, from, totallen) - i += totallen - } - - override def prefixLength(pred: T => Boolean): Int = { - val r = prefixLength_quick(pred, arr, until, i) - i += r + 1 - r - } - - private def prefixLength_quick(pred: T => Boolean, a: Array[Any], ntil: Int, startpos: Int): Int = { - var j = startpos - var endpos = ntil - while (j < endpos) { - if (pred(a(j).asInstanceOf[T])) j += 1 - else endpos = j - } - endpos - startpos - } - - override def indexWhere(pred: T => Boolean): Int = { - val r = indexWhere_quick(pred, arr, until, i) - val ret = if (r != -1) r - i else r - i = until - ret - } - - private def indexWhere_quick(pred: T => Boolean, a: Array[Any], ntil: Int, from: Int): Int = { - var j = from - var pos = -1 - while (j < ntil) { - if (pred(a(j).asInstanceOf[T])) { - pos = j - j = ntil - } else j += 1 - } - pos - } - - override def lastIndexWhere(pred: T => Boolean): Int = { - val r = lastIndexWhere_quick(pred, arr, i, until) - val ret = if (r != -1) r - i else r - i = until - ret - } - - private def lastIndexWhere_quick(pred: T => Boolean, a: Array[Any], from: Int, ntil: Int): Int = { - var pos = -1 - var j = ntil - 1 - while (j >= from) { - if (pred(a(j).asInstanceOf[T])) { - pos = j - j = -1 - } else j -= 1 - } - pos - } - - override def sameElements(that: Iterator[_]): Boolean = { - var same = true - while (i < until && that.hasNext) { - if (arr(i) != that.next) { - i = until - same = false - } - i += 1 - } - same - } - - /* transformers */ - - override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = cbf(self.repr) - cb.sizeHint(remaining) - map2combiner_quick(f, arr, cb, until, i) - i = until - cb - } - - private def map2combiner_quick[S, That](f: T => S, a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int): Unit = { - var j = from - while (j < ntil) { - cb += f(a(j).asInstanceOf[T]) - j += 1 - } - } - - override def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(self.repr) - collect2combiner_quick(pf, arr, cb, until, i) - i = until - cb - } - - private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int): Unit = { - var j = from - val runWith = pf.runWith(b => cb += b) - while (j < ntil) { - val curr = a(j).asInstanceOf[T] - runWith(curr) - j += 1 - } - } - - override def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(self.repr) - while (i < until) { - val traversable = f(arr(i).asInstanceOf[T]) - if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator - else cb ++= traversable.seq - i += 1 - } - cb - } - - override def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { - filter2combiner_quick(pred, cb, arr, until, i) - i = until - cb - } - - private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = { - var j = i - while(j < ntil) { - val curr = a(j).asInstanceOf[T] - if (pred(curr)) cb += curr - j += 1 - } - } - - override def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { - filterNot2combiner_quick(pred, cb, arr, until, i) - i = until - cb - } - - private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = { - var j 
= i - while(j < ntil) { - val curr = a(j).asInstanceOf[T] - if (!pred(curr)) cb += curr - j += 1 - } - } - - override def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](cb: Bld): Bld = { - cb.sizeHint(remaining) - cb.ifIs[ResizableParArrayCombiner[T]] { - pac => - // with res. combiner: - val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] - Array.copy(arr, i, targetarr, pac.lastbuff.size, until - i) - pac.lastbuff.setInternalSize(remaining) - } otherwise { - cb.ifIs[UnrolledParArrayCombiner[T]] { - pac => - // with unr. combiner: - val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] - Array.copy(arr, i, targetarr, 0, until - i) - pac.buff.size = pac.buff.size + until - i - pac.buff.lastPtr.size = until - i - } otherwise { - copy2builder_quick(cb, arr, until, i) - i = until - } - } - cb - } - - private def copy2builder_quick[U >: T, Coll](b: Builder[U, Coll], a: Array[Any], ntil: Int, from: Int): Unit = { - var j = from - while (j < ntil) { - b += a(j).asInstanceOf[T] - j += 1 - } - } - - override def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { - partition2combiners_quick(pred, btrue, bfalse, arr, until, i) - i = until - (btrue, bfalse) - } - - private def partition2combiners_quick[U >: T, This](p: T => Boolean, btrue: Builder[U, This], bfalse: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = { - var j = from - while (j < ntil) { - val curr = a(j).asInstanceOf[T] - if (p(curr)) btrue += curr else bfalse += curr - j += 1 - } - } - - override def take2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { - cb.sizeHint(n) - val ntil = i + n - val a = arr - while (i < ntil) { - cb += a(i).asInstanceOf[T] - i += 1 - } - cb - } - - override def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { - drop(n) - cb.sizeHint(remaining) - while (i < until) { - cb += arr(i).asInstanceOf[T] - i += 1 - } - cb - } - - override def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { - cb.ifIs[ResizableParArrayCombiner[T]] { - pac => - // with res. combiner: - val sz = remaining - pac.sizeHint(sz) - val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] - reverse2combiner_quick(targetarr, arr, 0, i, until) - pac.lastbuff.setInternalSize(sz) - } otherwise { - cb.ifIs[UnrolledParArrayCombiner[T]] { - pac => - // with unr. 
combiner: - val sz = remaining - pac.sizeHint(sz) - val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] - reverse2combiner_quick(targetarr, arr, 0, i, until) - pac.buff.size = pac.buff.size + sz - pac.buff.lastPtr.size = sz - } otherwise super.reverse2combiner(cb) - } - cb - } - - private def reverse2combiner_quick(targ: Array[Any], a: Array[Any], targfrom: Int, srcfrom: Int, srcuntil: Int): Unit = { - var j = srcfrom - var k = targfrom + srcuntil - srcfrom - 1 - while (j < srcuntil) { - targ(k) = a(j) - j += 1 - k -= 1 - } - } - - override def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, destarr: Array[A], from: Int): Unit = { - scanToArray_quick[U](array, destarr.asInstanceOf[Array[Any]], op, z, i, until, from) - i = until - } - - protected def scanToArray_quick[U](srcarr: Array[Any], destarr: Array[Any], op: (U, U) => U, z: U, srcfrom: Int, srcntil: Int, destfrom: Int): Unit = { - var last = z - var j = srcfrom - var k = destfrom - while (j < srcntil) { - last = op(last, srcarr(j).asInstanceOf[U]) - destarr(k) = last - j += 1 - k += 1 - } - } - - } - - /* operations */ - - private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]] - - override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) { - // reserve an array - val targarrseq = new ArraySeq[S](length) - val targetarr = targarrseq.array.asInstanceOf[Array[Any]] - - // fill it in parallel - tasksupport.executeAndWaitResult(new Map[S](f, targetarr, 0, length)) - - // wrap it into a parallel array - (new ParArray[S](targarrseq)).asInstanceOf[That] - } else super.map(f)(bf) - - override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That = - if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) { - // reserve an array - val targarrseq = new ArraySeq[U](length + 1) - val targetarr = targarrseq.array.asInstanceOf[Array[Any]] - targetarr(0) = z - - // do a parallel prefix scan - if (length > 0) tasksupport.executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult { - tree => tasksupport.executeAndWaitResult(new ScanToArray(tree, z, op, targetarr)) - }) - - // wrap the array into a parallel array - (new ParArray[U](targarrseq)).asInstanceOf[That] - } else super.scan(z)(op)(cbf) - - /* tasks */ - - class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any]) - extends Task[Unit, ScanToArray[U]] { - var result = () - - def leaf(prev: Option[Unit]) = iterate(tree) - private def iterate(tree: ScanTree[U]): Unit = tree match { - case ScanNode(left, right) => - iterate(left) - iterate(right) - case ScanLeaf(_, _, from, len, Some(prev), _) => - scanLeaf(array, targetarr, from, len, prev.acc) - case ScanLeaf(_, _, from, len, None, _) => - scanLeaf(array, targetarr, from, len, z) - } - private def scanLeaf(srcarr: Array[Any], targetarr: Array[Any], from: Int, len: Int, startval: U): Unit = { - var i = from - val until = from + len - var curr = startval - val operation = op - while (i < until) { - curr = operation(curr, srcarr(i).asInstanceOf[U]) - i += 1 - targetarr(i) = curr - } - } - def split = tree match { - case ScanNode(left, right) => Seq( - new ScanToArray(left, z, op, targetarr), - new ScanToArray(right, z, op, targetarr) - ) - case _ => sys.error("Can only split scan tree internal nodes.") - } - def shouldSplitFurther = tree match { - case ScanNode(_, _) => true - case _ => false - } - } - - class Map[S](f: T => 
S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] { - var result = () - - def leaf(prev: Option[Unit]) = { - val tarr = targetarr - val sarr = array - var i = offset - val until = offset + howmany - while (i < until) { - tarr(i) = f(sarr(i).asInstanceOf[T]) - i += 1 - } - } - def split = { - val fp = howmany / 2 - List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel) - } - - /* serialization */ - - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.defaultWriteObject - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - in.defaultReadObject - - // get raw array from arrayseq - array = arrayseq.array.asInstanceOf[Array[Any]] - } - -} - - -/** $factoryInfo - * @define Coll `mutable.ParArray` - * @define coll parallel array - */ -object ParArray extends ParFactory[ParArray] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParArray[T]] = new GenericCanCombineFrom[T] - def newBuilder[T]: Combiner[T, ParArray[T]] = newCombiner - def newCombiner[T]: Combiner[T, ParArray[T]] = ParArrayCombiner[T] - - /** Creates a new parallel array by wrapping the specified array. - */ - def handoff[T](arr: Array[T]): ParArray[T] = wrapOrRebuild(arr, arr.length) - - /** Creates a new parallel array by wrapping a part of the specified array. - */ - def handoff[T](arr: Array[T], sz: Int): ParArray[T] = wrapOrRebuild(arr, sz) - - private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match { - case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz)) - case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz)) - } - - def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = { - val newarr = new Array[T](arr.length) - Array.copy(arr, 0, newarr, 0, arr.length) - handoff(newarr) - } - - def fromTraversables[T](xss: GenTraversableOnce[T]*) = { - val cb = ParArrayCombiner[T]() - for (xs <- xss) { - cb ++= xs.seq - } - cb.result - } - -} +//import scala.collection.generic.GenericParTemplate +//import scala.collection.generic.GenericCompanion +//import scala.collection.generic.GenericParCompanion +//import scala.collection.generic.CanCombineFrom +//import scala.collection.generic.CanBuildFrom +//import scala.collection.generic.ParFactory +//import scala.collection.parallel.Combiner +//import scala.collection.parallel.SeqSplitter +//import scala.collection.parallel.ParSeqLike +//import scala.collection.parallel.Task +//import scala.collection.parallel.CHECK_RATE +//import scala.collection.mutable.ArraySeq +//import scala.collection.mutable.Builder +//import scala.collection.GenTraversableOnce +//import scala.reflect.ClassTag +// +///** Parallel sequence holding elements in a linear array. +// * +// * `ParArray` is a parallel sequence with a predefined size. The size of the array +// * cannot be changed after it's been created. +// * +// * `ParArray` internally keeps an array containing the elements. This means that +// * bulk operations based on traversal ensure fast access to elements. `ParArray` uses lazy builders that +// * create the internal data array only after the size of the array is known. In the meantime, they keep +// * the result set fragmented. 
The fragments +// * are copied into the resulting data array in parallel using fast array copy operations once all the combiners +// * are populated in parallel. +// * +// * @tparam T type of the elements in the array +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_array Scala's Parallel Collections Library overview]] +// * section on `ParArray` for more information. +// * +// * @define Coll `ParArray` +// * @define coll parallel array +// * +// */ +//@SerialVersionUID(1L) +//class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T]) +//extends ParSeq[T] +// with GenericParTemplate[T, ParArray] +// with ParSeqLike[T, ParArray[T], ArraySeq[T]] +// with Serializable +//{ +//self => +// +// @transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]] +// +// override def companion: GenericCompanion[ParArray] with GenericParCompanion[ParArray] = ParArray +// +// def this(sz: Int) = this { +// require(sz >= 0) +// new ArraySeq[T](sz) +// } +// +// def apply(i: Int) = array(i).asInstanceOf[T] +// +// def update(i: Int, elem: T) = array(i) = elem +// +// def length = arrayseq.length +// +// override def seq = arrayseq +// +// protected[parallel] def splitter: ParArrayIterator = { +// val pit = new ParArrayIterator +// pit +// } +// +// class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array) +// extends SeqSplitter[T] { +// def hasNext = i < until +// +// def next = { +// val elem = arr(i) +// i += 1 +// elem.asInstanceOf[T] +// } +// +// def remaining = until - i +// +// def dup = new ParArrayIterator(i, until, arr) +// +// def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = { +// var traversed = i +// val total = sizesIncomplete.reduceLeft(_ + _) +// val left = remaining +// val sizes = if (total >= left) sizesIncomplete else sizesIncomplete :+ (left - total) +// for (sz <- sizes) yield if (traversed < until) { +// val start = traversed +// val end = (traversed + sz) min until +// traversed = end +// new ParArrayIterator(start, end, arr) +// } else { +// new ParArrayIterator(traversed, traversed, arr) +// } +// } +// +// override def split: Seq[ParArrayIterator] = { +// val left = remaining +// if (left >= 2) { +// val splitpoint = left / 2 +// val sq = Seq( +// new ParArrayIterator(i, i + splitpoint, arr), +// new ParArrayIterator(i + splitpoint, until, arr)) +// i = until +// sq +// } else { +// Seq(this) +// } +// } +// +// override def toString = "ParArrayIterator(" + i + ", " + until + ")" +// +// /* overrides for efficiency */ +// +// /* accessors */ +// +// override def foreach[U](f: T => U) = { +// foreach_quick(f, arr, until, i) +// i = until +// } +// +// private def foreach_quick[U](f: T => U, a: Array[Any], ntil: Int, from: Int) = { +// var j = from +// while (j < ntil) { +// f(a(j).asInstanceOf[T]) +// j += 1 +// } +// } +// +// override def count(p: T => Boolean) = { +// val c = count_quick(p, arr, until, i) +// i = until +// c +// } +// +// private def count_quick(p: T => Boolean, a: Array[Any], ntil: Int, from: Int) = { +// var cnt = 0 +// var j = from +// while (j < ntil) { +// if (p(a(j).asInstanceOf[T])) cnt += 1 +// j += 1 +// } +// cnt +// } +// +// override def foldLeft[S](z: S)(op: (S, T) => S): S = { +// val r = foldLeft_quick(arr, until, op, z) +// i = until +// r +// } +// +// private def foldLeft_quick[S](a: Array[Any], ntil: Int, op: (S, T) => S, z: S): S = { +// var j = i +// 
var sum = z +// while (j < ntil) { +// sum = op(sum, a(j).asInstanceOf[T]) +// j += 1 +// } +// sum +// } +// +// override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op) +// +// override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop) +// +// override def sum[U >: T](implicit num: Numeric[U]): U = { +// val s = sum_quick(num, arr, until, i, num.zero) +// i = until +// s +// } +// +// private def sum_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, zero: U): U = { +// var j = from +// var sum = zero +// while (j < ntil) { +// sum = num.plus(sum, a(j).asInstanceOf[T]) +// j += 1 +// } +// sum +// } +// +// override def product[U >: T](implicit num: Numeric[U]): U = { +// val p = product_quick(num, arr, until, i, num.one) +// i = until +// p +// } +// +// private def product_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, one: U): U = { +// var j = from +// var prod = one +// while (j < ntil) { +// prod = num.times(prod, a(j).asInstanceOf[T]) +// j += 1 +// } +// prod +// } +// +// override def forall(p: T => Boolean): Boolean = { +// if (isAborted) return false +// +// var all = true +// while (i < until) { +// val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE +// +// all = forall_quick(p, array, nextuntil, i) +// if (all) i = nextuntil +// else { +// i = until +// abort() +// } +// +// if (isAborted) return false +// } +// all +// } +// +// // it's faster to use a separate small method +// private def forall_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { +// var j = start +// while (j < nextuntil) { +// if (p(a(j).asInstanceOf[T])) j += 1 +// else return false +// } +// true +// } +// +// override def exists(p: T => Boolean): Boolean = { +// if (isAborted) return true +// +// var some = false +// while (i < until) { +// val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE +// +// some = exists_quick(p, array, nextuntil, i) +// if (some) { +// i = until +// abort() +// } else i = nextuntil +// +// if (isAborted) return true +// } +// some +// } +// +// // faster to use separate small method +// private def exists_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { +// var j = start +// while (j < nextuntil) { +// if (p(a(j).asInstanceOf[T])) return true +// else j += 1 +// } +// false +// } +// +// override def find(p: T => Boolean): Option[T] = { +// if (isAborted) return None +// +// var r: Option[T] = None +// while (i < until) { +// val nextuntil = if ((i + CHECK_RATE) < until) (i + CHECK_RATE) else until +// +// r = find_quick(p, array, nextuntil, i) +// +// if (r != None) { +// i = until +// abort() +// } else i = nextuntil +// +// if (isAborted) return r +// } +// r +// } +// +// private def find_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Option[T] = { +// var j = start +// while (j < nextuntil) { +// val elem = a(j).asInstanceOf[T] +// if (p(elem)) return Some(elem) +// else j += 1 +// } +// None +// } +// +// override def drop(n: Int): ParArrayIterator = { +// i += n +// this +// } +// +// override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = { +// val totallen = (self.length - i) min len min (array.length - from) +// Array.copy(arr, i, array, from, totallen) +// i += totallen +// } +// +// override def prefixLength(pred: T => Boolean): Int = { +// val r = prefixLength_quick(pred, arr, until, i) +// i += r + 1 +// r +// } +// +// private def 
prefixLength_quick(pred: T => Boolean, a: Array[Any], ntil: Int, startpos: Int): Int = { +// var j = startpos +// var endpos = ntil +// while (j < endpos) { +// if (pred(a(j).asInstanceOf[T])) j += 1 +// else endpos = j +// } +// endpos - startpos +// } +// +// override def indexWhere(pred: T => Boolean): Int = { +// val r = indexWhere_quick(pred, arr, until, i) +// val ret = if (r != -1) r - i else r +// i = until +// ret +// } +// +// private def indexWhere_quick(pred: T => Boolean, a: Array[Any], ntil: Int, from: Int): Int = { +// var j = from +// var pos = -1 +// while (j < ntil) { +// if (pred(a(j).asInstanceOf[T])) { +// pos = j +// j = ntil +// } else j += 1 +// } +// pos +// } +// +// override def lastIndexWhere(pred: T => Boolean): Int = { +// val r = lastIndexWhere_quick(pred, arr, i, until) +// val ret = if (r != -1) r - i else r +// i = until +// ret +// } +// +// private def lastIndexWhere_quick(pred: T => Boolean, a: Array[Any], from: Int, ntil: Int): Int = { +// var pos = -1 +// var j = ntil - 1 +// while (j >= from) { +// if (pred(a(j).asInstanceOf[T])) { +// pos = j +// j = -1 +// } else j -= 1 +// } +// pos +// } +// +// override def sameElements(that: Iterator[_]): Boolean = { +// var same = true +// while (i < until && that.hasNext) { +// if (arr(i) != that.next) { +// i = until +// same = false +// } +// i += 1 +// } +// same +// } +// +// /* transformers */ +// +// override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { +// //val cb = cbf(self.repr) +// cb.sizeHint(remaining) +// map2combiner_quick(f, arr, cb, until, i) +// i = until +// cb +// } +// +// private def map2combiner_quick[S, That](f: T => S, a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int): Unit = { +// var j = from +// while (j < ntil) { +// cb += f(a(j).asInstanceOf[T]) +// j += 1 +// } +// } +// +// override def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = { +// //val cb = pbf(self.repr) +// collect2combiner_quick(pf, arr, cb, until, i) +// i = until +// cb +// } +// +// private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int): Unit = { +// var j = from +// val runWith = pf.runWith(b => cb += b) +// while (j < ntil) { +// val curr = a(j).asInstanceOf[T] +// runWith(curr) +// j += 1 +// } +// } +// +// override def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { +// //val cb = pbf(self.repr) +// while (i < until) { +// val traversable = f(arr(i).asInstanceOf[T]) +// if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator +// else cb ++= traversable.seq +// i += 1 +// } +// cb +// } +// +// override def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { +// filter2combiner_quick(pred, cb, arr, until, i) +// i = until +// cb +// } +// +// private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = { +// var j = i +// while(j < ntil) { +// val curr = a(j).asInstanceOf[T] +// if (pred(curr)) cb += curr +// j += 1 +// } +// } +// +// override def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { +// filterNot2combiner_quick(pred, cb, arr, until, i) +// i = until +// cb +// } +// +// private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = 
{ +// var j = i +// while(j < ntil) { +// val curr = a(j).asInstanceOf[T] +// if (!pred(curr)) cb += curr +// j += 1 +// } +// } +// +// override def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](cb: Bld): Bld = { +// cb.sizeHint(remaining) +// cb.ifIs[ResizableParArrayCombiner[T]] { +// pac => +// // with res. combiner: +// val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] +// Array.copy(arr, i, targetarr, pac.lastbuff.size, until - i) +// pac.lastbuff.setInternalSize(remaining) +// } otherwise { +// cb.ifIs[UnrolledParArrayCombiner[T]] { +// pac => +// // with unr. combiner: +// val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] +// Array.copy(arr, i, targetarr, 0, until - i) +// pac.buff.size = pac.buff.size + until - i +// pac.buff.lastPtr.size = until - i +// } otherwise { +// copy2builder_quick(cb, arr, until, i) +// i = until +// } +// } +// cb +// } +// +// private def copy2builder_quick[U >: T, Coll](b: Builder[U, Coll], a: Array[Any], ntil: Int, from: Int): Unit = { +// var j = from +// while (j < ntil) { +// b += a(j).asInstanceOf[T] +// j += 1 +// } +// } +// +// override def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { +// partition2combiners_quick(pred, btrue, bfalse, arr, until, i) +// i = until +// (btrue, bfalse) +// } +// +// private def partition2combiners_quick[U >: T, This](p: T => Boolean, btrue: Builder[U, This], bfalse: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = { +// var j = from +// while (j < ntil) { +// val curr = a(j).asInstanceOf[T] +// if (p(curr)) btrue += curr else bfalse += curr +// j += 1 +// } +// } +// +// override def take2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { +// cb.sizeHint(n) +// val ntil = i + n +// val a = arr +// while (i < ntil) { +// cb += a(i).asInstanceOf[T] +// i += 1 +// } +// cb +// } +// +// override def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { +// drop(n) +// cb.sizeHint(remaining) +// while (i < until) { +// cb += arr(i).asInstanceOf[T] +// i += 1 +// } +// cb +// } +// +// override def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { +// cb.ifIs[ResizableParArrayCombiner[T]] { +// pac => +// // with res. combiner: +// val sz = remaining +// pac.sizeHint(sz) +// val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] +// reverse2combiner_quick(targetarr, arr, 0, i, until) +// pac.lastbuff.setInternalSize(sz) +// } otherwise { +// cb.ifIs[UnrolledParArrayCombiner[T]] { +// pac => +// // with unr. 
combiner: +// val sz = remaining +// pac.sizeHint(sz) +// val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] +// reverse2combiner_quick(targetarr, arr, 0, i, until) +// pac.buff.size = pac.buff.size + sz +// pac.buff.lastPtr.size = sz +// } otherwise super.reverse2combiner(cb) +// } +// cb +// } +// +// private def reverse2combiner_quick(targ: Array[Any], a: Array[Any], targfrom: Int, srcfrom: Int, srcuntil: Int): Unit = { +// var j = srcfrom +// var k = targfrom + srcuntil - srcfrom - 1 +// while (j < srcuntil) { +// targ(k) = a(j) +// j += 1 +// k -= 1 +// } +// } +// +// override def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, destarr: Array[A], from: Int): Unit = { +// scanToArray_quick[U](array, destarr.asInstanceOf[Array[Any]], op, z, i, until, from) +// i = until +// } +// +// protected def scanToArray_quick[U](srcarr: Array[Any], destarr: Array[Any], op: (U, U) => U, z: U, srcfrom: Int, srcntil: Int, destfrom: Int): Unit = { +// var last = z +// var j = srcfrom +// var k = destfrom +// while (j < srcntil) { +// last = op(last, srcarr(j).asInstanceOf[U]) +// destarr(k) = last +// j += 1 +// k += 1 +// } +// } +// +// } +// +// /* operations */ +// +// private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]] +// +// override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) { +// // reserve an array +// val targarrseq = new ArraySeq[S](length) +// val targetarr = targarrseq.array.asInstanceOf[Array[Any]] +// +// // fill it in parallel +// tasksupport.executeAndWaitResult(new Map[S](f, targetarr, 0, length)) +// +// // wrap it into a parallel array +// (new ParArray[S](targarrseq)).asInstanceOf[That] +// } else super.map(f)(bf) +// +// override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That = +// if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) { +// // reserve an array +// val targarrseq = new ArraySeq[U](length + 1) +// val targetarr = targarrseq.array.asInstanceOf[Array[Any]] +// targetarr(0) = z +// +// // do a parallel prefix scan +// if (length > 0) tasksupport.executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult { +// tree => tasksupport.executeAndWaitResult(new ScanToArray(tree, z, op, targetarr)) +// }) +// +// // wrap the array into a parallel array +// (new ParArray[U](targarrseq)).asInstanceOf[That] +// } else super.scan(z)(op)(cbf) +// +// /* tasks */ +// +// class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any]) +// extends Task[Unit, ScanToArray[U]] { +// var result = () +// +// def leaf(prev: Option[Unit]) = iterate(tree) +// private def iterate(tree: ScanTree[U]): Unit = tree match { +// case ScanNode(left, right) => +// iterate(left) +// iterate(right) +// case ScanLeaf(_, _, from, len, Some(prev), _) => +// scanLeaf(array, targetarr, from, len, prev.acc) +// case ScanLeaf(_, _, from, len, None, _) => +// scanLeaf(array, targetarr, from, len, z) +// } +// private def scanLeaf(srcarr: Array[Any], targetarr: Array[Any], from: Int, len: Int, startval: U): Unit = { +// var i = from +// val until = from + len +// var curr = startval +// val operation = op +// while (i < until) { +// curr = operation(curr, srcarr(i).asInstanceOf[U]) +// i += 1 +// targetarr(i) = curr +// } +// } +// def split = tree match { +// case ScanNode(left, right) => Seq( +// new ScanToArray(left, z, op, targetarr), +// new ScanToArray(right, z, op, 
targetarr) +// ) +// case _ => sys.error("Can only split scan tree internal nodes.") +// } +// def shouldSplitFurther = tree match { +// case ScanNode(_, _) => true +// case _ => false +// } +// } +// +// class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] { +// var result = () +// +// def leaf(prev: Option[Unit]) = { +// val tarr = targetarr +// val sarr = array +// var i = offset +// val until = offset + howmany +// while (i < until) { +// tarr(i) = f(sarr(i).asInstanceOf[T]) +// i += 1 +// } +// } +// def split = { +// val fp = howmany / 2 +// List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp)) +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel) +// } +// +// /* serialization */ +// +// private def writeObject(out: java.io.ObjectOutputStream): Unit = { +// out.defaultWriteObject +// } +// +// private def readObject(in: java.io.ObjectInputStream): Unit = { +// in.defaultReadObject +// +// // get raw array from arrayseq +// array = arrayseq.array.asInstanceOf[Array[Any]] +// } +// +//} +// +// +///** $factoryInfo +// * @define Coll `mutable.ParArray` +// * @define coll parallel array +// */ +//object ParArray extends ParFactory[ParArray] { +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParArray[T]] = new GenericCanCombineFrom[T] +// def newBuilder[T]: Combiner[T, ParArray[T]] = newCombiner +// def newCombiner[T]: Combiner[T, ParArray[T]] = ParArrayCombiner[T] +// +// /** Creates a new parallel array by wrapping the specified array. +// */ +// def handoff[T](arr: Array[T]): ParArray[T] = wrapOrRebuild(arr, arr.length) +// +// /** Creates a new parallel array by wrapping a part of the specified array. +// */ +// def handoff[T](arr: Array[T], sz: Int): ParArray[T] = wrapOrRebuild(arr, sz) +// +// private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match { +// case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz)) +// case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz)) +// } +// +// def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = { +// val newarr = new Array[T](arr.length) +// Array.copy(arr, 0, newarr, 0, arr.length) +// handoff(newarr) +// } +// +// def fromTraversables[T](xss: GenTraversableOnce[T]*) = { +// val cb = ParArrayCombiner[T]() +// for (xs <- xss) { +// cb ++= xs.seq +// } +// cb.result +// } +// +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala index 47374c8d..57bdd880 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala @@ -10,94 +10,94 @@ package scala package collection package parallel.mutable -import scala.collection.parallel.IterableSplitter - -/** Parallel flat hash table. - * - * @tparam T type of the elements in the $coll. 
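// NOTE (editor): the iterator's `split` in this file needs the element count on
// each side of the midpoint without rescanning the table; `sizemap` caches a
// per-bucket count for exactly that. A sketch of the bucket-aligned case (the
// real calcNumElems additionally scans the two partially covered edge buckets):
def elemsInAlignedRange(sizemap: Array[Int], from: Int, until: Int, bucketSize: Int): Int =
  sizemap.slice(from / bucketSize, until / bucketSize).sum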
- * @define coll table - * @define Coll `ParFlatHashTable` - * - * @author Aleksandar Prokopec - */ -trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { - - override def alwaysInitSizeMap = true - - abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int) - extends IterableSplitter[T] with SizeMapUtils { - import scala.collection.DebugUtils._ - - private[this] var traversed = 0 - private[this] val itertable = table - - if (hasNext) scan() - - private[this] def scan(): Unit = { - while (itertable(idx) eq null) { - idx += 1 - } - } - - def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T] - - def remaining = totalsize - traversed - def hasNext = traversed < totalsize - def next() = if (hasNext) { - val r = entryToElem(itertable(idx)) - traversed += 1 - idx += 1 - if (hasNext) scan() - r - } else Iterator.empty.next() - def dup = newIterator(idx, until, totalsize) - def split = if (remaining > 1) { - val divpt = (until + idx) / 2 - - val fstidx = idx - val fstuntil = divpt - val fsttotal = calcNumElems(idx, divpt, itertable.length, sizeMapBucketSize) - val fstit = newIterator(fstidx, fstuntil, fsttotal) - - val sndidx = divpt - val snduntil = until - val sndtotal = remaining - fsttotal - val sndit = newIterator(sndidx, snduntil, sndtotal) - - Seq(fstit, sndit) - } else Seq(this) - - override def debugInformation = buildString { - append => - append("Parallel flat hash table iterator") - append("---------------------------------") - append("Traversed/total: " + traversed + " / " + totalsize) - append("Table idx/until: " + idx + " / " + until) - append("Table length: " + itertable.length) - append("Table: ") - append(arrayString(itertable, 0, itertable.length)) - append("Sizemap: ") - append(arrayString(sizemap, 0, sizemap.length)) - } - - protected def countElems(from: Int, until: Int) = { - var count = 0 - var i = from - while (i < until) { - if (itertable(i) ne null) count += 1 - i += 1 - } - count - } - - protected def countBucketSizes(frombucket: Int, untilbucket: Int) = { - var count = 0 - var i = frombucket - while (i < untilbucket) { - count += sizemap(i) - i += 1 - } - count - } - } -} +//import scala.collection.parallel.IterableSplitter +// +///** Parallel flat hash table. +// * +// * @tparam T type of the elements in the $coll. 
+// * @define coll table +// * @define Coll `ParFlatHashTable` +// * +// * @author Aleksandar Prokopec +// */ +//trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { +// +// override def alwaysInitSizeMap = true +// +// abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int) +// extends IterableSplitter[T] with SizeMapUtils { +// import scala.collection.DebugUtils._ +// +// private[this] var traversed = 0 +// private[this] val itertable = table +// +// if (hasNext) scan() +// +// private[this] def scan(): Unit = { +// while (itertable(idx) eq null) { +// idx += 1 +// } +// } +// +// def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T] +// +// def remaining = totalsize - traversed +// def hasNext = traversed < totalsize +// def next() = if (hasNext) { +// val r = entryToElem(itertable(idx)) +// traversed += 1 +// idx += 1 +// if (hasNext) scan() +// r +// } else Iterator.empty.next() +// def dup = newIterator(idx, until, totalsize) +// def split = if (remaining > 1) { +// val divpt = (until + idx) / 2 +// +// val fstidx = idx +// val fstuntil = divpt +// val fsttotal = calcNumElems(idx, divpt, itertable.length, sizeMapBucketSize) +// val fstit = newIterator(fstidx, fstuntil, fsttotal) +// +// val sndidx = divpt +// val snduntil = until +// val sndtotal = remaining - fsttotal +// val sndit = newIterator(sndidx, snduntil, sndtotal) +// +// Seq(fstit, sndit) +// } else Seq(this) +// +// override def debugInformation = buildString { +// append => +// append("Parallel flat hash table iterator") +// append("---------------------------------") +// append("Traversed/total: " + traversed + " / " + totalsize) +// append("Table idx/until: " + idx + " / " + until) +// append("Table length: " + itertable.length) +// append("Table: ") +// append(arrayString(itertable, 0, itertable.length)) +// append("Sizemap: ") +// append(arrayString(sizemap, 0, sizemap.length)) +// } +// +// protected def countElems(from: Int, until: Int) = { +// var count = 0 +// var i = from +// while (i < until) { +// if (itertable(i) ne null) count += 1 +// i += 1 +// } +// count +// } +// +// protected def countBucketSizes(frombucket: Int, untilbucket: Int) = { +// var count = 0 +// var i = frombucket +// while (i < untilbucket) { +// count += sizemap(i) +// i += 1 +// } +// count +// } +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala index b7b3b71d..a26fc2a1 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala @@ -10,293 +10,293 @@ package scala package collection.parallel package mutable -import scala.collection.generic._ -import scala.collection.mutable.DefaultEntry -import scala.collection.mutable.HashEntry -import scala.collection.mutable.HashTable -import scala.collection.mutable.UnrolledBuffer -import scala.collection.parallel.Task - -/** A parallel hash map. - * - * `ParHashMap` is a parallel map which internally keeps elements within a hash table. - * It uses chaining to resolve collisions. 
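// Hypothetical sketch of the chaining described above: each bucket holds a
// singly linked list of entries, and lookup walks the chain until the key
// matches (names are illustrative, not the library's `DefaultEntry`).
final class ChainedEntry[K, V](val key: K, var value: V, var next: ChainedEntry[K, V])

def find[K, V](buckets: Array[ChainedEntry[K, V]], key: K): Option[V] = {
  var e = buckets((key.hashCode & Int.MaxValue) % buckets.length)
  while (e != null && e.key != key) e = e.next
  Option(e).map(_.value)
}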
- * - * @tparam K type of the keys in the parallel hash map - * @tparam V type of the values in the parallel hash map - * - * @define Coll `ParHashMap` - * @define coll parallel hash map - * - * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tables for more information. - */ -@SerialVersionUID(1L) -class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParHashMap] - with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]] - with ParHashTable[K, DefaultEntry[K, V]] - with Serializable -{ -self => - initWithContents(contents) - - type Entry = scala.collection.mutable.DefaultEntry[K, V] - - def this() = this(null) - - override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap - - override def empty: ParHashMap[K, V] = new ParHashMap[K, V] - - protected[this] override def newCombiner = ParHashMapCombiner[K, V] - - override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents) - - def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) - - override def size = tableSize - - override def clear() = clearTable() - - def get(key: K): Option[V] = { - val e = findEntry(key) - if (e eq null) None - else Some(e.value) - } - - def put(key: K, value: V): Option[V] = { - val e = findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } - } - - def update(key: K, value: V): Unit = put(key, value) - - def remove(key: K): Option[V] = { - val e = removeEntry(key) - if (e ne null) Some(e.value) - else None - } - - def += (kv: (K, V)): this.type = { - val e = findOrAddEntry(kv._1, kv._2) - if (e ne null) e.value = kv._2 - this - } - - def -=(key: K): this.type = { removeEntry(key); this } - - override def stringPrefix = "ParHashMap" - - class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V]) - extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) { - def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value) - - def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) = - new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) - } - - protected def createNewEntry[V1](key: K, value: V1): Entry = { - new Entry(key, value.asInstanceOf[V]) - } - - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject())) - } - - private[parallel] override def brokenInvariants = { - // bucket by bucket, count elements - val buckets = for (i <- 0 until (table.length / sizeMapBucketSize)) yield checkBucket(i) - - // check if each element is in the position corresponding to its key - val elems = for (i <- 0 until table.length) yield checkEntry(i) - - buckets.flatMap(x => x) ++ elems.flatMap(x => x) - } - - private def checkBucket(i: Int) = { - def count(e: HashEntry[K, DefaultEntry[K, V]]): Int = if (e eq null) 0 else 1 + count(e.next) - val expected = sizemap(i) - val found = ((i * sizeMapBucketSize) until ((i + 1) * sizeMapBucketSize)).foldLeft(0) { - (acc, c) => acc + 
count(table(c)) - } - if (found != expected) List("Found " + found + " elements, while sizemap showed " + expected) - else Nil - } - - private def checkEntry(i: Int) = { - def check(e: HashEntry[K, DefaultEntry[K, V]]): List[String] = if (e eq null) Nil else - if (index(elemHashCode(e.key)) == i) check(e.next) - else ("Element " + e.key + " at " + i + " with " + elemHashCode(e.key) + " maps to " + index(elemHashCode(e.key))) :: check(e.next) - check(table(i)) - } -} - -/** $factoryInfo - * @define Coll `mutable.ParHashMap` - * @define coll parallel hash map - */ -object ParHashMap extends ParMapFactory[ParHashMap] { - var iters = 0 - - def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = ParHashMapCombiner.apply[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V] -} - -private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int) -extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks) - with scala.collection.mutable.HashTable.HashUtils[K] -{ - private val nonmasklen = ParHashMapCombiner.nonmasklength - private val seedvalue = 27 - - def +=(elem: (K, V)) = { - sz += 1 - val hc = improve(elemHashCode(elem._1), seedvalue) - val pos = (hc >>> nonmasklen) - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[DefaultEntry[K, V]]() - } - // add to bucket - buckets(pos) += new DefaultEntry(elem._1, elem._2) - this - } - - def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024 - // construct table - val table = new AddingHashTable(size, tableLoadFactor, seedvalue) - val bucks = buckets.map(b => if (b ne null) b.headPtr else null) - val insertcount = combinerTaskSupport.executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length)) - table.setSize(insertcount) - // TODO compare insertcount and size to see if compression is needed - val c = table.hashTableContents - new ParHashMap(c) - } else { - // construct a normal table and fill it sequentially - // TODO parallelize by keeping separate sizemaps and merging them - object table extends HashTable[K, DefaultEntry[K, V]] { - type Entry = DefaultEntry[K, V] - def insertEntry(e: Entry): Unit = { super.findOrAddEntry(e.key, e) } - def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry] - sizeMapInit(table.length) - } - var i = 0 - while (i < ParHashMapCombiner.numblocks) { - if (buckets(i) ne null) { - for (elem <- buckets(i)) table.insertEntry(elem) - } - i += 1 - } - new ParHashMap(table.hashTableContents) - } - - /* classes */ - - /** A hash table which will never resize itself. Knowing the number of elements in advance, - * it allocates the table of the required size when created. - * - * Entries are added using the `insertEntry` method. This method checks whether the element - * exists and updates the size map. It returns false if the key was already in the table, - * and true if the key was successfully inserted. It does not update the number of elements - * in the table. 
- */ - private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] { - import HashTable._ - _loadFactor = lf - table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems))) - tableSize = 0 - seedvalue = _seedvalue - threshold = newThreshold(_loadFactor, table.length) - sizeMapInit(table.length) - def setSize(sz: Int) = tableSize = sz - def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = { - var h = index(elemHashCode(e.key)) - val olde = table(h).asInstanceOf[DefaultEntry[K, V]] - - // check if key already exists - var ce = olde - while (ce ne null) { - if (ce.key == e.key) { - h = -1 - ce = null - } else ce = ce.next - } - - // if key does not already exist - if (h != -1) { - e.next = olde - table(h) = e - nnSizeMapAdd(h) - true - } else false - } - protected def createNewEntry[X](key: K, x: X) = ??? - } - - /* tasks */ - - import UnrolledBuffer.Unrolled - - class FillBlocks(buckets: Array[Unrolled[DefaultEntry[K, V]]], table: AddingHashTable, offset: Int, howmany: Int) - extends Task[Int, FillBlocks] { - var result = Int.MinValue - def leaf(prev: Option[Int]) = { - var i = offset - val until = offset + howmany - result = 0 - while (i < until) { - result += fillBlock(i, buckets(i)) - i += 1 - } - } - private def fillBlock(block: Int, elems: Unrolled[DefaultEntry[K, V]]) = { - var insertcount = 0 - var unrolled = elems - var i = 0 - val t = table - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - if (t.insertEntry(elem)) insertcount += 1 - i += 1 - } - i = 0 - unrolled = unrolled.next - } - insertcount - } - def split = { - val fp = howmany / 2 - List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) - } - override def merge(that: FillBlocks): Unit = { - this.result += that.result - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) - } -} - -private[parallel] object ParHashMapCombiner { - private[mutable] val discriminantbits = 5 - private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) - private[mutable] val nonmasklength = 32 - discriminantbits - - def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] -} +//import scala.collection.generic._ +//import scala.collection.mutable.DefaultEntry +//import scala.collection.mutable.HashEntry +//import scala.collection.mutable.HashTable +//import scala.collection.mutable.UnrolledBuffer +//import scala.collection.parallel.Task +// +///** A parallel hash map. +// * +// * `ParHashMap` is a parallel map which internally keeps elements within a hash table. +// * It uses chaining to resolve collisions. +// * +// * @tparam K type of the keys in the parallel hash map +// * @tparam V type of the values in the parallel hash map +// * +// * @define Coll `ParHashMap` +// * @define coll parallel hash map +// * +// * @author Aleksandar Prokopec +// * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] +// * section on Parallel Hash Tables for more information. 
+// */ +//@SerialVersionUID(1L) +//class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]]) +//extends ParMap[K, V] +// with GenericParMapTemplate[K, V, ParHashMap] +// with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]] +// with ParHashTable[K, DefaultEntry[K, V]] +// with Serializable +//{ +//self => +// initWithContents(contents) +// +// type Entry = scala.collection.mutable.DefaultEntry[K, V] +// +// def this() = this(null) +// +// override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap +// +// override def empty: ParHashMap[K, V] = new ParHashMap[K, V] +// +// protected[this] override def newCombiner = ParHashMapCombiner[K, V] +// +// override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents) +// +// def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) +// +// override def size = tableSize +// +// override def clear() = clearTable() +// +// def get(key: K): Option[V] = { +// val e = findEntry(key) +// if (e eq null) None +// else Some(e.value) +// } +// +// def put(key: K, value: V): Option[V] = { +// val e = findOrAddEntry(key, value) +// if (e eq null) None +// else { val v = e.value; e.value = value; Some(v) } +// } +// +// def update(key: K, value: V): Unit = put(key, value) +// +// def remove(key: K): Option[V] = { +// val e = removeEntry(key) +// if (e ne null) Some(e.value) +// else None +// } +// +// def += (kv: (K, V)): this.type = { +// val e = findOrAddEntry(kv._1, kv._2) +// if (e ne null) e.value = kv._2 +// this +// } +// +// def -=(key: K): this.type = { removeEntry(key); this } +// +// override def stringPrefix = "ParHashMap" +// +// class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V]) +// extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) { +// def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value) +// +// def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) = +// new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) +// } +// +// protected def createNewEntry[V1](key: K, value: V1): Entry = { +// new Entry(key, value.asInstanceOf[V]) +// } +// +// private def writeObject(out: java.io.ObjectOutputStream): Unit = { +// serializeTo(out, { entry => +// out.writeObject(entry.key) +// out.writeObject(entry.value) +// }) +// } +// +// private def readObject(in: java.io.ObjectInputStream): Unit = { +// init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject())) +// } +// +// private[parallel] override def brokenInvariants = { +// // bucket by bucket, count elements +// val buckets = for (i <- 0 until (table.length / sizeMapBucketSize)) yield checkBucket(i) +// +// // check if each element is in the position corresponding to its key +// val elems = for (i <- 0 until table.length) yield checkEntry(i) +// +// buckets.flatMap(x => x) ++ elems.flatMap(x => x) +// } +// +// private def checkBucket(i: Int) = { +// def count(e: HashEntry[K, DefaultEntry[K, V]]): Int = if (e eq null) 0 else 1 + count(e.next) +// val expected = sizemap(i) +// val found = ((i * sizeMapBucketSize) until ((i + 1) * sizeMapBucketSize)).foldLeft(0) { +// (acc, c) => acc + count(table(c)) +// } +// if (found != expected) List("Found " + found + " elements, while sizemap showed " + expected) +// else Nil +// } +// +// private def checkEntry(i: Int) = { +// def check(e: HashEntry[K, DefaultEntry[K, V]]): List[String] = if (e eq 
null) Nil else +// if (index(elemHashCode(e.key)) == i) check(e.next) +// else ("Element " + e.key + " at " + i + " with " + elemHashCode(e.key) + " maps to " + index(elemHashCode(e.key))) :: check(e.next) +// check(table(i)) +// } +//} +// +///** $factoryInfo +// * @define Coll `mutable.ParHashMap` +// * @define coll parallel hash map +// */ +//object ParHashMap extends ParMapFactory[ParHashMap] { +// var iters = 0 +// +// def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] +// +// def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = ParHashMapCombiner.apply[K, V] +// +// implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V] +//} +// +//private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int) +//extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks) +// with scala.collection.mutable.HashTable.HashUtils[K] +//{ +// private val nonmasklen = ParHashMapCombiner.nonmasklength +// private val seedvalue = 27 +// +// def +=(elem: (K, V)) = { +// sz += 1 +// val hc = improve(elemHashCode(elem._1), seedvalue) +// val pos = (hc >>> nonmasklen) +// if (buckets(pos) eq null) { +// // initialize bucket +// buckets(pos) = new UnrolledBuffer[DefaultEntry[K, V]]() +// } +// // add to bucket +// buckets(pos) += new DefaultEntry(elem._1, elem._2) +// this +// } +// +// def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024 +// // construct table +// val table = new AddingHashTable(size, tableLoadFactor, seedvalue) +// val bucks = buckets.map(b => if (b ne null) b.headPtr else null) +// val insertcount = combinerTaskSupport.executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length)) +// table.setSize(insertcount) +// // TODO compare insertcount and size to see if compression is needed +// val c = table.hashTableContents +// new ParHashMap(c) +// } else { +// // construct a normal table and fill it sequentially +// // TODO parallelize by keeping separate sizemaps and merging them +// object table extends HashTable[K, DefaultEntry[K, V]] { +// type Entry = DefaultEntry[K, V] +// def insertEntry(e: Entry): Unit = { super.findOrAddEntry(e.key, e) } +// def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry] +// sizeMapInit(table.length) +// } +// var i = 0 +// while (i < ParHashMapCombiner.numblocks) { +// if (buckets(i) ne null) { +// for (elem <- buckets(i)) table.insertEntry(elem) +// } +// i += 1 +// } +// new ParHashMap(table.hashTableContents) +// } +// +// /* classes */ +// +// /** A hash table which will never resize itself. Knowing the number of elements in advance, +// * it allocates the table of the required size when created. +// * +// * Entries are added using the `insertEntry` method. This method checks whether the element +// * exists and updates the size map. It returns false if the key was already in the table, +// * and true if the key was successfully inserted. It does not update the number of elements +// * in the table. 
+// */ +// private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] { +// import HashTable._ +// _loadFactor = lf +// table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems))) +// tableSize = 0 +// seedvalue = _seedvalue +// threshold = newThreshold(_loadFactor, table.length) +// sizeMapInit(table.length) +// def setSize(sz: Int) = tableSize = sz +// def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = { +// var h = index(elemHashCode(e.key)) +// val olde = table(h).asInstanceOf[DefaultEntry[K, V]] +// +// // check if key already exists +// var ce = olde +// while (ce ne null) { +// if (ce.key == e.key) { +// h = -1 +// ce = null +// } else ce = ce.next +// } +// +// // if key does not already exist +// if (h != -1) { +// e.next = olde +// table(h) = e +// nnSizeMapAdd(h) +// true +// } else false +// } +// protected def createNewEntry[X](key: K, x: X) = ??? +// } +// +// /* tasks */ +// +// import UnrolledBuffer.Unrolled +// +// class FillBlocks(buckets: Array[Unrolled[DefaultEntry[K, V]]], table: AddingHashTable, offset: Int, howmany: Int) +// extends Task[Int, FillBlocks] { +// var result = Int.MinValue +// def leaf(prev: Option[Int]) = { +// var i = offset +// val until = offset + howmany +// result = 0 +// while (i < until) { +// result += fillBlock(i, buckets(i)) +// i += 1 +// } +// } +// private def fillBlock(block: Int, elems: Unrolled[DefaultEntry[K, V]]) = { +// var insertcount = 0 +// var unrolled = elems +// var i = 0 +// val t = table +// while (unrolled ne null) { +// val chunkarr = unrolled.array +// val chunksz = unrolled.size +// while (i < chunksz) { +// val elem = chunkarr(i) +// if (t.insertEntry(elem)) insertcount += 1 +// i += 1 +// } +// i = 0 +// unrolled = unrolled.next +// } +// insertcount +// } +// def split = { +// val fp = howmany / 2 +// List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) +// } +// override def merge(that: FillBlocks): Unit = { +// this.result += that.result +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) +// } +//} +// +//private[parallel] object ParHashMapCombiner { +// private[mutable] val discriminantbits = 5 +// private[mutable] val numblocks = 1 << discriminantbits +// private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) +// private[mutable] val nonmasklength = 32 - discriminantbits +// +// def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala index f8d51f18..dddaee07 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala @@ -11,319 +11,319 @@ package collection.parallel.mutable -import scala.collection.generic._ -import scala.collection.mutable.FlatHashTable -import scala.collection.parallel.Combiner -import scala.collection.mutable.UnrolledBuffer -import scala.collection.parallel.Task - - - -/** A parallel hash set. - * - * `ParHashSet` is a parallel set which internally keeps elements within a hash table. - * It uses linear probing to resolve collisions. 
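// Hypothetical sketch of the linear probing described above: on a collision,
// try successive slots until the element or a free slot is found. It assumes
// the table is never completely full (the real table grows by load factor).
def probeInsert(table: Array[AnyRef], elem: AnyRef): Boolean = {
  var h = (elem.hashCode & Int.MaxValue) % table.length
  while (table(h) ne null) {
    if (table(h) == elem) return false // already present, nothing to do
    h = (h + 1) % table.length         // probe the next slot
  }
  table(h) = elem
  true
}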
- * - * @tparam T type of the elements in the $coll. - * - * @define Coll `ParHashSet` - * @define coll parallel hash set - * - * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tables for more information. - */ -@SerialVersionUID(1L) -class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T]) -extends ParSet[T] - with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]] - with ParFlatHashTable[T] - with Serializable -{ - initWithContents(contents) - // println("----> new par hash set!") - // java.lang.Thread.dumpStack - // println(debugInformation) - - def this() = this(null) - - override def companion = ParHashSet - - override def empty = new ParHashSet - - override def iterator = splitter - - override def size = tableSize - - def clear() = clearTable() - - override def seq = new scala.collection.mutable.HashSet(hashTableContents) - - def +=(elem: T) = { - addElem(elem) - this - } - - def -=(elem: T) = { - removeElem(elem) - this - } - - override def stringPrefix = "ParHashSet" - - def contains(elem: T) = containsElem(elem) - - def splitter = new ParHashSetIterator(0, table.length, size) - - class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int) - extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) { - def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) - } - - private def writeObject(s: java.io.ObjectOutputStream): Unit = { - serializeTo(s) - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - init(in, x => ()) - } - - import scala.collection.DebugUtils._ - override def debugInformation = buildString { - append => - append("Parallel flat hash table set") - append("No. 
elems: " + tableSize) - append("Table length: " + table.length) - append("Table: ") - append(arrayString(table, 0, table.length)) - append("Sizemap: ") - append(arrayString(sizemap, 0, sizemap.length)) - } - -} - - -/** $factoryInfo - * @define Coll `mutable.ParHashSet` - * @define coll parallel hash set - */ -object ParHashSet extends ParSetFactory[ParHashSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = new GenericCanCombineFrom[T] - - override def newBuilder[T]: Combiner[T, ParHashSet[T]] = newCombiner - - override def newCombiner[T]: Combiner[T, ParHashSet[T]] = ParHashSetCombiner.apply[T] -} - - -private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int) -extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks) -with scala.collection.mutable.FlatHashTable.HashUtils[T] { -//self: EnvironmentPassingCombiner[T, ParHashSet[T]] => - private val nonmasklen = ParHashSetCombiner.nonmasklength - private val seedvalue = 27 - - def +=(elem: T) = { - val entry = elemToEntry(elem) - sz += 1 - val hc = improve(entry.hashCode, seedvalue) - val pos = hc >>> nonmasklen - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[AnyRef] - } - // add to bucket - buckets(pos) += entry - this - } - - def result: ParHashSet[T] = { - val contents = if (size >= ParHashSetCombiner.numblocks * sizeMapBucketSize) parPopulate else seqPopulate - new ParHashSet(contents) - } - - private def parPopulate: FlatHashTable.Contents[T] = { - // construct it in parallel - val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue) - val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length)) - var leftinserts = 0 - for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry) - table.setSize(leftinserts + inserted) - table.hashTableContents - } - - private def seqPopulate: FlatHashTable.Contents[T] = { - // construct it sequentially - // TODO parallelize by keeping separate size maps and merging them - val tbl = new FlatHashTable[T] { - sizeMapInit(table.length) - seedvalue = ParHashSetCombiner.this.seedvalue - for { - buffer <- buckets - if buffer ne null - entry <- buffer - } addEntry(entry) - } - tbl.hashTableContents - } - - /* classes */ - - /** A flat hash table which doesn't resize itself. It accepts the number of elements - * it has to take and allocates the underlying hash table in advance. - * Elements can only be added to it. The final size has to be adjusted manually. - * It is internal to `ParHashSet` combiners. - */ - class AddingFlatHashTable(numelems: Int, lf: Int, inseedvalue: Int) extends FlatHashTable[T] { - _loadFactor = lf - table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor))) - tableSize = 0 - threshold = FlatHashTable.newThreshold(_loadFactor, table.length) - seedvalue = inseedvalue - sizeMapInit(table.length) - - override def toString = "AFHT(%s)".format(table.length) - - def tableLength = table.length - - def setSize(sz: Int) = tableSize = sz - - /** - * The elements are added using the `insertElem` method. 
This method accepts three - * arguments: - * - * @param insertAt where to add the element (set to -1 to use its hashcode) - * @param comesBefore the position before which the element should be added to - * @param newEntry the element to be added - * - * If the element is to be inserted at the position corresponding to its hash code, - * the table will try to add the element in such a position if possible. Collisions are resolved - * using linear hashing, so the element may actually have to be added to a position - * that follows the specified one. In the case that the first unoccupied position - * comes after `comesBefore`, the element is not added and the method simply returns -1, - * indicating that it couldn't add the element in a position that comes before the - * specified one. - * If the element is already present in the hash table, it is not added, and this method - * returns 0. If the element is added, it returns 1. - */ - def insertEntry(insertAt: Int, comesBefore: Int, newEntry : AnyRef): Int = { - var h = insertAt - if (h == -1) h = index(newEntry.hashCode) - var curEntry = table(h) - while (null != curEntry) { - if (curEntry == newEntry) return 0 - h = h + 1 // we *do not* do `(h + 1) % table.length` here, because we'll never overflow!! - if (h >= comesBefore) return -1 - curEntry = table(h) - } - table(h) = newEntry - - // this is incorrect since we set size afterwards anyway and a counter - // like this would not even work: - // - // tableSize = tableSize + 1 - // - // furthermore, it completely bogs down the parallel - // execution when there are multiple workers - - nnSizeMapAdd(h) - 1 - } - } - - /* tasks */ - - class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int) - extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] { - var result = (Int.MinValue, new UnrolledBuffer[AnyRef]) - - def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]): Unit = { - var i = offset - var totalinserts = 0 - var leftover = new UnrolledBuffer[AnyRef]() - while (i < (offset + howmany)) { - val (inserted, intonextblock) = fillBlock(i, buckets(i), leftover) - totalinserts += inserted - leftover = intonextblock - i += 1 - } - result = (totalinserts, leftover) - } - private val blocksize = table.tableLength >> ParHashSetCombiner.discriminantbits - private def blockStart(block: Int) = block * blocksize - private def nextBlockStart(block: Int) = (block + 1) * blocksize - private def fillBlock(block: Int, elems: UnrolledBuffer[AnyRef], leftovers: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { - val beforePos = nextBlockStart(block) - - // store the elems - val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[AnyRef]()) - - // store the leftovers - val (leftoversIn, leftoversLeft) = insertAll(blockStart(block), beforePos, leftovers) - - // return the no. 
of stored elements tupled with leftovers - (elemsIn + leftoversIn, elemsLeft concat leftoversLeft) - } - private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { - val leftovers = new UnrolledBuffer[AnyRef] - var inserted = 0 - - var unrolled = elems.headPtr - var i = 0 - val t = table - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val entry = chunkarr(i) - val res = t.insertEntry(atPos, beforePos, entry) - if (res >= 0) inserted += res - else leftovers += entry - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - // slower: - // var it = elems.iterator - // while (it.hasNext) { - // val elem = it.next - // val res = table.insertEntry(atPos, beforePos, elem.asInstanceOf[T]) - // if (res >= 0) inserted += res - // else leftovers += elem - // } - - (inserted, leftovers) - } - def split = { - val fp = howmany / 2 - List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) - } - override def merge(that: FillBlocks): Unit = { - // take the leftovers from the left task, store them into the block of the right task - val atPos = blockStart(that.offset) - val beforePos = blockStart(that.offset + that.howmany) - val (inserted, remainingLeftovers) = insertAll(atPos, beforePos, this.result._2) - - // anything left after trying the store the left leftovers is added to the right task leftovers - // and a new leftovers set is produced in this way - // the total number of successfully inserted elements is adjusted accordingly - result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) - } - -} - - -private[parallel] object ParHashSetCombiner { - private[mutable] val discriminantbits = 5 - private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) - private[mutable] val nonmasklength = 32 - discriminantbits - - def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]] -} - +//import scala.collection.generic._ +//import scala.collection.mutable.FlatHashTable +//import scala.collection.parallel.Combiner +//import scala.collection.mutable.UnrolledBuffer +//import scala.collection.parallel.Task +// +// +// +///** A parallel hash set. +// * +// * `ParHashSet` is a parallel set which internally keeps elements within a hash table. +// * It uses linear probing to resolve collisions. +// * +// * @tparam T type of the elements in the $coll. +// * +// * @define Coll `ParHashSet` +// * @define coll parallel hash set +// * +// * @author Aleksandar Prokopec +// * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] +// * section on Parallel Hash Tables for more information. 
+// */ +//@SerialVersionUID(1L) +//class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T]) +//extends ParSet[T] +// with GenericParTemplate[T, ParHashSet] +// with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]] +// with ParFlatHashTable[T] +// with Serializable +//{ +// initWithContents(contents) +// // println("----> new par hash set!") +// // java.lang.Thread.dumpStack +// // println(debugInformation) +// +// def this() = this(null) +// +// override def companion = ParHashSet +// +// override def empty = new ParHashSet +// +// override def iterator = splitter +// +// override def size = tableSize +// +// def clear() = clearTable() +// +// override def seq = new scala.collection.mutable.HashSet(hashTableContents) +// +// def +=(elem: T) = { +// addElem(elem) +// this +// } +// +// def -=(elem: T) = { +// removeElem(elem) +// this +// } +// +// override def stringPrefix = "ParHashSet" +// +// def contains(elem: T) = containsElem(elem) +// +// def splitter = new ParHashSetIterator(0, table.length, size) +// +// class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int) +// extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) { +// def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) +// } +// +// private def writeObject(s: java.io.ObjectOutputStream): Unit = { +// serializeTo(s) +// } +// +// private def readObject(in: java.io.ObjectInputStream): Unit = { +// init(in, x => ()) +// } +// +// import scala.collection.DebugUtils._ +// override def debugInformation = buildString { +// append => +// append("Parallel flat hash table set") +// append("No. elems: " + tableSize) +// append("Table length: " + table.length) +// append("Table: ") +// append(arrayString(table, 0, table.length)) +// append("Sizemap: ") +// append(arrayString(sizemap, 0, sizemap.length)) +// } +// +//} +// +// +///** $factoryInfo +// * @define Coll `mutable.ParHashSet` +// * @define coll parallel hash set +// */ +//object ParHashSet extends ParSetFactory[ParHashSet] { +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = new GenericCanCombineFrom[T] +// +// override def newBuilder[T]: Combiner[T, ParHashSet[T]] = newCombiner +// +// override def newCombiner[T]: Combiner[T, ParHashSet[T]] = ParHashSetCombiner.apply[T] +//} +// +// +//private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int) +//extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks) +//with scala.collection.mutable.FlatHashTable.HashUtils[T] { +////self: EnvironmentPassingCombiner[T, ParHashSet[T]] => +// private val nonmasklen = ParHashSetCombiner.nonmasklength +// private val seedvalue = 27 +// +// def +=(elem: T) = { +// val entry = elemToEntry(elem) +// sz += 1 +// val hc = improve(entry.hashCode, seedvalue) +// val pos = hc >>> nonmasklen +// if (buckets(pos) eq null) { +// // initialize bucket +// buckets(pos) = new UnrolledBuffer[AnyRef] +// } +// // add to bucket +// buckets(pos) += entry +// this +// } +// +// def result: ParHashSet[T] = { +// val contents = if (size >= ParHashSetCombiner.numblocks * sizeMapBucketSize) parPopulate else seqPopulate +// new ParHashSet(contents) +// } +// +// private def parPopulate: FlatHashTable.Contents[T] = { +// // construct it in parallel +// val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue) +// val (inserted, leftovers) = 
combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length)) +// var leftinserts = 0 +// for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry) +// table.setSize(leftinserts + inserted) +// table.hashTableContents +// } +// +// private def seqPopulate: FlatHashTable.Contents[T] = { +// // construct it sequentially +// // TODO parallelize by keeping separate size maps and merging them +// val tbl = new FlatHashTable[T] { +// sizeMapInit(table.length) +// seedvalue = ParHashSetCombiner.this.seedvalue +// for { +// buffer <- buckets +// if buffer ne null +// entry <- buffer +// } addEntry(entry) +// } +// tbl.hashTableContents +// } +// +// /* classes */ +// +// /** A flat hash table which doesn't resize itself. It accepts the number of elements +// * it has to take and allocates the underlying hash table in advance. +// * Elements can only be added to it. The final size has to be adjusted manually. +// * It is internal to `ParHashSet` combiners. +// */ +// class AddingFlatHashTable(numelems: Int, lf: Int, inseedvalue: Int) extends FlatHashTable[T] { +// _loadFactor = lf +// table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor))) +// tableSize = 0 +// threshold = FlatHashTable.newThreshold(_loadFactor, table.length) +// seedvalue = inseedvalue +// sizeMapInit(table.length) +// +// override def toString = "AFHT(%s)".format(table.length) +// +// def tableLength = table.length +// +// def setSize(sz: Int) = tableSize = sz +// +// /** +// * The elements are added using the `insertElem` method. This method accepts three +// * arguments: +// * +// * @param insertAt where to add the element (set to -1 to use its hashcode) +// * @param comesBefore the position before which the element should be added to +// * @param newEntry the element to be added +// * +// * If the element is to be inserted at the position corresponding to its hash code, +// * the table will try to add the element in such a position if possible. Collisions are resolved +// * using linear hashing, so the element may actually have to be added to a position +// * that follows the specified one. In the case that the first unoccupied position +// * comes after `comesBefore`, the element is not added and the method simply returns -1, +// * indicating that it couldn't add the element in a position that comes before the +// * specified one. +// * If the element is already present in the hash table, it is not added, and this method +// * returns 0. If the element is added, it returns 1. +// */ +// def insertEntry(insertAt: Int, comesBefore: Int, newEntry : AnyRef): Int = { +// var h = insertAt +// if (h == -1) h = index(newEntry.hashCode) +// var curEntry = table(h) +// while (null != curEntry) { +// if (curEntry == newEntry) return 0 +// h = h + 1 // we *do not* do `(h + 1) % table.length` here, because we'll never overflow!! 
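+//        // (no wrap-around is needed: the check on the next line bails out
+//        //  with -1 once `h` reaches `comesBefore`, and `comesBefore` never
+//        //  exceeds the table length, so `h` cannot run off the array)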
+// if (h >= comesBefore) return -1 +// curEntry = table(h) +// } +// table(h) = newEntry +// +// // this is incorrect since we set size afterwards anyway and a counter +// // like this would not even work: +// // +// // tableSize = tableSize + 1 +// // +// // furthermore, it completely bogs down the parallel +// // execution when there are multiple workers +// +// nnSizeMapAdd(h) +// 1 +// } +// } +// +// /* tasks */ +// +// class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int) +// extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] { +// var result = (Int.MinValue, new UnrolledBuffer[AnyRef]) +// +// def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]): Unit = { +// var i = offset +// var totalinserts = 0 +// var leftover = new UnrolledBuffer[AnyRef]() +// while (i < (offset + howmany)) { +// val (inserted, intonextblock) = fillBlock(i, buckets(i), leftover) +// totalinserts += inserted +// leftover = intonextblock +// i += 1 +// } +// result = (totalinserts, leftover) +// } +// private val blocksize = table.tableLength >> ParHashSetCombiner.discriminantbits +// private def blockStart(block: Int) = block * blocksize +// private def nextBlockStart(block: Int) = (block + 1) * blocksize +// private def fillBlock(block: Int, elems: UnrolledBuffer[AnyRef], leftovers: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { +// val beforePos = nextBlockStart(block) +// +// // store the elems +// val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[AnyRef]()) +// +// // store the leftovers +// val (leftoversIn, leftoversLeft) = insertAll(blockStart(block), beforePos, leftovers) +// +// // return the no. of stored elements tupled with leftovers +// (elemsIn + leftoversIn, elemsLeft concat leftoversLeft) +// } +// private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { +// val leftovers = new UnrolledBuffer[AnyRef] +// var inserted = 0 +// +// var unrolled = elems.headPtr +// var i = 0 +// val t = table +// while (unrolled ne null) { +// val chunkarr = unrolled.array +// val chunksz = unrolled.size +// while (i < chunksz) { +// val entry = chunkarr(i) +// val res = t.insertEntry(atPos, beforePos, entry) +// if (res >= 0) inserted += res +// else leftovers += entry +// i += 1 +// } +// i = 0 +// unrolled = unrolled.next +// } +// +// // slower: +// // var it = elems.iterator +// // while (it.hasNext) { +// // val elem = it.next +// // val res = table.insertEntry(atPos, beforePos, elem.asInstanceOf[T]) +// // if (res >= 0) inserted += res +// // else leftovers += elem +// // } +// +// (inserted, leftovers) +// } +// def split = { +// val fp = howmany / 2 +// List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) +// } +// override def merge(that: FillBlocks): Unit = { +// // take the leftovers from the left task, store them into the block of the right task +// val atPos = blockStart(that.offset) +// val beforePos = blockStart(that.offset + that.howmany) +// val (inserted, remainingLeftovers) = insertAll(atPos, beforePos, this.result._2) +// +// // anything left after trying the store the left leftovers is added to the right task leftovers +// // and a new leftovers set is produced in this way +// // the total number of successfully inserted elements is adjusted accordingly +// result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat 
that.result._2) +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) +// } +// +//} +// +// +//private[parallel] object ParHashSetCombiner { +// private[mutable] val discriminantbits = 5 +// private[mutable] val numblocks = 1 << discriminantbits +// private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) +// private[mutable] val nonmasklength = 32 - discriminantbits +// +// def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]] +//} +// diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala index 96b0ac7c..f2f24b55 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala @@ -10,134 +10,134 @@ package scala package collection package parallel.mutable -import scala.collection.mutable.HashEntry -import scala.collection.parallel.IterableSplitter - -/** Provides functionality for hash tables with linked list buckets, - * enriching the data structure by fulfilling certain requirements - * for their parallel construction and iteration. - */ -trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] { - - override def alwaysInitSizeMap = true - - /** A parallel iterator returning all the entries. - */ - abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]] - (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry) - extends IterableSplitter[T] with SizeMapUtils { - private val itertable = table - private var traversed = 0 - scan() - - def entry2item(e: Entry): T - def newIterator(idxFrom: Int, idxUntil: Int, totalSize: Int, es: Entry): IterRepr - - def hasNext = { - es ne null - } - - def next(): T = { - val res = es - es = es.next - scan() - traversed += 1 - entry2item(res) - } - - def scan(): Unit = { - while (es == null && idx < until) { - es = itertable(idx).asInstanceOf[Entry] - idx = idx + 1 - } - } - - def remaining = totalsize - traversed - - private[parallel] override def debugInformation = { - buildString { - append => - append("/--------------------\\") - append("Parallel hash table entry iterator") - append("total hash table elements: " + tableSize) - append("pos: " + idx) - append("until: " + until) - append("traversed: " + traversed) - append("totalsize: " + totalsize) - append("current entry: " + es) - append("underlying from " + idx + " until " + until) - append(itertable.slice(idx, until).map(x => if (x != null) x.toString else "n/a").mkString(" | ")) - append("\\--------------------/") - } - } - - def dup = newIterator(idx, until, totalsize, es) - - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { - if (until > idx) { - // there is at least one more slot for the next iterator - // divide the rest of the table - val divsz = (until - idx) / 2 - - // second iterator params - val sidx = idx + divsz + 1 // + 1 preserves iteration invariant - val suntil = until - val ses = itertable(sidx - 1).asInstanceOf[Entry] // sidx - 1 ensures counting from the right spot - val stotal = calcNumElems(sidx - 1, suntil, table.length, sizeMapBucketSize) - - // first iterator params - val fidx = idx - val funtil = idx + divsz - val fes = es - val ftotal = totalsize - stotal - - Seq( - 
newIterator(fidx, funtil, ftotal, fes), - newIterator(sidx, suntil, stotal, ses) - ) - } else { - // otherwise, this is the last entry in the table - all what remains is the chain - // so split the rest of the chain - val arr = convertToArrayBuffer(es) - val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) - arrpit.split - } - } else Seq(this.asInstanceOf[IterRepr]) - - private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = { - val buff = mutable.ArrayBuffer[Entry]() - var curr = chainhead - while (curr ne null) { - buff += curr - curr = curr.next - } - // println("converted " + remaining + " element iterator into buffer: " + buff) - buff map { e => entry2item(e) } - } - - protected def countElems(from: Int, until: Int) = { - var c = 0 - var idx = from - var es: Entry = null - while (idx < until) { - es = itertable(idx).asInstanceOf[Entry] - while (es ne null) { - c += 1 - es = es.next - } - idx += 1 - } - c - } - - protected def countBucketSizes(fromBucket: Int, untilBucket: Int) = { - var c = 0 - var idx = fromBucket - while (idx < untilBucket) { - c += sizemap(idx) - idx += 1 - } - c - } - } -} +//import scala.collection.mutable.HashEntry +//import scala.collection.parallel.IterableSplitter +// +///** Provides functionality for hash tables with linked list buckets, +// * enriching the data structure by fulfilling certain requirements +// * for their parallel construction and iteration. +// */ +//trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] { +// +// override def alwaysInitSizeMap = true +// +// /** A parallel iterator returning all the entries. +// */ +// abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]] +// (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry) +// extends IterableSplitter[T] with SizeMapUtils { +// private val itertable = table +// private var traversed = 0 +// scan() +// +// def entry2item(e: Entry): T +// def newIterator(idxFrom: Int, idxUntil: Int, totalSize: Int, es: Entry): IterRepr +// +// def hasNext = { +// es ne null +// } +// +// def next(): T = { +// val res = es +// es = es.next +// scan() +// traversed += 1 +// entry2item(res) +// } +// +// def scan(): Unit = { +// while (es == null && idx < until) { +// es = itertable(idx).asInstanceOf[Entry] +// idx = idx + 1 +// } +// } +// +// def remaining = totalsize - traversed +// +// private[parallel] override def debugInformation = { +// buildString { +// append => +// append("/--------------------\\") +// append("Parallel hash table entry iterator") +// append("total hash table elements: " + tableSize) +// append("pos: " + idx) +// append("until: " + until) +// append("traversed: " + traversed) +// append("totalsize: " + totalsize) +// append("current entry: " + es) +// append("underlying from " + idx + " until " + until) +// append(itertable.slice(idx, until).map(x => if (x != null) x.toString else "n/a").mkString(" | ")) +// append("\\--------------------/") +// } +// } +// +// def dup = newIterator(idx, until, totalsize, es) +// +// def split: Seq[IterableSplitter[T]] = if (remaining > 1) { +// if (until > idx) { +// // there is at least one more slot for the next iterator +// // divide the rest of the table +// val divsz = (until - idx) / 2 +// +// // second iterator params +// val sidx = idx + divsz + 1 // + 1 preserves iteration invariant +// val suntil = until +// val ses = itertable(sidx - 1).asInstanceOf[Entry] 
// sidx - 1 ensures counting from the right spot +// val stotal = calcNumElems(sidx - 1, suntil, table.length, sizeMapBucketSize) +// +// // first iterator params +// val fidx = idx +// val funtil = idx + divsz +// val fes = es +// val ftotal = totalsize - stotal +// +// Seq( +// newIterator(fidx, funtil, ftotal, fes), +// newIterator(sidx, suntil, stotal, ses) +// ) +// } else { +// // otherwise, this is the last entry in the table - all what remains is the chain +// // so split the rest of the chain +// val arr = convertToArrayBuffer(es) +// val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) +// arrpit.split +// } +// } else Seq(this.asInstanceOf[IterRepr]) +// +// private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = { +// val buff = mutable.ArrayBuffer[Entry]() +// var curr = chainhead +// while (curr ne null) { +// buff += curr +// curr = curr.next +// } +// // println("converted " + remaining + " element iterator into buffer: " + buff) +// buff map { e => entry2item(e) } +// } +// +// protected def countElems(from: Int, until: Int) = { +// var c = 0 +// var idx = from +// var es: Entry = null +// while (idx < until) { +// es = itertable(idx).asInstanceOf[Entry] +// while (es ne null) { +// c += 1 +// es = es.next +// } +// idx += 1 +// } +// c +// } +// +// protected def countBucketSizes(fromBucket: Int, untilBucket: Int) = { +// var c = 0 +// var idx = fromBucket +// while (idx < untilBucket) { +// c += sizemap(idx) +// idx += 1 +// } +// c +// } +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala index db3d13a5..decdd8e2 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala @@ -10,41 +10,41 @@ package scala package collection package parallel.mutable -import scala.collection.generic._ -import scala.collection.parallel.{ ParIterableLike, Combiner } - -/** A template trait for mutable parallel iterable collections. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[T] extends scala.collection.GenIterable[T] - with scala.collection.parallel.ParIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] - with Mutable { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - //protected[this] override def newBuilder = ParIterable.newBuilder[T] - - // if `mutable.ParIterableLike` is introduced, please move these methods there - override def toIterable: ParIterable[T] = this - - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) - - def seq: scala.collection.mutable.Iterable[T] -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] -} +//import scala.collection.generic._ +//import scala.collection.parallel.{ ParIterableLike, Combiner } +// +///** A template trait for mutable parallel iterable collections. 
+// * +// * $paralleliterableinfo +// * +// * $sideeffects +// * +// * @tparam T the element type of the collection +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParIterable[T] extends scala.collection.GenIterable[T] +// with scala.collection.parallel.ParIterable[T] +// with GenericParTemplate[T, ParIterable] +// with ParIterableLike[T, ParIterable[T], Iterable[T]] +// with Mutable { +// override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable +// //protected[this] override def newBuilder = ParIterable.newBuilder[T] +// +// // if `mutable.ParIterableLike` is introduced, please move these methods there +// override def toIterable: ParIterable[T] = this +// +// override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) +// +// def seq: scala.collection.mutable.Iterable[T] +//} +// +///** $factoryInfo +// */ +//object ParIterable extends ParFactory[ParIterable] { +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] +// +// def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] +// def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala index a9e6da18..1e6385c8 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala @@ -10,80 +10,80 @@ package scala package collection package parallel.mutable -import scala.collection.generic._ -import scala.collection.parallel.Combiner - -/** A template trait for mutable parallel maps. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, V] -extends GenMap[K, V] - with parallel.ParMap[K, V] - with ParIterable[(K, V)] - with GenericParMapTemplate[K, V, ParMap] - with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]] -{ - - protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] - - override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - override def empty: ParMap[K, V] = new ParHashMap[K, V] - - def seq: scala.collection.mutable.Map[K, V] - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d) - - /** The same map with a given default value. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d) -} - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = ParHashMapCombiner.apply[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - class WithDefault[K, V](underlying: ParMap[K, V], d: K => V) - extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] { - override def += (kv: (K, V)) = {underlying += kv; this} - def -= (key: K) = {underlying -= key; this} - override def empty = new WithDefault(underlying.empty, d) - override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) - override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def seq = underlying.seq.withDefault(d) - def clear() = underlying.clear() - def put(key: K, value: V): Option[V] = underlying.put(key, value) - - /** If these methods aren't overridden to thread through the underlying map, - * successive calls to withDefault* have no effect. - */ - override def withDefault(d: K => V): ParMap[K, V] = new WithDefault[K, V](underlying, d) - override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d) - } -} +//import scala.collection.generic._ +//import scala.collection.parallel.Combiner +// +///** A template trait for mutable parallel maps. +// * +// * $sideeffects +// * +// * @tparam K the key type of the map +// * @tparam V the value type of the map +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParMap[K, V] +//extends GenMap[K, V] +// with parallel.ParMap[K, V] +// with ParIterable[(K, V)] +// with GenericParMapTemplate[K, V, ParMap] +// with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]] +//{ +// +// protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] +// +// override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap +// +// override def empty: ParMap[K, V] = new ParHashMap[K, V] +// +// def seq: scala.collection.mutable.Map[K, V] +// +// override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) +// +// /** The same map with a given default function. +// * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. +// * +// * Invoking transformer methods (e.g. `map`) will not preserve the default value. +// * +// * @param d the function mapping keys to values, used for non-present keys +// * @return a wrapper of the map with a default value +// */ +// def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d) +// +// /** The same map with a given default value. +// * +// * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
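
The `WithDefault` wrapper above must thread every call through the same underlying map so the default survives; a stripped-down sketch of that delegation pattern over a plain `mutable.Map` (names simplified from the original):

```scala
import scala.collection.mutable

class Defaulted[K, V](underlying: mutable.Map[K, V], d: K => V) {
  def apply(key: K): V = underlying.getOrElse(key, d(key))
  def +=(kv: (K, V)): this.type = { underlying += kv; this } // mutate through
  // Re-wrap the same underlying map so chained withDefault calls take effect.
  def withDefault(d2: K => V): Defaulted[K, V] = new Defaulted(underlying, d2)
}
```
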
+// * +// * @param d default value used for non-present keys +// * @return a wrapper of the map with a default value +// */ +// def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d) +//} +// +//object ParMap extends ParMapFactory[ParMap] { +// def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] +// +// def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = ParHashMapCombiner.apply[K, V] +// +// implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] +// +// class WithDefault[K, V](underlying: ParMap[K, V], d: K => V) +// extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] { +// override def += (kv: (K, V)) = {underlying += kv; this} +// def -= (key: K) = {underlying -= key; this} +// override def empty = new WithDefault(underlying.empty, d) +// override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) +// override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) +// override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) +// override def seq = underlying.seq.withDefault(d) +// def clear() = underlying.clear() +// def put(key: K, value: V): Option[V] = underlying.put(key, value) +// +// /** If these methods aren't overridden to thread through the underlying map, +// * successive calls to withDefault* have no effect. +// */ +// override def withDefault(d: K => V): ParMap[K, V] = new WithDefault[K, V](underlying, d) +// override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d) +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala index caec5eae..16cc7675 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala @@ -10,45 +10,45 @@ package scala package collection.parallel package mutable -import scala.collection.generic._ -import scala.collection.mutable.Cloneable -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable - -/** A template trait for mutable parallel maps. This trait is to be mixed in - * with concrete parallel maps to override the representation type. 
- * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * @define Coll `ParMap` - * @define coll parallel map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMapLike[K, - V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]] -extends scala.collection.GenMapLike[K, V, Repr] - with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential] - with Growable[(K, V)] - with Shrinkable[K] - with Cloneable[Repr] -{ - // note: should not override toMap - - def put(key: K, value: V): Option[V] - - def +=(kv: (K, V)): this.type - - def -=(key: K): this.type - - def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[ParMap[K, U]] += kv - - def -(key: K) = this.clone() -= key - - def clear(): Unit -} +//import scala.collection.generic._ +//import scala.collection.mutable.Cloneable +//import scala.collection.generic.Growable +//import scala.collection.generic.Shrinkable +// +///** A template trait for mutable parallel maps. This trait is to be mixed in +// * with concrete parallel maps to override the representation type. +// * +// * $sideeffects +// * +// * @tparam K the key type of the map +// * @tparam V the value type of the map +// * @define Coll `ParMap` +// * @define coll parallel map +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParMapLike[K, +// V, +// +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], +// +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]] +//extends scala.collection.GenMapLike[K, V, Repr] +// with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential] +// with Growable[(K, V)] +// with Shrinkable[K] +// with Cloneable[Repr] +//{ +// // note: should not override toMap +// +// def put(key: K, value: V): Option[V] +// +// def +=(kv: (K, V)): this.type +// +// def -=(key: K): this.type +// +// def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[ParMap[K, U]] += kv +// +// def -(key: K) = this.clone() -= key +// +// def clear(): Unit +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala index 0a9574ae..65acdbe9 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala @@ -9,44 +9,44 @@ package scala package collection.parallel.mutable -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner - -/** A mutable variant of `ParSeq`. 
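
Both `ParMapLike` variants above derive the non-destructive `+`/`-` from `clone()` plus the destructive `+=`/`-=`. The same clone-then-mutate idiom on a sequential map:

```scala
import scala.collection.mutable

// Non-destructive "+" via clone-then-mutate, as in ParMapLike above.
def plus[K, V](m: mutable.Map[K, V], kv: (K, V)): mutable.Map[K, V] =
  m.clone() += kv    // m itself is left unchanged

val m  = mutable.Map(1 -> "a")
val m2 = plus(m, 2 -> "b")   // m still has one entry, m2 has two
```
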
- * - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T] - with ParIterable[T] - with scala.collection.parallel.ParSeq[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] { -self => - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - //protected[this] override def newBuilder = ParSeq.newBuilder[T] - - def update(i: Int, elem: T): Unit - - def seq: scala.collection.mutable.Seq[T] - - override def toSeq: ParSeq[T] = this -} - - -/** $factoryInfo - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] -} +//import scala.collection.generic.GenericParTemplate +//import scala.collection.generic.GenericCompanion +//import scala.collection.generic.GenericParCompanion +//import scala.collection.generic.CanCombineFrom +//import scala.collection.generic.ParFactory +//import scala.collection.parallel.ParSeqLike +//import scala.collection.parallel.Combiner +// +///** A mutable variant of `ParSeq`. +// * +// * @define Coll `mutable.ParSeq` +// * @define coll mutable parallel sequence +// */ +//trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T] +// with ParIterable[T] +// with scala.collection.parallel.ParSeq[T] +// with GenericParTemplate[T, ParSeq] +// with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] { +//self => +// override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq +// //protected[this] override def newBuilder = ParSeq.newBuilder[T] +// +// def update(i: Int, elem: T): Unit +// +// def seq: scala.collection.mutable.Seq[T] +// +// override def toSeq: ParSeq[T] = this +//} +// +// +///** $factoryInfo +// * @define Coll `mutable.ParSeq` +// * @define coll mutable parallel sequence +// */ +//object ParSeq extends ParFactory[ParSeq] { +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] +// +// def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] +// +// def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala index 5ee30ac7..1dcfb16d 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala @@ -9,35 +9,35 @@ package scala package collection.parallel.mutable -import scala.collection.generic._ -import scala.collection.parallel.Combiner - -/** A mutable variant of `ParSet`. 
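
The factories above hand out a `Combiner` wherever a builder is needed; since `Combiner` is also a `Builder`, sequential element-by-element filling works unchanged. A sketch against this library's `Combiner` type (the driver loop and size hint are illustrative):

```scala
import scala.collection.parallel.Combiner

// Fill a combiner sequentially and assemble the result — the protocol
// the newBuilder/newCombiner factories above rely on.
def fill[T, Repr](cb: Combiner[T, Repr], xs: Seq[T]): Repr = {
  cb.sizeHint(xs.length)   // optional hint, supported by the combiners above
  xs.foreach(cb += _)
  cb.result()
}
```
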
- * - * @author Aleksandar Prokopec - */ -trait ParSet[T] -extends scala.collection/*.mutable*/.GenSet[T] - with ParIterable[T] - with scala.collection.parallel.ParSet[T] - with GenericParTemplate[T, ParSet] - with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]] -{ -self => - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - override def empty: ParSet[T] = ParHashSet() - def seq: scala.collection.mutable.Set[T] -} - - -/** $factoryInfo - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -object ParSet extends ParSetFactory[ParSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] - - override def newBuilder[T]: Combiner[T, ParSet[T]] = ParHashSet.newBuilder - - override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner -} +//import scala.collection.generic._ +//import scala.collection.parallel.Combiner +// +///** A mutable variant of `ParSet`. +// * +// * @author Aleksandar Prokopec +// */ +//trait ParSet[T] +//extends scala.collection/*.mutable*/.GenSet[T] +// with ParIterable[T] +// with scala.collection.parallel.ParSet[T] +// with GenericParTemplate[T, ParSet] +// with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]] +//{ +//self => +// override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet +// override def empty: ParSet[T] = ParHashSet() +// def seq: scala.collection.mutable.Set[T] +//} +// +// +///** $factoryInfo +// * @define Coll `mutable.ParSet` +// * @define coll mutable parallel set +// */ +//object ParSet extends ParSetFactory[ParSet] { +// implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] +// +// override def newBuilder[T]: Combiner[T, ParSet[T]] = ParHashSet.newBuilder +// +// override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala index 84f77d91..4cce740c 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala @@ -10,43 +10,43 @@ package scala package collection package parallel.mutable -import scala.collection.mutable.Cloneable -import scala.collection.GenSetLike -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable - -/** A template trait for mutable parallel sets. This trait is mixed in with concrete - * parallel sets to override the representation type. 
- * - * $sideeffects - * - * @tparam T the element type of the set - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSetLike[T, - +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], - +Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]] -extends GenSetLike[T, Repr] - with scala.collection.parallel.ParIterableLike[T, Repr, Sequential] - with scala.collection.parallel.ParSetLike[T, Repr, Sequential] - with Growable[T] - with Shrinkable[T] - with Cloneable[Repr] -{ -self => - override def empty: Repr - - def +=(elem: T): this.type - - def -=(elem: T): this.type - - def +(elem: T) = this.clone() += elem - - def -(elem: T) = this.clone() -= elem - - // note: should not override toSet -} +//import scala.collection.mutable.Cloneable +//import scala.collection.GenSetLike +//import scala.collection.generic.Growable +//import scala.collection.generic.Shrinkable +// +///** A template trait for mutable parallel sets. This trait is mixed in with concrete +// * parallel sets to override the representation type. +// * +// * $sideeffects +// * +// * @tparam T the element type of the set +// * @define Coll `mutable.ParSet` +// * @define coll mutable parallel set +// * +// * @author Aleksandar Prokopec +// * @since 2.9 +// */ +//trait ParSetLike[T, +// +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], +// +Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]] +//extends GenSetLike[T, Repr] +// with scala.collection.parallel.ParIterableLike[T, Repr, Sequential] +// with scala.collection.parallel.ParSetLike[T, Repr, Sequential] +// with Growable[T] +// with Shrinkable[T] +// with Cloneable[Repr] +//{ +//self => +// override def empty: Repr +// +// def +=(elem: T): this.type +// +// def -=(elem: T): this.type +// +// def +(elem: T) = this.clone() += elem +// +// def -(elem: T) = this.clone() -= elem +// +// // note: should not override toSet +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala index 3ae2728d..56fd49db 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala @@ -9,159 +9,159 @@ package scala package collection.parallel.mutable -import scala.collection.generic._ -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.parallel.Task -import scala.collection.concurrent.BasicNode -import scala.collection.concurrent.TNode -import scala.collection.concurrent.LNode -import scala.collection.concurrent.CNode -import scala.collection.concurrent.SNode -import scala.collection.concurrent.INode -import scala.collection.concurrent.TrieMap -import scala.collection.concurrent.TrieMapIterator - -/** Parallel TrieMap collection. - * - * It has its bulk operations parallelized, but uses the snapshot operation - * to create the splitter. This means that parallel bulk operations can be - * called concurrently with the modifications. - * - * @author Aleksandar Prokopec - * @since 2.10 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]] - * section on `ParTrieMap` for more information. 
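
The snapshot behaviour the Scaladoc above relies on is observable on the sequential `TrieMap` directly: a read-only snapshot is constant-time to take and isolated from later writes, which is what lets bulk operations run concurrently with modifications:

```scala
import scala.collection.concurrent.TrieMap

val ct = TrieMap("a" -> 1, "b" -> 2)
val snap = ct.readOnlySnapshot()   // constant-time, lazily copied
ct.put("c", 3)                     // later (or concurrent) updates...
assert(snap.size == 2)             // ...are invisible to the snapshot
assert(ct.size == 3)
```
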
- */ -final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParTrieMap] - with ParMapLike[K, V, ParTrieMap[K, V], TrieMap[K, V]] - with ParTrieMapCombiner[K, V] - with Serializable -{ - def this() = this(new TrieMap) - - override def mapCompanion: GenericParMapCompanion[ParTrieMap] = ParTrieMap - - override def empty: ParTrieMap[K, V] = ParTrieMap.empty - - protected[this] override def newCombiner = ParTrieMap.newCombiner - - override def seq = ctrie - - def splitter = new ParTrieMapSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[TrieMap[K, V]], true) - - override def clear() = ctrie.clear() - - def result = this - - def get(key: K): Option[V] = ctrie.get(key) - - def put(key: K, value: V): Option[V] = ctrie.put(key, value) - - def update(key: K, value: V): Unit = ctrie.update(key, value) - - def remove(key: K): Option[V] = ctrie.remove(key) - - def +=(kv: (K, V)): this.type = { - ctrie.+=(kv) - this - } - - def -=(key: K): this.type = { - ctrie.-=(key) - this - } - - override def size = { - val in = ctrie.readRoot() - val r = in.gcasRead(ctrie) - r match { - case tn: TNode[_, _] => tn.cachedSize(ctrie) - case ln: LNode[_, _] => ln.cachedSize(ctrie) - case cn: CNode[_, _] => - tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array)) - cn.cachedSize(ctrie) - } - } - - override def stringPrefix = "ParTrieMap" - - /* tasks */ - - /** Computes TrieMap size in parallel. */ - class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] { - var result = -1 - def leaf(prev: Option[Int]) = { - var sz = 0 - var i = offset - val until = offset + howmany - while (i < until) { - array(i) match { - case sn: SNode[_, _] => sz += 1 - case in: INode[K, V] => sz += in.cachedSize(ctrie) - } - i += 1 - } - result = sz - } - def split = { - val fp = howmany / 2 - Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array)) - } - def shouldSplitFurther = howmany > 1 - override def merge(that: Size) = result = result + that.result - } -} - -private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean) -extends TrieMapIterator[K, V](lev, ct, mustInit) - with IterableSplitter[(K, V)] -{ - // only evaluated if `remaining` is invoked (which is not used by most tasks) - lazy val totalsize = new ParTrieMap(ct).size - var iterated = 0 - - protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit) - - override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = { - val maxsplits = 3 + Integer.highestOneBit(parallelismLevel) - level < maxsplits - } - - def dup = { - val it = newIterator(0, ct, _mustInit = false) - dupTo(it) - it.iterated = this.iterated - it - } - - override def next() = { - iterated += 1 - super.next() - } - - def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] - - override def isRemainingCheap = false - - def remaining: Int = totalsize - iterated -} - -/** Only used within the `ParTrieMap`. 
*/ -private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] { - - def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = - if (this eq other) this - else throw new UnsupportedOperationException("This shouldn't have been called in the first place.") - - override def canBeShared = true -} - -object ParTrieMap extends ParMapFactory[ParTrieMap] { - def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V] - def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V] -} +//import scala.collection.generic._ +//import scala.collection.parallel.Combiner +//import scala.collection.parallel.IterableSplitter +//import scala.collection.parallel.Task +//import scala.collection.concurrent.BasicNode +//import scala.collection.concurrent.TNode +//import scala.collection.concurrent.LNode +//import scala.collection.concurrent.CNode +//import scala.collection.concurrent.SNode +//import scala.collection.concurrent.INode +//import scala.collection.concurrent.TrieMap +//import scala.collection.concurrent.TrieMapIterator +// +///** Parallel TrieMap collection. +// * +// * It has its bulk operations parallelized, but uses the snapshot operation +// * to create the splitter. This means that parallel bulk operations can be +// * called concurrently with the modifications. +// * +// * @author Aleksandar Prokopec +// * @since 2.10 +// * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]] +// * section on `ParTrieMap` for more information. +// */ +//final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V]) +//extends ParMap[K, V] +// with GenericParMapTemplate[K, V, ParTrieMap] +// with ParMapLike[K, V, ParTrieMap[K, V], TrieMap[K, V]] +// with ParTrieMapCombiner[K, V] +// with Serializable +//{ +// def this() = this(new TrieMap) +// +// override def mapCompanion: GenericParMapCompanion[ParTrieMap] = ParTrieMap +// +// override def empty: ParTrieMap[K, V] = ParTrieMap.empty +// +// protected[this] override def newCombiner = ParTrieMap.newCombiner +// +// override def seq = ctrie +// +// def splitter = new ParTrieMapSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[TrieMap[K, V]], true) +// +// override def clear() = ctrie.clear() +// +// def result = this +// +// def get(key: K): Option[V] = ctrie.get(key) +// +// def put(key: K, value: V): Option[V] = ctrie.put(key, value) +// +// def update(key: K, value: V): Unit = ctrie.update(key, value) +// +// def remove(key: K): Option[V] = ctrie.remove(key) +// +// def +=(kv: (K, V)): this.type = { +// ctrie.+=(kv) +// this +// } +// +// def -=(key: K): this.type = { +// ctrie.-=(key) +// this +// } +// +// override def size = { +// val in = ctrie.readRoot() +// val r = in.gcasRead(ctrie) +// r match { +// case tn: TNode[_, _] => tn.cachedSize(ctrie) +// case ln: LNode[_, _] => ln.cachedSize(ctrie) +// case cn: CNode[_, _] => +// tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array)) +// cn.cachedSize(ctrie) +// } +// } +// +// override def stringPrefix = "ParTrieMap" +// +// /* tasks */ +// +// /** Computes TrieMap size in parallel. 
*/ +// class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] { +// var result = -1 +// def leaf(prev: Option[Int]) = { +// var sz = 0 +// var i = offset +// val until = offset + howmany +// while (i < until) { +// array(i) match { +// case sn: SNode[_, _] => sz += 1 +// case in: INode[K, V] => sz += in.cachedSize(ctrie) +// } +// i += 1 +// } +// result = sz +// } +// def split = { +// val fp = howmany / 2 +// Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array)) +// } +// def shouldSplitFurther = howmany > 1 +// override def merge(that: Size) = result = result + that.result +// } +//} +// +//private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean) +//extends TrieMapIterator[K, V](lev, ct, mustInit) +// with IterableSplitter[(K, V)] +//{ +// // only evaluated if `remaining` is invoked (which is not used by most tasks) +// lazy val totalsize = new ParTrieMap(ct).size +// var iterated = 0 +// +// protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit) +// +// override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = { +// val maxsplits = 3 + Integer.highestOneBit(parallelismLevel) +// level < maxsplits +// } +// +// def dup = { +// val it = newIterator(0, ct, _mustInit = false) +// dupTo(it) +// it.iterated = this.iterated +// it +// } +// +// override def next() = { +// iterated += 1 +// super.next() +// } +// +// def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] +// +// override def isRemainingCheap = false +// +// def remaining: Int = totalsize - iterated +//} +// +///** Only used within the `ParTrieMap`. */ +//private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] { +// +// def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = +// if (this eq other) this +// else throw new UnsupportedOperationException("This shouldn't have been called in the first place.") +// +// override def canBeShared = true +//} +// +//object ParTrieMap extends ParMapFactory[ParTrieMap] { +// def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V] +// def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V] +// +// implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V] +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala index e43b2778..cef12177 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala @@ -9,78 +9,78 @@ package scala package collection.parallel.mutable -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.ArrayBuffer -import scala.collection.parallel.Task - -/** An array combiner that uses a chain of arraybuffers to store elements. */ -trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] { - - override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz) - - // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden. 
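
The `Size` task above follows the library's generic split/merge recipe: halve the index range until `shouldSplitFurther` fails, count leaves sequentially, and sum partial results on merge. The same shape with plain recursion standing in for `Task` scheduling:

```scala
// Sequential skeleton of the fork/join Size computation above.
def parCount[A](xs: Array[A], from: Int, until: Int)(weight: A => Int): Int =
  if (until - from <= 1) {               // leaf: too small to split further
    var sz = 0
    var i = from
    while (i < until) { sz += weight(xs(i)); i += 1 }
    sz
  } else {
    val fp = (until - from) / 2          // split in half, as Size.split does
    parCount(xs, from, from + fp)(weight) +
      parCount(xs, from + fp, until)(weight)   // merge: sum the two halves
  }
```
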
- final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) - - def allocateAndCopy = if (chain.size > 1) { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] - - combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size)) - - new ParArray(arrayseq) - } else { // optimisation if there is only 1 array - new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size)) - } - - override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain - - /* tasks */ - - class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] { - var result = () - def leaf(prev: Option[Unit]) = if (howmany > 0) { - var totalleft = howmany - val (stbuff, stind) = findStart(offset) - var buffind = stbuff - var ind = stind - var arrayIndex = offset - while (totalleft > 0) { - val currbuff = chain(buffind) - val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind - val until = ind + chunksize - - copyChunk(currbuff.internalArray, ind, array, arrayIndex, until) - arrayIndex += chunksize - ind += chunksize - - totalleft -= chunksize - buffind += 1 - ind = 0 - } - } - private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int): Unit = { - Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart) - } - private def findStart(pos: Int) = { - var left = pos - var buffind = 0 - while (left >= chain(buffind).size) { - left -= chain(buffind).size - buffind += 1 - } - (buffind, left) - } - def split = { - val fp = howmany / 2 - List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) - } -} - -object ResizableParArrayCombiner { - def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = { - new { val chain = c } with ResizableParArrayCombiner[T] // was: with EnvironmentPassingCombiner[T, ParArray[T]] - } - def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T]) -} +//import scala.collection.mutable.ArraySeq +//import scala.collection.mutable.ArrayBuffer +//import scala.collection.parallel.Task +// +///** An array combiner that uses a chain of arraybuffers to store elements. */ +//trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] { +// +// override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz) +// +// // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden. 
+// final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) +// +// def allocateAndCopy = if (chain.size > 1) { +// val arrayseq = new ArraySeq[T](size) +// val array = arrayseq.array.asInstanceOf[Array[Any]] +// +// combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size)) +// +// new ParArray(arrayseq) +// } else { // optimisation if there is only 1 array +// new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size)) +// } +// +// override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain +// +// /* tasks */ +// +// class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] { +// var result = () +// def leaf(prev: Option[Unit]) = if (howmany > 0) { +// var totalleft = howmany +// val (stbuff, stind) = findStart(offset) +// var buffind = stbuff +// var ind = stind +// var arrayIndex = offset +// while (totalleft > 0) { +// val currbuff = chain(buffind) +// val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind +// val until = ind + chunksize +// +// copyChunk(currbuff.internalArray, ind, array, arrayIndex, until) +// arrayIndex += chunksize +// ind += chunksize +// +// totalleft -= chunksize +// buffind += 1 +// ind = 0 +// } +// } +// private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int): Unit = { +// Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart) +// } +// private def findStart(pos: Int) = { +// var left = pos +// var buffind = 0 +// while (left >= chain(buffind).size) { +// left -= chain(buffind).size +// buffind += 1 +// } +// (buffind, left) +// } +// def split = { +// val fp = howmany / 2 +// List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp)) +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) +// } +//} +// +//object ResizableParArrayCombiner { +// def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = { +// new { val chain = c } with ResizableParArrayCombiner[T] // was: with EnvironmentPassingCombiner[T, ParArray[T]] +// } +// def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T]) +//} diff --git a/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala index ae2c8139..a043bcb1 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala @@ -9,93 +9,93 @@ package scala package collection.parallel.mutable -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.DoublingUnrolledBuffer -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.parallel.Combiner -import scala.collection.parallel.Task - -/** An array combiner that uses doubling unrolled buffers to store elements. */ -trait UnrolledParArrayCombiner[T] -extends Combiner[T, ParArray[T]] { -//self: EnvironmentPassingCombiner[T, ParArray[T]] => - // because size is doubling, random access is O(logn)! 
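
`CopyChainToArray.findStart` above translates a global element index into a (buffer, local offset) pair by subtracting whole buffer sizes as it walks the chain. The logic in isolation:

```scala
import scala.collection.mutable.ArrayBuffer

// Locate global position `pos` inside a chain of buffers, as findStart
// does above: skip whole buffers, then return (buffer index, local offset).
def findStart[T](chain: ArrayBuffer[ArrayBuffer[T]], pos: Int): (Int, Int) = {
  var left = pos
  var buffind = 0
  while (left >= chain(buffind).size) {
    left -= chain(buffind).size
    buffind += 1
  }
  (buffind, left)
}
```
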
- val buff = new DoublingUnrolledBuffer[Any] - - def +=(elem: T) = { - buff += elem - this - } - - def result = { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] - - combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size)) - - new ParArray(arrayseq) - } - - def clear(): Unit = { - buff.clear() - } - - override def sizeHint(sz: Int) = { - buff.lastPtr.next = new Unrolled(0, new Array[Any](sz), null, buff) - buff.lastPtr = buff.lastPtr.next - } - - def combine[N <: T, NewTo >: ParArray[T]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = other match { - case that if that eq this => this // just return this - case that: UnrolledParArrayCombiner[t] => - buff concat that.buff - this - case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.") - } - - def size = buff.size - - /* tasks */ - - class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int) - extends Task[Unit, CopyUnrolledToArray] { - var result = () - - def leaf(prev: Option[Unit]) = if (howmany > 0) { - var totalleft = howmany - val (startnode, startpos) = findStart(offset) - var curr = startnode - var pos = startpos - var arroffset = offset - while (totalleft > 0) { - val lefthere = scala.math.min(totalleft, curr.size - pos) - Array.copy(curr.array, pos, array, arroffset, lefthere) - // println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr) - totalleft -= lefthere - arroffset += lefthere - pos = 0 - curr = curr.next - } - } - private def findStart(pos: Int) = { - var left = pos - var node = buff.headPtr - while ((left - node.size) >= 0) { - left -= node.size - node = node.next - } - (node, left) - } - def split = { - val fp = howmany / 2 - List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) - override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")" - } -} - -object UnrolledParArrayCombiner { - def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]] -} - +//import scala.collection.mutable.ArraySeq +//import scala.collection.mutable.DoublingUnrolledBuffer +//import scala.collection.mutable.UnrolledBuffer.Unrolled +//import scala.collection.parallel.Combiner +//import scala.collection.parallel.Task +// +///** An array combiner that uses doubling unrolled buffers to store elements. */ +//trait UnrolledParArrayCombiner[T] +//extends Combiner[T, ParArray[T]] { +////self: EnvironmentPassingCombiner[T, ParArray[T]] => +// // because size is doubling, random access is O(logn)! 
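
The `O(logn)` random-access remark in the comment above follows from the chunk layout: if chunk sizes double (assumed here to start at size 1), locating a global index skips at most about log2(n) whole chunks:

```scala
// Map a global index to (chunk, offset) when chunk k holds 2^k elements.
// The loop runs at most ~log2(i) times, hence O(log n) random access.
def locate(i: Int): (Int, Int) = {
  var chunk = 0
  var chunkSize = 1
  var left = i
  while (left >= chunkSize) {
    left -= chunkSize
    chunk += 1
    chunkSize *= 2     // sizes double chunk to chunk
  }
  (chunk, left)
}
```
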
+// val buff = new DoublingUnrolledBuffer[Any] +// +// def +=(elem: T) = { +// buff += elem +// this +// } +// +// def result = { +// val arrayseq = new ArraySeq[T](size) +// val array = arrayseq.array.asInstanceOf[Array[Any]] +// +// combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size)) +// +// new ParArray(arrayseq) +// } +// +// def clear(): Unit = { +// buff.clear() +// } +// +// override def sizeHint(sz: Int) = { +// buff.lastPtr.next = new Unrolled(0, new Array[Any](sz), null, buff) +// buff.lastPtr = buff.lastPtr.next +// } +// +// def combine[N <: T, NewTo >: ParArray[T]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = other match { +// case that if that eq this => this // just return this +// case that: UnrolledParArrayCombiner[t] => +// buff concat that.buff +// this +// case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.") +// } +// +// def size = buff.size +// +// /* tasks */ +// +// class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int) +// extends Task[Unit, CopyUnrolledToArray] { +// var result = () +// +// def leaf(prev: Option[Unit]) = if (howmany > 0) { +// var totalleft = howmany +// val (startnode, startpos) = findStart(offset) +// var curr = startnode +// var pos = startpos +// var arroffset = offset +// while (totalleft > 0) { +// val lefthere = scala.math.min(totalleft, curr.size - pos) +// Array.copy(curr.array, pos, array, arroffset, lefthere) +// // println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr) +// totalleft -= lefthere +// arroffset += lefthere +// pos = 0 +// curr = curr.next +// } +// } +// private def findStart(pos: Int) = { +// var left = pos +// var node = buff.headPtr +// while ((left - node.size) >= 0) { +// left -= node.size +// node = node.next +// } +// (node, left) +// } +// def split = { +// val fp = howmany / 2 +// List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp)) +// } +// def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) +// override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")" +// } +//} +// +//object UnrolledParArrayCombiner { +// def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]] +//} +// diff --git a/core/src/main/scala/scala/collection/parallel/mutable/package.scala b/core/src/main/scala/scala/collection/parallel/mutable/package.scala index 13a030cb..0816e528 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/package.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/package.scala @@ -9,69 +9,69 @@ package scala package collection.parallel -import scala.collection.mutable.ArrayBuffer -import scala.collection.mutable.ArraySeq -import scala.collection.generic.Sizing - -package object mutable { - /* aliases */ - type ParArrayCombiner[T] = ResizableParArrayCombiner[T] - val ParArrayCombiner = ResizableParArrayCombiner -} - -package mutable { - /* classes and traits */ - private[mutable] trait SizeMapUtils { - - protected def calcNumElems(from: Int, until: Int, tableLength: Int, sizeMapBucketSize: Int) = { - // find the first bucket - val fbindex = from / sizeMapBucketSize - - // find the last bucket - val lbindex = until / sizeMapBucketSize - // note to self: FYI if you define lbindex as 
from / sizeMapBucketSize, the first branch - // below always triggers and tests pass, so you spend a great day benchmarking and profiling - - if (fbindex == lbindex) { - // if first and last are the same, just count between `from` and `until` - // return this count - countElems(from, until) - } else { - // otherwise count in first, then count in last - val fbuntil = ((fbindex + 1) * sizeMapBucketSize) min tableLength - val fbcount = countElems(from, fbuntil) - val lbstart = lbindex * sizeMapBucketSize - val lbcount = countElems(lbstart, until) - - // and finally count the elements in all the buckets between first and last using a sizemap - val inbetween = countBucketSizes(fbindex + 1, lbindex) - - // return the sum - fbcount + inbetween + lbcount - } - } - - protected def countElems(from: Int, until: Int): Int - - protected def countBucketSizes(fromBucket: Int, untilBucket: Int): Int - } - - /* hack-arounds */ - private[mutable] class ExposedArrayBuffer[T] extends ArrayBuffer[T] with Sizing { - def internalArray = array - def setInternalSize(s: Int) = size0 = s - override def sizeHint(len: Int) = { - if (len > size && len >= 1) { - val newarray = new Array[AnyRef](len) - Array.copy(array, 0, newarray, 0, size0) - array = newarray - } - } - } - - private[mutable] class ExposedArraySeq[T](arr: Array[AnyRef], sz: Int) extends ArraySeq[T](sz) { - override val array = arr - override val length = sz - override def stringPrefix = "ArraySeq" - } -} +//import scala.collection.mutable.ArrayBuffer +//import scala.collection.mutable.ArraySeq +//import scala.collection.generic.Sizing +// +//package object mutable { +// /* aliases */ +// type ParArrayCombiner[T] = ResizableParArrayCombiner[T] +// val ParArrayCombiner = ResizableParArrayCombiner +//} +// +//package mutable { +// /* classes and traits */ +// private[mutable] trait SizeMapUtils { +// +// protected def calcNumElems(from: Int, until: Int, tableLength: Int, sizeMapBucketSize: Int) = { +// // find the first bucket +// val fbindex = from / sizeMapBucketSize +// +// // find the last bucket +// val lbindex = until / sizeMapBucketSize +// // note to self: FYI if you define lbindex as from / sizeMapBucketSize, the first branch +// // below always triggers and tests pass, so you spend a great day benchmarking and profiling +// +// if (fbindex == lbindex) { +// // if first and last are the same, just count between `from` and `until` +// // return this count +// countElems(from, until) +// } else { +// // otherwise count in first, then count in last +// val fbuntil = ((fbindex + 1) * sizeMapBucketSize) min tableLength +// val fbcount = countElems(from, fbuntil) +// val lbstart = lbindex * sizeMapBucketSize +// val lbcount = countElems(lbstart, until) +// +// // and finally count the elements in all the buckets between first and last using a sizemap +// val inbetween = countBucketSizes(fbindex + 1, lbindex) +// +// // return the sum +// fbcount + inbetween + lbcount +// } +// } +// +// protected def countElems(from: Int, until: Int): Int +// +// protected def countBucketSizes(fromBucket: Int, untilBucket: Int): Int +// } +// +// /* hack-arounds */ +// private[mutable] class ExposedArrayBuffer[T] extends ArrayBuffer[T] with Sizing { +// def internalArray = array +// def setInternalSize(s: Int) = size0 = s +// override def sizeHint(len: Int) = { +// if (len > size && len >= 1) { +// val newarray = new Array[AnyRef](len) +// Array.copy(array, 0, newarray, 0, size0) +// array = newarray +// } +// } +// } +// +// private[mutable] class 
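
`calcNumElems` above splits the count into three parts: the tail of a partial first bucket, the head of a partial last bucket, and the fully covered buckets in between, which are summed from the size map instead of being walked. The arithmetic in isolation, with the two counting strategies passed in as functions:

```scala
// Three-part count over [from, until) with bucket granularity `bucket`:
// walk the two partial edge buckets, read the size map for the rest.
def calcNumElems(from: Int, until: Int, tableLength: Int, bucket: Int)(
    countElems: (Int, Int) => Int,     // walks table slots directly
    bucketSizes: (Int, Int) => Int     // reads the size map
): Int = {
  val fb = from / bucket                   // first bucket touched
  val lb = until / bucket                  // last bucket touched
  if (fb == lb) countElems(from, until)    // everything inside one bucket
  else {
    val fbUntil = ((fb + 1) * bucket) min tableLength
    countElems(from, fbUntil) +            // tail of the first bucket
      bucketSizes(fb + 1, lb) +            // whole buckets via size map
      countElems(lb * bucket, until)       // head of the last bucket
  }
}
```
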
ExposedArraySeq[T](arr: Array[AnyRef], sz: Int) extends ArraySeq[T](sz) { +// override val array = arr +// override val length = sz +// override def stringPrefix = "ArraySeq" +// } +//} diff --git a/core/src/main/scala/scala/collection/parallel/package.scala b/core/src/main/scala/scala/collection/parallel/package.scala index 05b4ad33..63152e36 100644 --- a/core/src/main/scala/scala/collection/parallel/package.scala +++ b/core/src/main/scala/scala/collection/parallel/package.scala @@ -9,9 +9,9 @@ package scala package collection -import scala.collection.generic.CanBuildFrom +import scala.collection.generic.OldCanBuildFrom import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.mutable.ParArray +//import scala.collection.parallel.mutable.ParArray import scala.collection.mutable.UnrolledBuffer import scala.annotation.unchecked.uncheckedVariance import scala.language.implicitConversions @@ -39,39 +39,39 @@ package object parallel { def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = { c match { - case pc: ParIterableLike[_, _, _] => pc.tasksupport = t + case pc: ParIterableLike[_, _, _, _] => pc.tasksupport = t case _ => // do nothing } c } - /** Adds toParArray method to collection classes. */ - implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.GenTraversableOnce[T]) { - def toParArray = { - val t = asGto(c) - if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]] - else { - val it = t.toIterator - val cb = mutable.ParArrayCombiner[T]() - while (it.hasNext) cb += it.next - cb.result - } - } - } +// /** Adds toParArray method to collection classes. */ +// implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.GenTraversableOnce[T]) { +// def toParArray = { +// val t = asGto(c) +// if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]] +// else { +// val it = t.toIterator +// val cb = mutable.ParArrayCombiner[T]() +// while (it.hasNext) cb += it.next +// cb.result +// } +// } +// } } package parallel { /** Implicit conversions used in the implementation of parallel collections. 
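
The `FactoryOps`/`TraversableOps` implicits below dispatch through an `ifParallel(...) otherwise(...)` pattern. Its general shape, reduced to a runtime type test (a generic sketch, not the library's exact `Otherwise` type):

```scala
import scala.reflect.ClassTag

trait Otherwise[R] { def otherwise(notbody: => R): R }

// Run `isbody` when `x` is a Sub; defer the fallback to `.otherwise(...)`.
def ifInstance[Sub: ClassTag, R](x: Any)(isbody: Sub => R): Otherwise[R] =
  new Otherwise[R] {
    def otherwise(notbody: => R): R = x match {
      case sub: Sub => isbody(sub)
      case _        => notbody
    }
  }

// e.g. ifInstance[List[_], Int](Vector(1))(_.size).otherwise(-1)  // -1
```
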
*/ private[collection] object ParallelCollectionImplicits { - implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] { + implicit def factory2ops[From, Elem, To](bf: OldCanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] { def isParallel = bf.isInstanceOf[Parallel] def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]] def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new Otherwise[R] { def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody } } - implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] { + implicit def traversable2ops[T](t: scala.collection.IterableOnce[T]) = new TraversableOps[T] { def isParallel = t.isInstanceOf[Parallel] def isParIterable = t.isInstanceOf[ParIterable[_]] def asParIterable = t.asInstanceOf[ParIterable[T]] @@ -135,13 +135,13 @@ package parallel { } def remaining = until - index def dup = new BufferSplitter(buffer, index, until, signalDelegate) - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { + def split: scala.Seq[IterableSplitter[T]] = if (remaining > 1) { val divsz = (until - index) / 2 - Seq( + scala.Seq( new BufferSplitter(buffer, index, index + divsz, signalDelegate), new BufferSplitter(buffer, index + divsz, until, signalDelegate) ) - } else Seq(this) + } else scala.Seq(this) private[parallel] override def debugInformation = { buildString { append =>