From 8da0743f2fca45f67b98b66942c913438521ad27 Mon Sep 17 00:00:00 2001
From: Julien Richard-Foy
Date: Mon, 13 Aug 2018 15:49:14 +0200
Subject: [PATCH 1/4] Prepare for Scala 2.13.0-RC1
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Old code from the scala.collection.generic package has been imported from the 2.12.x branch.
- Make ParIterableLike extend IterableOnce.
- Operations that used to take GenIterable collections as parameters now have two overloads, one taking a ParIterable and another taking an Iterable.
- VectorIterator has been deprecated, which is unfortunate because it is used to implement ParVector. As a consequence, we now get a deprecation warning when we compile the project.
- Inline GenTraversableFactory and GenericCompanion from scala/scala.
- Move the contents that were previously in GenericCompanion into the GenericParCompanion trait.
- Remove ParallelConsistencyTest, which no longer makes sense since sequential collections don't take "generic" collections as parameters.
- Move the ExposedArraySeq logic to ParArray.
- Parallel collections are no longer comparable with sequential collections. Tests systematically replace `==` with `sameElements`.
- Add support for the `to` conversion method in both directions (sequential to parallel and parallel to sequential). `toSeq`, `toIterable`, `toSet` and `toMap` used to be overridden to return parallel collections. I've kept that choice (although it then makes it impossible to inherit from `IterableOnceOps`).
- Restore usage of DoublingUnrolledBuffer.
- Restore FlatHashTable from scala/scala@9008c2fa06607eb72d597fca89bc8c8119f20212. Systematically use `areEqual` in tests on collections with no strict ordering.
- The implementation of the sequential Set returned by the `seq` member has been copied from scala/scala@056e1e9a6e75f69222fe568299e98e451aebdbe5.
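For illustration, the new conversion and comparison style looks roughly like this (a sketch, not part of the patch; it assumes the `CollectionConverters` import shipped in this module and the `sameElements` method described above; `ParVector` stands in for any parallel collection):

    import scala.collection.parallel.CollectionConverters._
    import scala.collection.parallel.immutable.ParVector

    val par = Vector(1, 2, 3).par             // sequential to parallel
    val seq = par.to(Vector)                  // parallel to sequential, via `to`
    val pv  = Vector(1, 2, 3).to(ParVector)   // sequential to parallel, via `to`

    // Parallel and sequential collections no longer compare equal with `==`,
    // so tests compare contents instead:
    assert(par.sameElements(seq))

Note that `xs.to(ParVector)` currently builds the parallel collection sequentially (see the TODO on `GenericParCompanion` below).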
--- build.sbt | 6 +- .../scala/scala/collection/DebugUtils.scala | 21 + .../scala/scala/collection/Parallel.scala | 17 + .../scala/collection/Parallelizable.scala | 2 +- .../collection/generic/CanCombineFrom.scala | 3 +- .../generic/GenericParCompanion.scala | 68 +- .../generic/GenericParTemplate.scala | 6 +- .../generic/GenericTraversableTemplate.scala | 223 ++++ .../collection/generic/HasNewBuilder.scala | 17 + .../scala/collection/generic/ParFactory.scala | 195 +++- .../collection/generic/ParMapFactory.scala | 23 +- .../collection/generic/ParSetFactory.scala | 8 +- .../scala/collection/generic/Signalling.scala | 176 +++ .../scala/collection/generic/Sizing.scala | 17 + .../collection/immutable/OldHashMap.scala | 582 ++++++++++ .../collection/immutable/OldHashSet.scala | 998 ++++++++++++++++++ .../collection/immutable/TrieIterator.scala | 216 ++++ .../collection/mutable/FlatHashTable.scala | 433 ++++++++ .../parallel/CollectionConverters.scala | 156 +-- .../scala/collection/parallel/Combiner.scala | 4 +- .../collection/parallel/ParIterable.scala | 11 +- .../collection/parallel/ParIterableLike.scala | 188 ++-- .../scala/collection/parallel/ParMap.scala | 11 +- .../collection/parallel/ParMapLike.scala | 155 ++- .../scala/collection/parallel/ParSeq.scala | 11 +- .../collection/parallel/ParSeqLike.scala | 261 +++-- .../scala/collection/parallel/ParSet.scala | 9 +- .../collection/parallel/ParSetLike.scala | 112 +- .../collection/parallel/RemainsIterator.scala | 21 +- .../scala/collection/parallel/Tasks.scala | 10 +- .../parallel/immutable/ParHashMap.scala | 78 +- .../parallel/immutable/ParHashSet.scala | 44 +- .../parallel/immutable/ParIterable.scala | 10 +- .../parallel/immutable/ParMap.scala | 38 +- .../parallel/immutable/ParRange.scala | 5 +- .../parallel/immutable/ParSeq.scala | 15 +- .../parallel/immutable/ParSet.scala | 9 +- .../parallel/immutable/ParVector.scala | 22 +- .../parallel/immutable/package.scala | 1 + .../parallel/mutable/LazyCombiner.scala | 4 +- .../parallel/mutable/ParArray.scala | 88 +- .../parallel/mutable/ParFlatHashTable.scala | 6 +- .../parallel/mutable/ParHashMap.scala | 49 +- .../parallel/mutable/ParHashSet.scala | 17 +- .../parallel/mutable/ParHashTable.scala | 58 +- .../parallel/mutable/ParIterable.scala | 10 +- .../collection/parallel/mutable/ParMap.scala | 12 +- .../parallel/mutable/ParMapLike.scala | 25 +- .../collection/parallel/mutable/ParSeq.scala | 10 +- .../collection/parallel/mutable/ParSet.scala | 9 +- .../parallel/mutable/ParSetLike.scala | 21 +- .../parallel/mutable/ParTrieMap.scala | 8 +- .../mutable/ResizableParArrayCombiner.scala | 9 +- .../mutable/UnrolledParArrayCombiner.scala | 6 +- .../collection/parallel/mutable/package.scala | 6 - .../scala/collection/parallel/package.scala | 37 +- junit/src/test/scala/MiscTest.scala | 29 +- .../scala/SerializationStabilityTest.scala | 16 +- .../CollectionConversionsTest.scala | 118 ++- .../scala/collection/NewBuilderTest.scala | 174 ++- .../ctries_new/ConcurrentMapSpec.scala | 4 +- .../concurrent/ctries_new/IteratorSpec.scala | 2 +- .../concurrent/ctries_new/LNodeSpec.scala | 8 +- .../ctries_old/ConcurrentMapSpec.scala | 4 +- .../concurrent/ctries_old/IteratorSpec.scala | 2 +- .../concurrent/ctries_old/LNodeSpec.scala | 8 +- .../immutable/ParallelConsistencyTest.scala | 44 - .../collection/parallel/ParMapTest.scala | 8 +- .../parallel/ParSeqConversionsTest.scala | 68 +- .../scala/runtime/ScalaRunTimeTest.scala | 70 -- scalacheck/src/test/scala/IntOperators.scala | 2 +- 
scalacheck/src/test/scala/Operators.scala | 4 +- scalacheck/src/test/scala/PairOperators.scala | 10 +- .../src/test/scala/ParallelArrayCheck.scala | 2 +- .../src/test/scala/ParallelCtrieCheck.scala | 4 +- .../src/test/scala/ParallelHashMapCheck.scala | 4 +- .../src/test/scala/ParallelHashSetCheck.scala | 4 +- .../test/scala/ParallelHashTrieCheck.scala | 6 +- .../test/scala/ParallelIterableCheck.scala | 110 +- .../src/test/scala/ParallelMapCheck1.scala | 98 ++ .../src/test/scala/ParallelRangeCheck.scala | 7 +- .../src/test/scala/ParallelSeqCheck.scala | 46 +- .../src/test/scala/ParallelSetCheck.scala | 1 + 83 files changed, 4344 insertions(+), 1066 deletions(-) create mode 100644 core/src/main/scala/scala/collection/DebugUtils.scala create mode 100644 core/src/main/scala/scala/collection/Parallel.scala create mode 100644 core/src/main/scala/scala/collection/generic/GenericTraversableTemplate.scala create mode 100644 core/src/main/scala/scala/collection/generic/HasNewBuilder.scala create mode 100644 core/src/main/scala/scala/collection/generic/Signalling.scala create mode 100644 core/src/main/scala/scala/collection/generic/Sizing.scala create mode 100644 core/src/main/scala/scala/collection/immutable/OldHashMap.scala create mode 100644 core/src/main/scala/scala/collection/immutable/OldHashSet.scala create mode 100644 core/src/main/scala/scala/collection/immutable/TrieIterator.scala create mode 100644 core/src/main/scala/scala/collection/mutable/FlatHashTable.scala delete mode 100644 junit/src/test/scala/scala/collection/immutable/ParallelConsistencyTest.scala delete mode 100644 junit/src/test/scala/scala/runtime/ScalaRunTimeTest.scala diff --git a/build.sbt b/build.sbt index 935d7528..7bc4df4d 100644 --- a/build.sbt +++ b/build.sbt @@ -2,14 +2,16 @@ import ScalaModulePlugin._ version in ThisBuild := "0.1.3-SNAPSHOT" +resolvers in ThisBuild += "scala-integration" at "https://scala-ci.typesafe.com/artifactory/scala-integration/" + scalaVersionsByJvm in ThisBuild := { - val v213 = "2.13.0-M3" + val v213 = "2.13.0-pre-021a9a4" Map( 8 -> List(v213 -> true), 11 -> List(v213 -> false)) } -scalacOptions in ThisBuild ++= Seq("-deprecation", "-feature", "-Xfatal-warnings") +scalacOptions in ThisBuild ++= Seq("-deprecation", "-feature"/*, "-Xfatal-warnings"*/) cancelable in Global := true diff --git a/core/src/main/scala/scala/collection/DebugUtils.scala b/core/src/main/scala/scala/collection/DebugUtils.scala new file mode 100644 index 00000000..631706f4 --- /dev/null +++ b/core/src/main/scala/scala/collection/DebugUtils.scala @@ -0,0 +1,21 @@ +package scala.collection + +private[collection] object DebugUtils { + + def buildString(closure: (Any => Unit) => Unit): String = { + val output = new collection.mutable.StringBuilder + closure { any => + output ++= any.toString + output += '\n' + } + + output.result() + } + + def arrayString[T](array: Array[T], from: Int, until: Int): String = { + array.slice(from, until) map ({ + case null => "n/a" + case x => "" + x + }: scala.PartialFunction[T, String]) mkString " | " + } +} \ No newline at end of file diff --git a/core/src/main/scala/scala/collection/Parallel.scala b/core/src/main/scala/scala/collection/Parallel.scala new file mode 100644 index 00000000..174e3ab7 --- /dev/null +++ b/core/src/main/scala/scala/collection/Parallel.scala @@ -0,0 +1,17 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** 
+\* */ + +package scala +package collection + +/** A marker trait for collections which have their operations parallelised. + * + * @since 2.9 + * @author Aleksandar Prokopec + */ +trait Parallel diff --git a/core/src/main/scala/scala/collection/Parallelizable.scala b/core/src/main/scala/scala/collection/Parallelizable.scala index 90a9ab68..0b2dda97 100644 --- a/core/src/main/scala/scala/collection/Parallelizable.scala +++ b/core/src/main/scala/scala/collection/Parallelizable.scala @@ -24,7 +24,7 @@ import parallel.Combiner */ trait Parallelizable[+A, +ParRepr <: Parallel] extends Any { - def seq: TraversableOnce[A] + def seq: IterableOnce[A] /** Returns a parallel implementation of this collection. * diff --git a/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala b/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala index ead36ffe..08a10824 100644 --- a/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala +++ b/core/src/main/scala/scala/collection/generic/CanCombineFrom.scala @@ -24,8 +24,7 @@ import scala.collection.parallel._ * @tparam To the type of the collection to be created. * @since 2.8 */ -trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel { +trait CanCombineFrom[-From, -Elem, +To] extends Parallel { def apply(from: From): Combiner[Elem, To] def apply(): Combiner[Elem, To] } - diff --git a/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala b/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala index 21c69465..75706844 100644 --- a/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala +++ b/core/src/main/scala/scala/collection/generic/GenericParCompanion.scala @@ -17,7 +17,7 @@ package generic import scala.collection.parallel.Combiner import scala.collection.parallel.ParIterable import scala.collection.parallel.ParMap -import scala.language.higherKinds +import scala.language.{higherKinds, implicitConversions} /** A template class for companion objects of parallel collection classes. * They should be mixed in together with `GenericCompanion` type. @@ -27,6 +27,26 @@ import scala.language.higherKinds * @since 2.8 */ trait GenericParCompanion[+CC[X] <: ParIterable[X]] { + + /** An empty collection of type `$Coll[A]` + * @tparam A the type of the ${coll}'s elements + */ + def empty[A]: CC[A] = newBuilder[A].result() + + /** Creates a $coll with the specified elements. + * @tparam A the type of the ${coll}'s elements + * @param elems the elements of the created $coll + * @return a new $coll with elements `elems` + */ + def apply[A](elems: A*): CC[A] = { + if (elems.isEmpty) empty[A] + else { + val b = newBuilder[A] + b ++= elems + b.result() + } + } + /** The default builder for $Coll objects. */ def newBuilder[A]: Combiner[A, CC[A]] @@ -34,9 +54,55 @@ trait GenericParCompanion[+CC[X] <: ParIterable[X]] { /** The parallel builder for $Coll objects. */ def newCombiner[A]: Combiner[A, CC[A]] + + implicit def toFactory[A]: Factory[A, CC[A]] = GenericParCompanion.toFactory(this) + +} + + +// TODO Specialize `Factory` with parallel collection creation methods so that the `xs.to(ParArray)` syntax +// does build the resulting `ParArray` in parallel +object GenericParCompanion { + /** + * Implicit conversion for converting any `ParFactory` into a sequential `Factory`. + * This provides support for the `to` conversion method (e.g., `xs.to(ParArray)`). + */ + implicit def toFactory[A, CC[X] <: ParIterable[X]](parFactory: GenericParCompanion[CC]): Factory[A, CC[A]] = + new ToFactory(parFactory) + + @SerialVersionUID(3L) + private class ToFactory[A, CC[X] <: ParIterable[X]](parFactory: GenericParCompanion[CC]) + extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = (parFactory.newBuilder[A] ++= it).result() + def newBuilder: mutable.Builder[A, CC[A]] = parFactory.newBuilder + } + } trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] { + def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] + + implicit def toFactory[K, V]: Factory[(K, V), CC[K, V]] = GenericParMapCompanion.toFactory(this) + } +object GenericParMapCompanion { + /** + * Implicit conversion for converting any `ParFactory` into a sequential `Factory`. + * This provides support for the `to` conversion method (e.g., `xs.to(ParMap)`). + */ + implicit def toFactory[K, V, CC[X, Y] <: ParMap[X, Y]]( + parFactory: GenericParMapCompanion[CC] + ): Factory[(K, V), CC[K, V]] = + new ToFactory[K, V, CC](parFactory) + + @SerialVersionUID(3L) + private class ToFactory[K, V, CC[X, Y] <: ParMap[X, Y]]( + parFactory: GenericParMapCompanion[CC] + ) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = (parFactory.newCombiner[K, V] ++= it).result() + def newBuilder: mutable.Builder[(K, V), CC[K, V]] = parFactory.newCombiner + } + +} \ No newline at end of file diff --git a/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala b/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala index c5355610..86792a5b 100644 --- a/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala +++ b/core/src/main/scala/scala/collection/generic/GenericParTemplate.scala @@ -29,10 +29,10 @@ import scala.language.higherKinds * @since 2.8 */ trait GenericParTemplate[+A, +CC[X] <: ParIterable[X]] -extends GenericTraversableTemplate[A, CC] - with HasNewCombiner[A, CC[A] @uncheckedVariance] + extends GenericTraversableTemplate[A, CC] + with HasNewCombiner[A, CC[A] @uncheckedVariance] { - def companion: GenericCompanion[CC] with GenericParCompanion[CC] + def companion: GenericParCompanion[CC] protected[this] override def newBuilder: scala.collection.mutable.Builder[A, CC[A]] = newCombiner diff --git a/core/src/main/scala/scala/collection/generic/GenericTraversableTemplate.scala b/core/src/main/scala/scala/collection/generic/GenericTraversableTemplate.scala new file mode 100644 index 00000000..538696c2 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/GenericTraversableTemplate.scala @@ -0,0 +1,223 @@ +package scala.collection.generic + +import scala.language.higherKinds +import scala.annotation.migration +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import scala.collection.parallel.ParIterable + +/** A template class for companion objects of ``regular`` collection classes + * that represent an unconstrained higher-kinded type. + * + * @tparam A The type of the collection elements. + * @tparam CC The type constructor representing the collection class. + * @author Martin Odersky + * @since 2.8 + * @define coll collection + */ +// TODO inline in GenericParTemplate or ParIterable +trait GenericTraversableTemplate[+A, +CC[X] <: ParIterable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { + + /** A sequential collection containing the same elements as this collection */ + def seq: Iterable[A] + + /** Applies a function `f` to all elements of this $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + * + * @usecase def foreach(f: A => Unit): Unit + */ + def foreach[U](f: A => U): Unit + + /** Selects the first element of this $coll. + * + * @return the first element of this $coll. + * @throws NoSuchElementException if the $coll is empty. + */ + def head: A + + /** Tests whether this $coll is empty. + * + * @return `true` if the $coll contains no elements, `false` otherwise. + */ + def isEmpty: Boolean + + /** The factory companion object that builds instances of class $Coll. + * (or its `Iterable` superclass where class $Coll is not a `Seq`.) + */ + def companion: GenericParCompanion[CC] + + /** The builder that builds instances of type $Coll[A] + */ + protected[this] def newBuilder: Builder[A, CC[A]] = companion.newBuilder[A] + + /** The generic builder that builds instances of $Coll + * at arbitrary element types. + */ + def genericBuilder[B]: Builder[B, CC[B]] = companion.newBuilder[B] + + private def sequential: IterableOnce[A] = this.seq + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. + * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + for (xy <- sequential.iterator) { + val (x, y) = asPair(xy) + b1 += x + b2 += y + } + (b1.result(), b2.result()) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll.
+ */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + val b3 = genericBuilder[A3] + + for (xyz <- sequential.iterator) { + val (x, y, z) = asTriple(xyz) + b1 += x + b2 += y + b3 += z + } + (b1.result(), b2.result(), b3.result()) + } + + /** Converts this $coll of traversable collections into + * a $coll formed by the elements of these traversable + * collections. + * + * @tparam B the type of the elements of each traversable collection. + * @param asTraversable an implicit conversion which asserts that the element + * type of this $coll is a `GenTraversable`. + * @return a new $coll resulting from concatenating all element ${coll}s. + * + * @usecase def flatten[B]: $Coll[B] + * + * @inheritdoc + * + * The resulting collection's type will be guided by the + * static type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(1, 2, 3) + * ).flatten + * // xs == List(1, 2, 3, 1, 2, 3) + * + * val ys = Set( + * List(1, 2, 3), + * List(3, 2, 1) + * ).flatten + * // ys == Set(1, 2, 3) + * }}} + */ + def flatten[B](implicit asTraversable: A => /*<:<!!!*/ IterableOnce[B]): CC[B] = { + val b = genericBuilder[B] + for (xs <- sequential.iterator) + b ++= asTraversable(xs) + b.result() + } + + /** Transposes this $coll of traversable collections into + * a $coll of ${coll}s. + * + * @tparam B the type of the elements of each traversable collection. + * @param asTraversable an implicit conversion which asserts that the element + * type of this $coll is a `Traversable`. + * @return a two-dimensional $coll of ${coll}s which has as ''n''th row + * the ''n''th column of this $coll. + * @throws IllegalArgumentException if all collections in this $coll + * are not of the same size. + */ + @migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0") + def transpose[B](implicit asTraversable: A => /*<:<!!!*/ IterableOnce[B]): CC[CC[B] @uncheckedVariance] = { + if (isEmpty) + return genericBuilder[CC[B]].result() + + def fail = throw new IllegalArgumentException("transpose requires all collections have the same size") + + val headSize = asTraversable(head).size + val bs: IndexedSeq[Builder[B, CC[B]]] = IndexedSeq.fill(headSize)(genericBuilder[B]) + for (xs <- sequential.iterator) { + var i = 0 + for (x <- asTraversable(xs)) { + if (i >= headSize) fail + bs(i) += x + i += 1 + } + if (i != headSize) + fail + } + val bb = genericBuilder[CC[B]] + for (b <- bs) bb += b.result + bb.result() + } +} diff --git a/core/src/main/scala/scala/collection/generic/HasNewBuilder.scala b/core/src/main/scala/scala/collection/generic/HasNewBuilder.scala new file mode 100644 index 00000000..aa0ce669 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/HasNewBuilder.scala @@ -0,0 +1,17 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ +package scala +package collection +package generic + +import mutable.Builder + +trait HasNewBuilder[+A, +Repr] extends Any { + /** The builder that builds instances of Repr */ + protected[this] def newBuilder: Builder[A, Repr] +} diff --git a/core/src/main/scala/scala/collection/generic/ParFactory.scala b/core/src/main/scala/scala/collection/generic/ParFactory.scala index fd997df6..a090912f 100644 --- a/core/src/main/scala/scala/collection/generic/ParFactory.scala +++ b/core/src/main/scala/scala/collection/generic/ParFactory.scala @@ -28,8 +28,195 @@ import scala.language.higherKinds * @since 2.8 */ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]] -extends GenTraversableFactory[CC] - with GenericParCompanion[CC] { +extends GenericParCompanion[CC] { + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + // At present we're using IndexedSeq as a proxy for "has a cheap size method". + if (xss forall (_.isInstanceOf[IndexedSeq[_]])) + b.sizeHint(xss.map(_.size).sum) + + for (xs <- xss) b ++= xs + b.result() + } + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`.
+ */ + def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n - 1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Produces a $coll containing an increasing sequence of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[T: Integral](start: T, end: T, step: T): CC[T] = { + val num = implicitly[Integral[T]] + import num._ + + if (step == zero) throw new IllegalArgumentException("zero step") + val b = newBuilder[T] + b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false) + var i = start + while (if (step < zero) end < i else i < end) { + b += i + i += step + } + b.result() + } + + /** Produces a $coll containing repeated applications of a function to a start value.
+ * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A, len: Int)(f: A => A): CC[A] = { + val b = newBuilder[A] + if (len > 0) { + b.sizeHint(len) + var acc = start + var i = 1 + b += acc + + while (i < len) { + acc = f(acc) + i += 1 + b += acc + } + } + b.result() + } //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C] @@ -37,8 +224,8 @@ extends GenTraversableFactory[CC] * all calls to `apply(from)` to the `genericParBuilder` method of the $coll * `from`, and calls to `apply()` to this factory. */ - class GenericCanCombineFrom[A] extends GenericCanBuildFrom[A] with CanCombineFrom[CC[_], A, CC[A]] { - override def apply(from: Coll) = from.genericCombiner + class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] { + override def apply(from: CC[_]) = from.genericCombiner override def apply() = newBuilder[A] } } diff --git a/core/src/main/scala/scala/collection/generic/ParMapFactory.scala b/core/src/main/scala/scala/collection/generic/ParMapFactory.scala index 09edb8ad..28e911a2 100644 --- a/core/src/main/scala/scala/collection/generic/ParMapFactory.scala +++ b/core/src/main/scala/scala/collection/generic/ParMapFactory.scala @@ -17,7 +17,6 @@ package generic import scala.collection.parallel.ParMap import scala.collection.parallel.ParMapLike import scala.collection.parallel.Combiner -import scala.collection.mutable.Builder import scala.language.higherKinds /** A template class for companion objects of `ParMap` and subclasses thereof. @@ -31,9 +30,20 @@ import scala.language.higherKinds * @author Aleksandar Prokopec * @since 2.8 */ -abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC[X, Y], _]] -extends GenMapFactory[CC] - with GenericParMapCompanion[CC] { +abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC, CC[X, Y], _]] +extends GenericParMapCompanion[CC] { + + type Coll = MapColl + + /** A collection of type $Coll that contains given key/value bindings. + * @param elems the key/value pairs that make up the $coll + * @tparam K the type of the keys + * @tparam V the type of the associated values + * @return a new $coll consisting key/value pairs given by `elems`. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = (newCombiner[K, V] ++= elems).result() + + def empty[K, V]: CC[K, V] type MapColl = CC[_, _] @@ -41,7 +51,7 @@ extends GenMapFactory[CC] * @tparam K the type of the keys * @tparam V the type of the associated values */ - override def newBuilder[K, V]: Builder[(K, V), CC[K, V]] = newCombiner[K, V] + def newBuilder[K, V]: mutable.Builder[(K, V), CC[K, V]] = newCombiner[K, V] /** The default combiner for $Coll objects. 
* @tparam K the type of the keys @@ -54,5 +64,4 @@ extends GenMapFactory[CC] def apply() = newCombiner[K, V] } -} - +} \ No newline at end of file diff --git a/core/src/main/scala/scala/collection/generic/ParSetFactory.scala b/core/src/main/scala/scala/collection/generic/ParSetFactory.scala index b1627a3d..d5b44274 100644 --- a/core/src/main/scala/scala/collection/generic/ParSetFactory.scala +++ b/core/src/main/scala/scala/collection/generic/ParSetFactory.scala @@ -25,16 +25,14 @@ import scala.language.higherKinds * @author Aleksandar Prokopec * @since 2.8 */ -abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] with GenericParTemplate[X, CC]] - extends GenSetFactory[CC] - with GenericParCompanion[CC] -{ +abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC, CC[X], _] with GenericParTemplate[X, CC]] + extends GenericParCompanion[CC] { def newBuilder[A]: Combiner[A, CC[A]] = newCombiner[A] def newCombiner[A]: Combiner[A, CC[A]] class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] { - override def apply(from: Coll) = from.genericCombiner[A] + override def apply(from: CC[_]) = from.genericCombiner[A] override def apply() = newCombiner[A] } } diff --git a/core/src/main/scala/scala/collection/generic/Signalling.scala b/core/src/main/scala/scala/collection/generic/Signalling.scala new file mode 100644 index 00000000..e05acaa9 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/Signalling.scala @@ -0,0 +1,176 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala +package collection +package generic + +import java.util.concurrent.atomic.AtomicInteger + +/** + * A message interface serves as a unique interface to the + * part of the collection capable of receiving messages from + * a different task. + * + * One example of use of this is the `find` method, which can use the + * signalling interface to inform worker threads that an element has + * been found and no further search is necessary. + * + * @author prokopec + * + * @define abortflag + * Abort flag being true means that a worker can abort and produce whatever result, + * since its result will not affect the final result of computation. An example + * of operations using this are `find`, `forall` and `exists` methods. + * + * @define indexflag + * The index flag holds an integer which carries some operation-specific meaning. For + * instance, `takeWhile` operation sets the index flag to the position of the element + * where the predicate fails. Other workers may check this index against the indices + * they are working on and return if this index is smaller than their index. Examples + * of operations using this are `takeWhile`, `dropWhile`, `span` and `indexOf`. + */ +trait Signalling { + /** + * Checks whether an abort signal has been issued. + * + * $abortflag + * @return the state of the abort + */ + def isAborted: Boolean + + /** + * Sends an abort signal to other workers. + * + * $abortflag + */ + def abort(): Unit + + /** + * Returns the value of the index flag. + * + * $indexflag + * @return the value of the index flag + */ + def indexFlag: Int + + /** + * Sets the value of the index flag. + * + * $indexflag + * @param f the value to which the index flag is set. 
+ */ + def setIndexFlag(f: Int): Unit + + /** + * Sets the value of the index flag if argument is greater than current value. + * This method does this atomically. + * + * $indexflag + * @param f the value to which the index flag is set + */ + def setIndexFlagIfGreater(f: Int): Unit + + /** + * Sets the value of the index flag if argument is lesser than current value. + * This method does this atomically. + * + * $indexflag + * @param f the value to which the index flag is set + */ + def setIndexFlagIfLesser(f: Int): Unit + + /** + * A read only tag specific to the signalling object. It is used to give + * specific workers information on the part of the collection being operated on. + */ + def tag: Int +} + +/** + * This signalling implementation returns default values and ignores received signals. + */ +class DefaultSignalling extends Signalling with VolatileAbort { + def indexFlag = -1 + def setIndexFlag(f: Int): Unit = () + def setIndexFlagIfGreater(f: Int): Unit = () + def setIndexFlagIfLesser(f: Int): Unit = () + + def tag = -1 +} + +/** + * An object that returns default values and ignores received signals. + */ +object IdleSignalling extends DefaultSignalling + +/** + * A mixin trait that implements abort flag behaviour using volatile variables. + */ +trait VolatileAbort extends Signalling { + @volatile private var abortflag = false + override def isAborted = abortflag + override def abort() = abortflag = true +} + +/** + * A mixin trait that implements index flag behaviour using atomic integers. + * The `setIndex` operation is wait-free, while conditional set operations `setIndexIfGreater` + * and `setIndexIfLesser` are lock-free and support only monotonic changes. + */ +trait AtomicIndexFlag extends Signalling { + private val intflag: AtomicInteger = new AtomicInteger(-1) + abstract override def indexFlag = intflag.get + abstract override def setIndexFlag(f: Int) = intflag.set(f) + abstract override def setIndexFlagIfGreater(f: Int) = { + var loop = true + do { + val old = intflag.get + if (f <= old) loop = false + else if (intflag.compareAndSet(old, f)) loop = false + } while (loop) + } + abstract override def setIndexFlagIfLesser(f: Int) = { + var loop = true + do { + val old = intflag.get + if (f >= old) loop = false + else if (intflag.compareAndSet(old, f)) loop = false + } while (loop) + } +} + +/** + * An implementation of the signalling interface using delegates. + */ +trait DelegatedSignalling extends Signalling { + /** + * A delegate that method calls are redirected to. + */ + var signalDelegate: Signalling + + def isAborted = signalDelegate.isAborted + def abort() = signalDelegate.abort() + + def indexFlag = signalDelegate.indexFlag + def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f) + def setIndexFlagIfGreater(f: Int) = signalDelegate.setIndexFlagIfGreater(f) + def setIndexFlagIfLesser(f: Int) = signalDelegate.setIndexFlagIfLesser(f) + + def tag = signalDelegate.tag +} + +/** + * Class implementing delegated signalling. + */ +class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling + +/** + * Class implementing delegated signalling, but having its own distinct `tag`. 
+ */ +class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg) diff --git a/core/src/main/scala/scala/collection/generic/Sizing.scala b/core/src/main/scala/scala/collection/generic/Sizing.scala new file mode 100644 index 00000000..73584ce8 --- /dev/null +++ b/core/src/main/scala/scala/collection/generic/Sizing.scala @@ -0,0 +1,17 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala +package collection +package generic + +/** A trait for objects which have a size. + */ +trait Sizing { + def size: Int +} diff --git a/core/src/main/scala/scala/collection/immutable/OldHashMap.scala b/core/src/main/scala/scala/collection/immutable/OldHashMap.scala new file mode 100644 index 00000000..c568edfb --- /dev/null +++ b/core/src/main/scala/scala/collection/immutable/OldHashMap.scala @@ -0,0 +1,582 @@ +package scala +package collection.immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import collection.{Iterator, MapFactory, StrictOptimizedIterableOps, StrictOptimizedMapOps} +import collection.Hashing.{computeHash, keepBits} +import scala.annotation.unchecked.{uncheckedVariance => uV} +import java.lang.{Integer, String, System} + +import scala.collection.mutable.{Builder, ImmutableBuilder} + +/** This class implements immutable maps using a hash trie. + * + * '''Note:''' The builder of this hash map may return specialized representations for small maps. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values associated with the keys. + * @author Martin Odersky + * @author Tiark Rompf + * @since 2.3 + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash-tries "Scala's Collection Library overview"]] + * section on `Hash Tries` for more information. + * @define Coll `immutable.OldHashMap` + * @define coll immutable hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class OldHashMap[K, +V] + extends AbstractMap[K, V] + with MapOps[K, V, OldHashMap, OldHashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, OldHashMap[K, V]] + with StrictOptimizedMapOps[K, V, OldHashMap, OldHashMap[K, V]] { + + import OldHashMap.{bufferSize, liftMerger, Merger, MergeFunction, nullToEmpty} + + override def mapFactory: MapFactory[OldHashMap] = OldHashMap + + final def removed(key: K): OldHashMap[K, V] = removed0(key, computeHash(key), 0) + + final def updated[V1 >: V](key: K, value: V1): OldHashMap[K, V1] = + updated0(key, computeHash(key), 0, value, null, null) + + @`inline` override final def +[V1 >: V](kv: (K, V1)): OldHashMap[K, V1] = updated(kv._1, kv._2) + + def get(key: K): Option[V] = get0(key, computeHash(key), 0) + + def split: Seq[OldHashMap[K, V]] = Seq(this) + + /** Creates a new map which is the merge of this and the argument hash map. + * + * Uses the specified collision resolution function if two keys are the same. + * The collision resolution function will always take the first argument from + * `this` hash map and the second from `that`. + * + * The `merged` method is on average more performant than doing a traversal and reconstructing a + * new immutable hash map from scratch, or `++`. 
+ * + * @tparam V1 the value type of the other hash map + * @param that the other hash map + * @param mergef the merge function or null if the first key-value pair is to be picked + */ + def merged[V1 >: V](that: OldHashMap[K, V1])(mergef: MergeFunction[K, V1]): OldHashMap[K, V1] = merge0(that, 0, liftMerger(mergef)) + + protected[collection] def updated0[V1 >: V](key: K, hash: Int, level: Int, value: V1, kv: (K, V1), merger: Merger[K, V1]): OldHashMap[K, V1] + + protected def removed0(key: K, hash: Int, level: Int): OldHashMap[K, V] + + protected[collection] def get0(key: K, hash: Int, level: Int): Option[V] + + protected def merge0[V1 >: V](that: OldHashMap[K, V1], level: Int, merger: Merger[K, V1]): OldHashMap[K, V1] + + protected def filter0(p: ((K, V)) => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashMap[K, V @uV]], offset0: Int): OldHashMap[K, V] + + protected def contains0(key: K, hash: Int, level: Int): Boolean + + override final def contains(key: K): Boolean = contains0(key, computeHash(key), 0) + + override def tail: OldHashMap[K, V] = this - head._1 + + override def init: OldHashMap[K, V] = this - last._1 + + override def filter(pred: ((K, V)) => Boolean): OldHashMap[K, V] = { + val buffer = new Array[OldHashMap[K, V]](bufferSize(size)) + nullToEmpty(filter0(pred, negate = false, 0, buffer, 0)) + } + + override def filterNot(pred: ((K, V)) => Boolean): OldHashMap[K, V] = { + val buffer = new Array[OldHashMap[K, V]](bufferSize(size)) + nullToEmpty(filter0(pred, negate = true, 0, buffer, 0)) + } + + override protected[this] def className: String = "OldHashMap" + +} + +/** + * $factoryInfo + * @define Coll `immutable.OldHashMap` + * @define coll immutable hash map + */ +@SerialVersionUID(3L) +object OldHashMap extends MapFactory[OldHashMap] { + + def empty[K, V]: OldHashMap[K, V] = EmptyOldHashMap.asInstanceOf[OldHashMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): OldHashMap[K, V] = + it match { + case hm: OldHashMap[K, V] => hm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), OldHashMap[K, V]] = + new ImmutableBuilder[(K, V), OldHashMap[K, V]](empty) { + def addOne(elem: (K, V)): this.type = { elems = elems + elem; this } + } + + private[collection] abstract class Merger[A, B] { + def apply(kv1: (A, B), kv2: (A, B)): (A, B) + def invert: Merger[A, B] + } + + private type MergeFunction[A1, B1] = ((A1, B1), (A1, B1)) => (A1, B1) + + private def liftMerger[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = + if (mergef == null) defaultMerger.asInstanceOf[Merger[A1, B1]] else liftMerger0(mergef) + + private[this] val defaultMerger : Merger[Any, Any] = liftMerger0((a,b) => a) + + private[this] def liftMerger0[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = new Merger[A1, B1] { + self => + def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv1, kv2) + val invert: Merger[A1, B1] = new Merger[A1, B1] { + def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv2, kv1) + def invert: Merger[A1, B1] = self + } + } + + // utility method to create a HashTrieMap from two leaf OldHashMaps (OldHashMap1 or OldHashMapCollision1) with non-colliding hash code) + private def makeHashTrieMap[A, B](hash0: Int, elem0: OldHashMap[A, B], hash1: Int, elem1:OldHashMap[A, B], level: Int, size: Int) : HashTrieMap[A, B] = { + val index0 = (hash0 >>> level) & 0x1f + val index1 = (hash1 >>> level) & 0x1f + if(index0 != index1) { + val bitmap = (1 << index0) | (1 << index1) + val elems = new 
Array[OldHashMap[A,B]](2) + if(index0 < index1) { + elems(0) = elem0 + elems(1) = elem1 + } else { + elems(0) = elem1 + elems(1) = elem0 + } + new HashTrieMap[A, B](bitmap, elems, size) + } else { + val elems = new Array[OldHashMap[A,B]](1) + val bitmap = (1 << index0) + elems(0) = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size) + new HashTrieMap[A, B](bitmap, elems, size) + } + } + + /** + * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection + * @param size the maximum size of the collection to be generated + * @return the maximum buffer size + */ + @`inline` private def bufferSize(size: Int): Int = math.min(size + 6, 32 * 7) + + /** + * In many internal operations the empty map is represented as null for performance reasons. This method converts + * null to the empty map for use in public methods + */ + @`inline` private def nullToEmpty[A, B](m: OldHashMap[A, B]): OldHashMap[A, B] = if (m eq null) empty[A, B] else m + + private object EmptyOldHashMap extends OldHashMap[Any, Nothing] { + + override def isEmpty: Boolean = true + override def knownSize: Int = 0 + protected[collection] def updated0[V1 >: Nothing](key: Any, hash: Int, level: Int, value: V1, kv: (Any, V1), merger: Merger[Any, V1]): OldHashMap[Any, V1] = + new OldHashMap.OldHashMap1(key, hash, value, kv) + + protected def removed0(key: Any, hash: Int, level: Int): OldHashMap[Any, Nothing] = this + + protected[collection] def get0(key: Any, hash: Int, level: Int): Option[Nothing] = None + + protected def filter0(p: ((Any, Nothing)) => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashMap[Any, Nothing]], offset0: Int): OldHashMap[Any, Nothing] = null + + protected def contains0(key: Any, hash: Int, level: Int): Boolean = false + + protected def merge0[V1 >: Nothing](that: OldHashMap[Any, V1], level: Int, merger: Merger[Any, V1]): OldHashMap[Any, V1] = that + + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + + override def foreach[U](f: ((Any, Nothing)) => U): Unit = () + + override def head: (Any, Nothing) = throw new NoSuchElementException("Empty Map") + + override def headOption: None.type = None + + override def tail: OldHashMap[Any, Nothing] = throw new NoSuchElementException("Empty Map") + + override def last: (Any, Nothing) = throw new NoSuchElementException("Empty Map") + + override def init: OldHashMap[Any, Nothing] = throw new NoSuchElementException("Empty Map") + + } + + final class OldHashMap1[K, +V](private[collection] val key: K, private[collection] val hash: Int, private[collection] val value: V, private[collection] var kv: (K, V@uV)) extends OldHashMap[K, V] { + override def isEmpty: Boolean = false + def iterator: Iterator[(K, V)] = Iterator.single(ensurePair) + + protected[collection] def get0(key: K, hash: Int, level: Int): Option[V] = + if (hash == this.hash && key == this.key) Some(value) else None + + override def size = 1 + override def knownSize: Int = 1 + private[collection] def getKey = key + private[collection] def getHash = hash + private[collection] def computeHashFor(k: K) = computeHash(k) + + protected def contains0(key: K, hash: Int, level: Int): Boolean = + hash == this.hash && key == this.key + + protected[collection] def updated0[V1 >: V](key: K, hash: Int, level: Int, value: V1, kv: (K, V1), merger: Merger[K, V1]): OldHashMap[K, V1] = + if (hash == this.hash && key == this.key ) { + if (merger eq null) { + if (this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this + else new OldHashMap1(key, hash, 
value, kv) + } else { + val nkv = merger(this.ensurePair, if(kv != null) kv else (key, value)) + new OldHashMap1(nkv._1, hash, nkv._2, nkv) + } + } else { + if (hash != this.hash) { + // they have different hashes, but may collide at this level - find a level at which they don't + val that = new OldHashMap1[K, V1](key, hash, value, kv) + makeHashTrieMap[K,V1](this.hash, this, hash, that, level, 2) + } else { + // 32-bit hash collision (rare, but not impossible) + new OldHashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value)) + } + } + + protected def removed0(key: K, hash: Int, level: Int): OldHashMap[K, V] = + if (hash == this.hash && key == this.key) OldHashMap.empty[K,V] else this + + protected def filter0(p: ((K, V)) => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashMap[K, V @uV]], offset0: Int): OldHashMap[K, V] = + if (negate ^ p(ensurePair)) this else null + + override def foreach[U](f: ((K, V)) => U): Unit = f(ensurePair) + + // this method may be called multiple times in a multithreaded environment, but that's ok + private[OldHashMap] def ensurePair: (K, V) = if (kv ne null) kv else { kv = (key, value); kv } + + protected def merge0[V1 >: V](that: OldHashMap[K, V1], level: Int, merger: Merger[K, V1]): OldHashMap[K, V1] = + that.updated0(key, hash, level, value, kv, merger.invert) + + } + + private[collection] class OldHashMapCollision1[K, +V](private[collection] val hash: Int, val kvs: ListMap[K, V @uV]) + extends OldHashMap[K, V @uV] { + // assert(kvs.size > 1) + + override def size: Int = kvs.size + override def isEmpty: Boolean = false + protected[collection] def get0(key: K, hash: Int, level: Int): Option[V] = + if (hash == this.hash) kvs.get(key) else None + + protected def contains0(key: K, hash: Int, level: Int): Boolean = + hash == this.hash && kvs.contains(key) + + protected[collection] override def updated0[B1 >: V](key: K, hash: Int, level: Int, value: B1, kv: (K, B1), merger: Merger[K, B1]): OldHashMap[K, B1] = + if (hash == this.hash) { + if ((merger eq null) || !kvs.contains(key)) new OldHashMapCollision1(hash, kvs.updated(key, value)) + else new OldHashMapCollision1(hash, kvs + merger((key, kvs(key)), kv)) + } else { + val that = new OldHashMap1(key, hash, value, kv) + makeHashTrieMap(this.hash, this, hash, that, level, size + 1) + } + + override def removed0(key: K, hash: Int, level: Int): OldHashMap[K, V] = + if (hash == this.hash) { + val kvs1 = kvs - key + kvs1.size match { + case 0 => + OldHashMap.empty[K,V] + case 1 => + val kv = kvs1.head + new OldHashMap1(kv._1,hash,kv._2,kv) + case x if x == kvs.size => + this + case _ => + new OldHashMapCollision1(hash, kvs1) + } + } else this + + override protected def filter0(p: ((K, V)) => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashMap[K, V @uV]], offset0: Int): OldHashMap[K, V] = { + val kvs1 = if (negate) kvs.filterNot(p) else kvs.filter(p) + kvs1.size match { + case 0 => + null + case 1 => + val kv@(k,v) = kvs1.head + new OldHashMap1(k, hash, v, kv) + case x if x == kvs.size => + this + case _ => + new OldHashMapCollision1(hash, kvs1) + } + } + + protected def merge0[V1 >: V](that: OldHashMap[K, V1], level: Int, merger: Merger[K, V1]): OldHashMap[K, V1] = { + // this can be made more efficient by passing the entire ListMap at once + var m = that + for (p <- kvs) m = m.updated0(p._1, this.hash, level, p._2, p, merger.invert) + m + } + + override def iterator: Iterator[(K, V)] = kvs.iterator + + override def foreach[U](f: ((K, V)) => U): Unit = kvs.foreach(f) + 
+ override def split: Seq[OldHashMap[K, V]] = { + val (x, y) = kvs.splitAt(kvs.size / 2) + def newhm(lm: ListMap[K, V @uV]) = new OldHashMapCollision1(hash, lm) + List(newhm(x), newhm(y)) + } + + } + + final class HashTrieMap[K, +V]( + private[collection] val bitmap: Int, + private[collection] val elems: Array[OldHashMap[K, V @uV]], + private[collection] val size0: Int + ) extends OldHashMap[K, V @uV] { + + // assert(Integer.bitCount(bitmap) == elems.length) + // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieMap[_,_]])) + + override def size: Int = size0 + override def isEmpty: Boolean = false + override def knownSize: Int = size + protected[collection] def get0(key: K, hash: Int, level: Int): Option[V] = { + // Note: this code is duplicated with `contains0` + val index = (hash >>> level) & 0x1f + if (bitmap == - 1) { + elems(index).get0(key, hash, level + 5) + } else { + val mask = (1 << index) + if ((bitmap & mask) != 0) { + val offset = Integer.bitCount(bitmap & (mask - 1)) + elems(offset).get0(key, hash, level + 5) + } else { + None + } + } + } + + protected def contains0(key: K, hash: Int, level: Int): Boolean = { + // Note: this code is duplicated from `get0` + val index = (hash >>> level) & 0x1f + if (bitmap == - 1) { + elems(index).contains0(key, hash, level + 5) + } else { + val mask = (1 << index) + if ((bitmap & mask) != 0) { + val offset = Integer.bitCount(bitmap & (mask - 1)) + elems(offset).contains0(key, hash, level + 5) + } else { + false + } + } + } + + protected[collection] def updated0[V1 >: V](key: K, hash: Int, level: Int, value: V1, kv: (K, V1), merger: Merger[K, V1]): OldHashMap[K, V1] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask - 1)) + if ((bitmap & mask) != 0) { + val sub = elems(offset) + val subNew = sub.updated0(key, hash, level + 5, value, kv, merger) + if(subNew eq sub) this else { + val elemsNew = new Array[OldHashMap[K,V1]](elems.length) + Array.copy(elems, 0, elemsNew, 0, elems.length) + elemsNew(offset) = subNew + new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) + } + } else { + val elemsNew = new Array[OldHashMap[K,V1]](elems.length + 1) + Array.copy(elems, 0, elemsNew, 0, offset) + elemsNew(offset) = new OldHashMap1(key, hash, value, kv) + Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset) + new HashTrieMap(bitmap | mask, elemsNew, size + 1) + } + } + + override def removed0(key: K, hash: Int, level: Int): OldHashMap[K, V] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask - 1)) + if ((bitmap & mask) != 0) { + val sub = elems(offset) + val subNew = sub.removed0(key, hash, level + 5) + if (subNew eq sub) this + else if (subNew.isEmpty) { + val bitmapNew = bitmap ^ mask + if (bitmapNew != 0) { + val elemsNew = new Array[OldHashMap[K,V]](elems.length - 1) + Array.copy(elems, 0, elemsNew, 0, offset) + Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1) + val sizeNew = size - sub.size + // if we have only one child, which is not a HashTrieSet but a self-contained set like + // HashSet1 or HashSetCollision1, return the child instead + if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]]) + elemsNew(0) + else + new HashTrieMap(bitmapNew, elemsNew, sizeNew) + } else + OldHashMap.empty[K,V] + } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[_,_]]) { + subNew + } else { + val elemsNew = 
java.util.Arrays.copyOf(elems, elems.length) + elemsNew(offset) = subNew + val sizeNew = size + (subNew.size - sub.size) + new HashTrieMap(bitmap, elemsNew, sizeNew) + } + } else { + this + } + } + + protected def filter0(p: ((K, V)) => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashMap[K, V @uV]], offset0: Int): OldHashMap[K, V] = { + // current offset + var offset = offset0 + // result size + var rs = 0 + // bitmap for kept elems + var kept = 0 + // loop over all elements + var i = 0 + while (i < elems.length) { + val result = elems(i).filter0(p, negate, level + 5, buffer, offset) + if (result ne null) { + buffer(offset) = result + offset += 1 + // add the result size + rs += result.size + // mark the bit i as kept + kept |= (1 << i) + } + i += 1 + } + if (offset == offset0) { + // empty + null + } else if (rs == size0) { + // unchanged + this + } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieMap[K, V]]) { + // leaf + buffer(offset0) + } else { + // we have to return a HashTrieMap + val length = offset - offset0 + val elems1 = new Array[OldHashMap[K, V]](length) + System.arraycopy(buffer, offset0, elems1, 0, length) + val bitmap1 = if (length == elems.length) { + // we can reuse the original bitmap + bitmap + } else { + // calculate new bitmap by keeping just bits in the kept bitmask + keepBits(bitmap, kept) + } + new HashTrieMap(bitmap1, elems1, rs) + } + } + + override def iterator: Iterator[(K, V)] = new TrieIterator[(K, V)](elems.asInstanceOf[Array[Iterable[(K, V)]]]) { + final override def getElem(cc: AnyRef): (K, V) = cc.asInstanceOf[OldHashMap1[K, V]].ensurePair + } + + override def foreach[U](f: ((K, V)) => U): Unit = { + var i = 0 + while (i < elems.length) { + elems(i).foreach(f) + i += 1 + } + } + + private def posOf(n: Int, bm: Int) = { + var left = n + var i = -1 + var b = bm + while (left >= 0) { + i += 1 + if ((b & 1) != 0) left -= 1 + b = b >>> 1 + } + i + } + + override def split: Seq[OldHashMap[K, V]] = if (size == 1) Seq(this) else { + val nodesize = Integer.bitCount(bitmap) + if (nodesize > 1) { + val splitpoint = nodesize / 2 + val bitsplitpoint = posOf(nodesize / 2, bitmap) + val bm1 = bitmap & (-1 << bitsplitpoint) + val bm2 = bitmap & (-1 >>> (32 - bitsplitpoint)) + + val (e1, e2) = elems.splitAt(splitpoint) + val hm1 = new HashTrieMap(bm1, e1, e1.foldLeft(0)(_ + _.size)) + val hm2 = new HashTrieMap(bm2, e2, e2.foldLeft(0)(_ + _.size)) + + List(hm1, hm2) + } else elems(0).split + } + + protected def merge0[V1 >: V](that: OldHashMap[K, V1], level: Int, merger: Merger[K, V1]): OldHashMap[K, V1] = that match { + case hm: OldHashMap1[_, _] => + this.updated0(hm.key, hm.hash, level, hm.value.asInstanceOf[V1], hm.kv, merger) + case hm: HashTrieMap[_, _] => + val that = hm.asInstanceOf[HashTrieMap[K, V1]] + val thiselems = this.elems + val thatelems = that.elems + var thisbm = this.bitmap + var thatbm = that.bitmap + + // determine the necessary size for the array + val subcount = Integer.bitCount(thisbm | thatbm) + + // construct a new array of appropriate size + val merged = new Array[OldHashMap[K, V1]](subcount) + + // run through both bitmaps and add elements to it + var i = 0 + var thisi = 0 + var thati = 0 + var totalelems = 0 + while (i < subcount) { + val thislsb = thisbm ^ (thisbm & (thisbm - 1)) + val thatlsb = thatbm ^ (thatbm & (thatbm - 1)) + + // collision + if (thislsb == thatlsb) { + val m = thiselems(thisi).merge0(thatelems(thati), level + 5, merger) + totalelems += m.size + merged(i) = m + thisbm = thisbm & ~thislsb 
+ thatbm = thatbm & ~thatlsb + thati += 1 + thisi += 1 + } else { + if (Integer.compareUnsigned(thislsb - 1, thatlsb - 1) < 0) { + val m = thiselems(thisi) + totalelems += m.size + merged(i) = m + thisbm = thisbm & ~thislsb + thisi += 1 + } + else { + val m = thatelems(thati) + totalelems += m.size + merged(i) = m + thatbm = thatbm & ~thatlsb + thati += 1 + } + } + i += 1 + } + + new HashTrieMap[K, V1](this.bitmap | that.bitmap, merged, totalelems) + case hm: OldHashMapCollision1[_, _] => that.merge0(this, level, merger.invert) + case hm: OldHashMap[_, _] => this + } + } + + // scalac generates a `readReplace` method to discard the deserialized state (see https://github.com/scala/bug/issues/10412). + // This prevents it from serializing it in the first place: + private[this] def writeObject(out: ObjectOutputStream): Unit = () + private[this] def readObject(in: ObjectInputStream): Unit = () +} diff --git a/core/src/main/scala/scala/collection/immutable/OldHashSet.scala b/core/src/main/scala/scala/collection/immutable/OldHashSet.scala new file mode 100644 index 00000000..f6ba2f24 --- /dev/null +++ b/core/src/main/scala/scala/collection/immutable/OldHashSet.scala @@ -0,0 +1,998 @@ +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import mutable.{Builder, ImmutableBuilder} +import Hashing.computeHash +import java.lang.{Integer, System} + +import scala.collection.generic.BitOperations +import scala.annotation.tailrec + +/** This class implements immutable sets using a hash trie. + * + * '''Note:''' The builder of this hash set may return specialized representations for small sets. + * + * @tparam A the type of the elements contained in this hash set. + * + * @author Martin Odersky + * @author Tiark Rompf + * @since 2.3 + * @define Coll `immutable.OldHashSet` + * @define coll immutable hash set + */ +sealed abstract class OldHashSet[A] + extends AbstractSet[A] + with SetOps[A, OldHashSet, OldHashSet[A]] + with StrictOptimizedIterableOps[A, OldHashSet, OldHashSet[A]] { + + import OldHashSet.{bufferSize, LeafOldHashSet, nullToEmpty} + + override protected[this] def className: String = "OldHashSet" + + override def iterableFactory = OldHashSet + + def contains(elem: A): Boolean = get0(elem, computeHash(elem), 0) + + def incl(elem: A): OldHashSet[A] = updated0(elem, computeHash(elem), 0) + + def excl(elem: A): OldHashSet[A] = nullToEmpty(removed0(elem, computeHash(elem), 0)) + + override def subsetOf(that: collection.Set[A]): Boolean = that match { + case that:OldHashSet[A] => + // call the specialized implementation with a level of 0 since both this and that are top-level hash sets + subsetOf0(that, 0) + case _ => + // call the generic implementation + super.subsetOf(that) + } + + override def concat(that: collection.IterableOnce[A]): OldHashSet[A] = that match { + case that: OldHashSet[A] => + val buffer = new Array[OldHashSet[A]](bufferSize(this.size + that.size)) + nullToEmpty(union0(that, 0, buffer, 0)) + case _ => super.concat(that) + } + + override def intersect(that: collection.Set[A]): OldHashSet[A] = that match { + case that: OldHashSet[A] => + val buffer = new Array[OldHashSet[A]](bufferSize(this.size min that.size)) + nullToEmpty(intersect0(that, 0, buffer, 0)) + case _ => super.intersect(that) + } + + override def diff(that: collection.Set[A]): OldHashSet[A] = that match { + case that: OldHashSet[A] => + val buffer = new Array[OldHashSet[A]](bufferSize(this.size)) + nullToEmpty(diff0(that, 0, buffer, 0)) + case _ 
=> super.diff(that) + } + + override def filter(p: A => Boolean) = { + val buffer = new Array[OldHashSet[A]](bufferSize(size)) + nullToEmpty(filter0(p, false, 0, buffer, 0)) + } + + override def filterNot(p: A => Boolean) = { + val buffer = new Array[OldHashSet[A]](bufferSize(size)) + nullToEmpty(filter0(p, true, 0, buffer, 0)) + } + + override def tail: OldHashSet[A] = this - head + + override def init: OldHashSet[A] = this - last + + protected def get0(key: A, hash: Int, level: Int): Boolean + + protected[collection] def updated0(key: A, hash: Int, level: Int): OldHashSet[A] + + protected def removed0(key: A, hash: Int, level: Int): OldHashSet[A] + + protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] + + /** + * A specialized implementation of subsetOf for when both this and that are OldHashSet[A] and we can take advantage + * of the tree structure of both operands and the precalculated hashcodes of the OldHashSet1 instances. + * @param that the other set + * @param level the level of this and that hashset + * The purpose of level is to keep track of how deep we are in the tree. + * We need this information for when we arrive at a leaf and have to call get0 on that + * The value of level is 0 for a top-level OldHashSet and grows in increments of 5 + * @return true if all elements of this set are contained in that set + */ + protected def subsetOf0(that: OldHashSet[A], level: Int): Boolean + + /** + * Union with a leaf OldHashSet at a given level. + * @param that a leaf OldHashSet + * @param level the depth in the tree. We need this when we have to create a branch node on top of this and that + * @return The union of this and that at the given level. Unless level is zero, the result is not a self-contained + * OldHashSet but needs to be stored at the correct depth + */ + private[immutable] def union0(that: LeafOldHashSet[A], level: Int): OldHashSet[A] + + /** + * Union with a OldHashSet at a given level + * @param that a OldHashSet + * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree + * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes + * @param offset0 the first offset into the buffer in which we are allowed to write + * @return The union of this and that at the given level. Unless level is zero, the result is not a self-contained + * OldHashSet but needs to be stored at the correct depth + */ + protected def union0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] + + /** + * Intersection with another hash set at a given level + * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree + * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes + * @param offset0 the first offset into the buffer in which we are allowed to write + * @return The intersection of this and that at the given level. Unless level is zero, the result is not a + * self-contained OldHashSet but needs to be stored at the correct depth + */ + protected def intersect0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] + + /** + * Diff with another hash set at a given level + * @param level the depth in the tree. 
We need to keep track of the level to know how deep we are in the tree + * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes + * @param offset0 the first offset into the buffer in which we are allowed to write + * @return The diff of this and that at the given level. Unless level is zero, the result is not a + * self-contained OldHashSet but needs to be stored at the correct depth + */ + protected def diff0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] + +} + +/** + * $factoryInfo + * @define Coll `immutable.OldHashSet` + * @define coll immutable hash set + */ +@SerialVersionUID(3L) +object OldHashSet extends IterableFactory[OldHashSet] { + + def from[A](it: collection.IterableOnce[A]): OldHashSet[A] = + it match { + case hs: OldHashSet[A] => hs + case _ => (newBuilder[A] ++= it).result() + } + + def empty[A]: OldHashSet[A] = EmptyOldHashSet.asInstanceOf[OldHashSet[A]] + + def newBuilder[A]: Builder[A, OldHashSet[A]] = + new ImmutableBuilder[A, OldHashSet[A]](empty) { + def addOne(elem: A): this.type = { elems = elems + elem; this } + } + + private object EmptyOldHashSet extends OldHashSet[Any] { + + def iterator: Iterator[Any] = Iterator.empty + override def isEmpty: Boolean = true + override def knownSize: Int = 0 + override def foreach[U](f: Any => U): Unit = () + + override def head: Any = throw new NoSuchElementException("Empty Set") + + override def headOption: None.type = None + + override def tail: OldHashSet[Any] = throw new NoSuchElementException("Empty Set") + + override def init: OldHashSet[Any] = throw new NoSuchElementException("Empty Set") + + override def size: Int = 0 + + protected def get0(elem: Any, hash: Int, level: Int) = false + + protected def subsetOf0(that: OldHashSet[Any], level: Int): Boolean = { + // returns true because the empty set is a subset of all sets + true + } + + protected[collection] def updated0(elem: Any, hash: Int, level: Int) = new OldHashSet1(elem, hash) + + protected def removed0(key: Any, hash: Int, level: Int) = this + + private[immutable] def union0(that: LeafOldHashSet[Any], level: Int): OldHashSet[Any] = that + + protected def union0(that: OldHashSet[Any], level: Int, buffer: Array[OldHashSet[Any]], offset0: Int): OldHashSet[Any] = that + + protected def intersect0(that: OldHashSet[Any], level: Int, buffer: Array[OldHashSet[Any]], offset0: Int): OldHashSet[Any] = null + + protected def diff0(that: OldHashSet[Any], level: Int, buffer: Array[OldHashSet[Any]], offset0: Int): OldHashSet[Any] = null + + protected def filter0(p: Any => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashSet[Any]], offset0: Int): OldHashSet[Any] = null + + } + + /** + * Common superclass of OldHashSet1 and OldHashSetCollision1, which are the two possible leaves of the Trie + */ + private[immutable] sealed abstract class LeafOldHashSet[A] extends OldHashSet[A] { + private[OldHashSet] def hash:Int + } + + private[immutable] final class OldHashSet1[A](private[OldHashSet] val key: A, private[OldHashSet] val hash: Int) extends LeafOldHashSet[A] { + override def isEmpty: Boolean = false + override def knownSize: Int = 1 + def iterator: Iterator[A] = Iterator.single(key) + + override def foreach[U](f: A => U): Unit = f(key) + + override def head: A = key + + override def headOption: Some[A] = Some(key) + + override def tail: OldHashSet[A] = OldHashSet.empty[A] + + override def last: A = key + + override def init: OldHashSet[A] = OldHashSet.empty[A] + + 
override def size: Int = 1 + + protected def get0(key: A, hash: Int, level: Int) = + (hash == this.hash && key == this.key) + + protected[collection] def updated0(key: A, hash: Int, level: Int) = + if (hash == this.hash && key == this.key) this + else { + if (hash != this.hash) { + makeHashTrieSet(this.hash, this, hash, new OldHashSet1(key, hash), level) + } else { + // 32-bit hash collision (rare, but not impossible) + new OldHashSetCollision1(hash, ListSet.empty + this.key + key) + } + } + + protected def removed0(key: A, hash: Int, level: Int) = + if (hash == this.hash && key == this.key) null else this + + protected def subsetOf0(that: OldHashSet[A], level: Int): Boolean = { + // check if that contains this.key + // we use get0 with our key and hash at the correct level instead of calling contains, + // which would not work since that might not be a top-level OldHashSet + // and in any case would be inefficient because it would require recalculating the hash code + that.get0(key, hash, level) + } + + private[immutable] def union0(that: LeafOldHashSet[A], level: Int): OldHashSet[A] = that match { + case that if that.hash != this.hash => + // different hash code, so there is no need to investigate further. + // Just create a branch node containing the two. + makeHashTrieSet(this.hash, this, that.hash, that, level) + case that: OldHashSet1[A] => + if (this.key == that.key) { + this + } else { + // 32-bit hash collision (rare, but not impossible) + new OldHashSetCollision1[A](hash, ListSet.empty + this.key + that.key) + } + case that: OldHashSetCollision1[A] => + val ks1 = that.ks + key + // Could use eq check (faster) if ListSet was guaranteed to return itself + if (ks1.size == that.ks.size) { + that + } else { + new OldHashSetCollision1[A](hash, ks1) + } + } + + protected def union0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int) = { + // switch to the Leaf version of union + // we can exchange the arguments because union is symmetrical + that.union0(this, level) + } + + protected def intersect0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int) = + if (that.get0(key, hash, level)) this else null + + protected def diff0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int) = + if (that.get0(key, hash, level)) null else this + + protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] = + if (negate ^ p(key)) this else null + } + + private[immutable] final class OldHashSetCollision1[A](private[OldHashSet] val hash: Int, val ks: ListSet[A]) extends LeafOldHashSet[A] { + override def isEmpty: Boolean = false + override def size = ks.size + + def iterator: Iterator[A] = ks.iterator + + override def foreach[U](f: A => U): Unit = ks.foreach(f) + + protected def get0(key: A, hash: Int, level: Int) = + if (hash == this.hash) ks.contains(key) else false + + protected[collection] def updated0(key: A, hash: Int, level: Int): OldHashSet[A] = + if (hash == this.hash) new OldHashSetCollision1(hash, ks + key) + else makeHashTrieSet(this.hash, this, hash, new OldHashSet1(key, hash), level) + + protected def removed0(key: A, hash: Int, level: Int): OldHashSet[A] = + if (hash == this.hash) { + val ks1 = ks - key + ks1.size match { + case 0 => + // the empty set + null + case 1 => + // create a new OldHashSet1 with the hash we already know + new OldHashSet1(ks1.head, hash) + case size if size == ks.size => + // Should only have HSC1 if size > 1 + this + case _ 
=> + // create a new OldHashSetCollision with the hash we already know and the new keys + new OldHashSetCollision1(hash, ks1) + } + } else this + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + // this cannot work - reading things in might produce different + // hash codes and remove the collision. however this is never called + // because no references to this class are ever handed out to client code + // and HashTrieSet serialization takes care of the situation + sys.error("cannot serialize an immutable.OldHashSet where all items have the same 32-bit hash code") + //out.writeObject(kvs) + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + sys.error("cannot deserialize an immutable.OldHashSet where all items have the same 32-bit hash code") + //kvs = in.readObject().asInstanceOf[ListSet[A]] + //hash = computeHash(kvs.) + } + + protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] = { + val ks1 = if(negate) ks.filterNot(p) else ks.filter(p) + ks1.size match { + case 0 => + null + case 1 => + new OldHashSet1(ks1.head, hash) + case x if x == ks.size => + this + case _ => + new OldHashSetCollision1(hash, ks1) + } + } + + protected def subsetOf0(that: OldHashSet[A], level: Int): Boolean = { + // we have to check each element + // we use get0 with our hash at the correct level instead of calling contains, + // which would not work since that might not be a top-level OldHashSet + // and in any case would be inefficient because it would require recalculating the hash code + ks.forall(key => that.get0(key, hash, level)) + } + + private[immutable] def union0(that: LeafOldHashSet[A], level: Int) = that match { + case that if that.hash != this.hash => + // different hash code, so there is no need to investigate further. + // Just create a branch node containing the two. + makeHashTrieSet(this.hash, this, that.hash, that, level) + case that: OldHashSet1[A] => + val ks1 = ks + that.key + // Could use eq check (faster) if ListSet was guaranteed to return itself + if (ks1.size == ks.size) { + this + } else { + // create a new OldHashSetCollision with the existing hash + // we don't have to check for size=1 because union is never going to remove elements + new OldHashSetCollision1[A](hash, ks1) + } + case that: OldHashSetCollision1[A] => + val ks1 = this.ks ++ that.ks + ks1.size match { + case size if size == this.ks.size => + // could this check be made faster by doing an eq check? + // I am not sure we can rely on ListSet returning itself when all elements are already in the set, + // so it seems unwise to rely on it. 
+ this + case size if size == that.ks.size => + // we have to check this as well, since we don't want to create a new instance if this is a subset of that + that + case _ => + // create a new OldHashSetCollision with the existing hash + // we don't have to check for size=1 because union is never going to remove elements + new OldHashSetCollision1[A](hash, ks1) + } + } + + protected def union0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int) = that match { + case that: LeafOldHashSet[A] => + // switch to the simpler Tree/Leaf implementation + this.union0(that, level) + case that: HashTrieSet[A] => + // switch to the simpler Tree/Leaf implementation + // we can swap this and that because union is symmetrical + that.union0(this, level) + case _ => this + } + + protected def intersect0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int) = { + // filter the keys, taking advantage of the fact that we know their hash code + val ks1 = ks.filter(that.get0(_, hash, level)) + ks1.size match { + case 0 => + // the empty set + null + case size if size == this.size => + // unchanged + // We do this check first because, even if the result is of size 1, + // it is preferable to return the existing set for better structural sharing + this + case size if size == that.size => + // the other set + // We do this check first because, even if the result is of size 1, + // it is preferable to return the existing set for better structural sharing + that + case 1 => + // create a new OldHashSet1 with the hash we already know + new OldHashSet1(ks1.head, hash) + case _ => + // create a new OldHashSetCollision with the hash we already know and the new keys + new OldHashSetCollision1(hash, ks1) + } + } + + protected def diff0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int) = { + val ks1 = ks.filterNot(that.get0(_, hash, level)) + ks1.size match { + case 0 => + // the empty set + null + case size if size == this.size => + // unchanged + // We do this check first because, even if the result is of size 1, + // it is preferable to return the existing set for better structural sharing + this + case 1 => + // create a new OldHashSet1 with the hash we already know + new OldHashSet1(ks1.head, hash) + case _ => + // create a new OldHashSetCollision with the hash we already know and the new keys + new OldHashSetCollision1(hash, ks1) + } + } + + } + + /** + * A branch node of the HashTrieSet with at least one and up to 32 children. + * + * @param bitmap encodes which element corresponds to which child + * @param elems the up to 32 children of this node. + * the number of children must be identical to the number of 1 bits in bitmap + * @param size0 the total number of elements. This is stored just for performance reasons. + * @tparam A the type of the elements contained in this hash set. + * + * How levels work: + * + * When looking up or adding elements, the part of the hashcode that is used to address the children array depends + * on how deep we are in the tree. This is accomplished by having a level parameter in all internal methods + * that starts at 0 and increases by 5 (32 = 2^5) every time we go deeper into the tree. + * + * hashcode (binary): 00000000000000000000000000000000 + * level=0 (depth=0) ^^^^^ + * level=5 (depth=1) ^^^^^ + * level=10 (depth=2) ^^^^^ + * ... + * + * Be careful: a non-toplevel HashTrieSet is not a self-contained set, so e.g. calling contains on it will not work!
+ * It relies on its depth in the Trie for which part of a hash to use to address the children, but this information + * (the level) is not stored due to storage efficiency reasons but has to be passed explicitly! + * + * How bitmap and elems correspond: + * + * A naive implementation of a HashTrieSet would always have an array of size 32 for children and leave the unused + * children empty (null). But that would be very wasteful regarding memory. Instead, only non-empty children are + * stored in elems, and the bitmap is used to encode which elem corresponds to which child bucket. The lowest 1 bit + * corresponds to the first element, the second-lowest to the second, etc. + * + * bitmap (binary): 00010000000000000000100000000000 + * elems: [a,b] + * children: ---b----------------a----------- + */ + private[collection] final class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[OldHashSet[A]], private val size0: Int) + extends OldHashSet[A] { + assert(Integer.bitCount(bitmap) == elems.length) + // assertion has to remain disabled until SI-6197 is solved + // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieSet[_]])) + override def size = size0 + override def isEmpty: Boolean = false + override def knownSize: Int = size + def iterator: Iterator[A] = new TrieIterator[A](elems.asInstanceOf[Array[Iterable[A]]]) { + final override def getElem(cc: AnyRef): A = cc.asInstanceOf[OldHashSet1[A]].key + } + + override def foreach[U](f: A => U): Unit = { + var i = 0 + while (i < elems.length) { + elems(i).foreach(f) + i += 1 + } + } + + protected def get0(key: A, hash: Int, level: Int) = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + if (bitmap == - 1) { + elems(index & 0x1f).get0(key, hash, level + 5) + } else if ((bitmap & mask) != 0) { + val offset = Integer.bitCount(bitmap & (mask-1)) + elems(offset).get0(key, hash, level + 5) + } else + false + } + + protected[collection] def updated0(key: A, hash: Int, level: Int) = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask-1)) + if ((bitmap & mask) != 0) { + val sub = elems(offset) + val subNew = sub.updated0(key, hash, level + 5) + if (sub eq subNew) this + else { + val elemsNew = java.util.Arrays.copyOf(elems, elems.length) + elemsNew(offset) = subNew + new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size)) + } + } else { + val elemsNew = new Array[OldHashSet[A]](elems.length + 1) + Array.copy(elems, 0, elemsNew, 0, offset) + elemsNew(offset) = new OldHashSet1(key, hash) + Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset) + val bitmapNew = bitmap | mask + new HashTrieSet(bitmapNew, elemsNew, size + 1) + } + } + + protected def removed0(key: A, hash: Int, level: Int): OldHashSet[A] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask-1)) + if ((bitmap & mask) != 0) { + val sub = elems(offset) + val subNew = sub.removed0(key, hash, level + 5) + if (sub eq subNew) this + else if (subNew eq null) { + val bitmapNew = bitmap ^ mask + if (bitmapNew != 0) { + val elemsNew = new Array[OldHashSet[A]](elems.length - 1) + Array.copy(elems, 0, elemsNew, 0, offset) + Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1) + val sizeNew = size - sub.size + // if we have only one child, which is not a HashTrieSet but a self-contained set like + // OldHashSet1 or OldHashSetCollision1, return the child instead + if 
(elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieSet[_]]) + elemsNew(0) + else + new HashTrieSet(bitmapNew, elemsNew, sizeNew) + } else + null + } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieSet[_]]) { + subNew + } else { + val elemsNew = java.util.Arrays.copyOf(elems, elems.length) + elemsNew(offset) = subNew + val sizeNew = size + (subNew.size - sub.size) + new HashTrieSet(bitmap, elemsNew, sizeNew) + } + } else { + this + } + } + + private[immutable] def union0(that: LeafOldHashSet[A], level: Int): OldHashSet[A] = { + val index = (that.hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask - 1)) + if ((bitmap & mask) != 0) { + val sub = elems(offset) + val sub1 = sub.union0(that, level + 5) + if (sub eq sub1) this + else { + val elems1 = new Array[OldHashSet[A]](elems.length) + Array.copy(elems, 0, elems1, 0, elems.length) + elems1(offset) = sub1 + new HashTrieSet(bitmap, elems1, size + (sub1.size - sub.size)) + } + } else { + val elems1 = new Array[OldHashSet[A]](elems.length + 1) + Array.copy(elems, 0, elems1, 0, offset) + elems1(offset) = that + Array.copy(elems, offset, elems1, offset + 1, elems.length - offset) + val bitmap1 = bitmap | mask + new HashTrieSet(bitmap1, elems1, size + that.size) + } + } + + protected def union0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] = that match { + case that if that eq this => + // shortcut for when that is this + // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" + // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B + // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking + // at these nodes. + this + case that: LeafOldHashSet[A] => + // when that is a leaf, we can switch to the simpler Tree/Leaf implementation + this.union0(that, level) + case that: HashTrieSet[A] => + val a = this.elems + var abm = this.bitmap + var ai = 0 + + val b = that.elems + var bbm = that.bitmap + var bi = 0 + + // fetch a new temporary array that is guaranteed to be big enough (32 elements) + var offset = offset0 + var rs = 0 + + // loop as long as there are bits left in either abm or bbm + while ((abm | bbm) != 0) { + // lowest remaining bit in abm + val alsb = abm ^ (abm & (abm - 1)) + // lowest remaining bit in bbm + val blsb = bbm ^ (bbm & (bbm - 1)) + if (alsb == blsb) { + val sub1 = a(ai).union0(b(bi), level + 5, buffer, offset) + rs += sub1.size + buffer(offset) = sub1 + offset += 1 + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb + ai += 1 + // clear lowest remaining one bit in bbm and increase the b index + bbm &= ~blsb + bi += 1 + } else if (BitOperations.Int.unsignedCompare(alsb - 1, blsb - 1)) { + // alsb is smaller than blsb, or alsb is set and blsb is 0 + // in any case, alsb is guaranteed to be set here! + val sub1 = a(ai) + rs += sub1.size + buffer(offset) = sub1 + offset += 1 + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb + ai += 1 + } else { + // blsb is smaller than alsb, or blsb is set and alsb is 0 + // in any case, blsb is guaranteed to be set here! 
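+ // (the comparison works because subtracting 1 turns a one-bit mask into a run of ones below it, + // and an exhausted bitmap yields 0 - 1, the unsigned maximum, which never wins the comparison)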
+ val sub1 = b(bi) + rs += sub1.size + buffer(offset) = sub1 + offset += 1 + // clear lowest remaining one bit in bbm and increase the b index + bbm &= ~blsb + bi += 1 + } + } + if (rs == this.size) { + // if the result would be identical to this, we might as well return this + this + } else if (rs == that.size) { + // if the result would be identical to that, we might as well return that + that + } else { + // we don't have to check whether the result is a leaf, since union will only make the set larger + // and this is not a leaf to begin with. + val length = offset - offset0 + val elems = new Array[OldHashSet[A]](length) + System.arraycopy(buffer, offset0, elems, 0, length) + new HashTrieSet(this.bitmap | that.bitmap, elems, rs) + } + case _ => this + } + + protected def intersect0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] = that match { + case that if that eq this => + // shortcut for when that is this + // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" + // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B + // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking + // at these nodes! + this + case that: LeafOldHashSet[A] => + // when that is a leaf, we can switch to the simpler Tree/Leaf implementation + // it is OK to swap the arguments because intersect is symmetric + // (we can't do this in case of diff, which is not symmetric) + that.intersect0(this, level, buffer, offset0) + case that: HashTrieSet[A] => + val a = this.elems + var abm = this.bitmap + var ai = 0 + + val b = that.elems + var bbm = that.bitmap + var bi = 0 + + // if the bitmasks do not overlap, the result is definitely empty so we can abort here + if ((abm & bbm) == 0) + return null + + // fetch a new temporary array that is guaranteed to be big enough (32 elements) + var offset = offset0 + var rs = 0 + var rbm = 0 + + // loop as long as there are bits left that are set in both abm and bbm + while ((abm & bbm) != 0) { + // lowest remaining bit in abm + val alsb = abm ^ (abm & (abm - 1)) + // lowest remaining bit in bbm + val blsb = bbm ^ (bbm & (bbm - 1)) + if (alsb == blsb) { + val sub1 = a(ai).intersect0(b(bi), level + 5, buffer, offset) + if (sub1 ne null) { + rs += sub1.size + rbm |= alsb + buffer(offset) = sub1 + offset += 1 + } + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb + ai += 1 + // clear lowest remaining one bit in bbm and increase the b index + bbm &= ~blsb + bi += 1 + } else if (BitOperations.Int.unsignedCompare(alsb - 1, blsb - 1)) { + // alsb is smaller than blsb, or alsb is set and blsb is 0 + // in any case, alsb is guaranteed to be set here! + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb + ai += 1 + } else { + // blsb is smaller than alsb, or blsb is set and alsb is 0 + // in any case, blsb is guaranteed to be set here!
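+ // unlike in union0, nothing is copied in the two skewed branches: a bucket present in only + // one of the two sets cannot contribute anything to the intersection, so it is just skipped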
+ // clear lowest remaining one bit in bbm and increase the b index + bbm &= ~blsb + bi += 1 + } + } + + if (rbm == 0) { + // if the result bitmap is empty, the result is the empty set + null + } else if (rs == size0) { + // if the result has the same number of elements as this, it must be identical to this, + // so we might as well return this + this + } else if (rs == that.size0) { + // if the result has the same number of elements as that, it must be identical to that, + // so we might as well return that + that + } else { + val length = offset - offset0 + if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) + buffer(offset0) + else { + val elems = new Array[OldHashSet[A]](length) + System.arraycopy(buffer, offset0, elems, 0, length) + new HashTrieSet[A](rbm, elems, rs) + } + } + case _ => null + } + + protected def diff0(that: OldHashSet[A], level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] = that match { + case that if that eq this => + // shortcut for when that is this + // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" + // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B + // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking + // at these nodes! + null + case that: OldHashSet1[A] => + removed0(that.key, that.hash, level) + case that: HashTrieSet[A] => + val a = this.elems + var abm = this.bitmap + var ai = 0 + + val b = that.elems + var bbm = that.bitmap + var bi = 0 + + // fetch a new temporary array that is guaranteed to be big enough (32 elements) + var offset = offset0 + var rs = 0 + var rbm = 0 + + // loop until there are no more bits in abm + while(abm!=0) { + // lowest remaining bit in abm + val alsb = abm ^ (abm & (abm - 1)) + // lowest remaining bit in bbm + val blsb = bbm ^ (bbm & (bbm - 1)) + if (alsb == blsb) { + val sub1 = a(ai).diff0(b(bi), level + 5, buffer, offset) + if (sub1 ne null) { + rs += sub1.size + rbm |= alsb + buffer(offset) = sub1 + offset += 1 + } + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb; ai += 1 + // clear lowest remaining one bit in bbm and increase the b index + bbm &= ~blsb; bi += 1 + } else if (BitOperations.Int.unsignedCompare(alsb - 1, blsb - 1)) { + // alsb is smaller than blsb, or alsb is set and blsb is 0 + // in any case, alsb is guaranteed to be set here! + val sub1 = a(ai) + rs += sub1.size + rbm |= alsb + buffer(offset) = sub1; offset += 1 + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb; ai += 1 + } else { + // blsb is smaller than alsb, or blsb is set and alsb is 0 + // in any case, blsb is guaranteed to be set here!
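+ // a bucket that exists only in that removes nothing from this, so it is skipped as well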
+ // clear lowest remaining one bit in bbm and increase the b index + bbm &= ~blsb; bi += 1 + } + } + if (rbm == 0) { + null + } else if (rs == this.size0) { + // if the result has the same number of elements as this, it must be identical to this, + // so we might as well return this + this + } else { + val length = offset - offset0 + if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) + buffer(offset0) + else { + val elems = new Array[OldHashSet[A]](length) + System.arraycopy(buffer, offset0, elems, 0, length) + new HashTrieSet[A](rbm, elems, rs) + } + } + case that: OldHashSetCollision1[A] => + // we remove the elements using removed0 so we can use the fact that we know the hash of all elements + // to be removed + @tailrec def removeAll(s:OldHashSet[A], r:ListSet[A]) : OldHashSet[A] = + if(r.isEmpty || (s eq null)) s + else removeAll(s.removed0(r.head, that.hash, level), r.tail) + removeAll(this, that.ks) + case _ => this + } + + protected def subsetOf0(that: OldHashSet[A], level: Int): Boolean = if (that eq this) true else that match { + case that: HashTrieSet[A] if this.size0 <= that.size0 => + // create local mutable copies of members + var abm = this.bitmap + val a = this.elems + var ai = 0 + val b = that.elems + var bbm = that.bitmap + var bi = 0 + if ((abm & bbm) == abm) { + // I tried rewriting this using tail recursion, but the generated java byte code was less than optimal + while(abm!=0) { + // lowest remaining bit in abm + val alsb = abm ^ (abm & (abm - 1)) + // lowest remaining bit in bbm + val blsb = bbm ^ (bbm & (bbm - 1)) + // if both trees have a bit set at the same position, we need to check the subtrees + if (alsb == blsb) { + // we are doing a comparison of a child of this with a child of that, + // so we have to increase the level by 5 to keep track of how deep we are in the tree + if (!a(ai).subsetOf0(b(bi), level + 5)) + return false + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb; ai += 1 + } + // clear lowest remaining one bit in bbm and increase the b index + // we must do this in any case + bbm &= ~blsb; bi += 1 + } + true + } else { + // the bitmap of this contains more one bits than the bitmap of that, + // so this cannot possibly be a subset of that + false + } + case _ => + // if the other set is a HashTrieSet but has fewer elements than this, it cannot be a subset + // if the other set is an OldHashSet1, we cannot be a subset of it because we are a HashTrieSet with at least two children (see assertion) + // if the other set is an OldHashSetCollision1, we cannot be a subset of it because we are a HashTrieSet with at least two different hash codes + // if the other set is the empty set, we are not a subset of it because we are not empty + false + } + + protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[OldHashSet[A]], offset0: Int): OldHashSet[A] = { + // current offset + var offset = offset0 + // result size + var rs = 0 + // bitmap for kept elems + var kept = 0 + // loop over all elements + var i = 0 + while (i < elems.length) { + val result = elems(i).filter0(p, negate, level + 5, buffer, offset) + if (result ne null) { + buffer(offset) = result + offset += 1 + // add the result size + rs += result.size + // mark the bit i as kept + kept |= (1 << i) + } + i += 1 + } + if (offset == offset0) { + // empty + null + } else if (rs == size0) { + // unchanged + this + } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) { + // leaf
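+ // a single remaining child that is not itself a HashTrieSet is self-contained, + // so it can be returned directly, collapsing this level of the trie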
+ buffer(offset0) + } else { + // we have to return a HashTrieSet + val length = offset - offset0 + val elems1 = new Array[OldHashSet[A]](length) + System.arraycopy(buffer, offset0, elems1, 0, length) + val bitmap1 = if (length == elems.length) { + // we can reuse the original bitmap + bitmap + } else { + // calculate new bitmap by keeping just bits in the kept bitmask + Hashing.keepBits(bitmap, kept) + } + new HashTrieSet(bitmap1, elems1, rs) + } + } + } + + // utility method to create a HashTrieSet from two leaf OldHashSets (OldHashSet1 or OldHashSetCollision1) with non-colliding hash codes + private def makeHashTrieSet[A](hash0:Int, elem0:OldHashSet[A], hash1:Int, elem1:OldHashSet[A], level:Int) : HashTrieSet[A] = { + val index0 = (hash0 >>> level) & 0x1f + val index1 = (hash1 >>> level) & 0x1f + if(index0 != index1) { + val bitmap = (1 << index0) | (1 << index1) + val elems = new Array[OldHashSet[A]](2) + if(index0 < index1) { + elems(0) = elem0 + elems(1) = elem1 + } else { + elems(0) = elem1 + elems(1) = elem0 + } + new HashTrieSet[A](bitmap, elems, elem0.size + elem1.size) + } else { + val elems = new Array[OldHashSet[A]](1) + val bitmap = (1 << index0) + val child = makeHashTrieSet(hash0, elem0, hash1, elem1, level + 5) + elems(0) = child + new HashTrieSet[A](bitmap, elems, child.size) + } + } + + /** + * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection + * @param size the maximum size of the collection to be generated + * @return the maximum buffer size + */ + @`inline` private def bufferSize(size: Int): Int = (size + 6) min (32 * 7) + + /** + * In many internal operations the empty set is represented as null for performance reasons. This method converts + * null to the empty set for use in public methods + */ + @`inline` private def nullToEmpty[A](s: OldHashSet[A]): OldHashSet[A] = if (s eq null) empty[A] else s + + // scalac generates a `readReplace` method to discard the deserialized state (see https://github.com/scala/bug/issues/10412). + // This prevents it from serializing it in the first place: + private[this] def writeObject(out: ObjectOutputStream): Unit = () + private[this] def readObject(in: ObjectInputStream): Unit = () +} diff --git a/core/src/main/scala/scala/collection/immutable/TrieIterator.scala b/core/src/main/scala/scala/collection/immutable/TrieIterator.scala new file mode 100644 index 00000000..423eaac2 --- /dev/null +++ b/core/src/main/scala/scala/collection/immutable/TrieIterator.scala @@ -0,0 +1,216 @@ +package scala + +package collection +package immutable + +import OldHashMap.{ HashTrieMap, OldHashMapCollision1, OldHashMap1 } +import OldHashSet.{OldHashSet1, OldHashSetCollision1, HashTrieSet} +import collection.Iterator +import scala.collection.mutable.ArrayBuffer + +import scala.annotation.unchecked.{uncheckedVariance => uV} +import scala.annotation.tailrec + +/** Abandons any pretense of type safety for speed. You can't say I + * didn't try: see r23934.
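+ * The iterator replaces recursion with an explicit stack (arrayStack/posStack below): a 32-bit + * hash consumed 5 bits per level yields at most 7 trie levels, so 6 stacked parent arrays plus + * the current one (arrayD) always suffice.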
+ */ +private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) extends collection.Iterator[T] { + outer => + + private[immutable] def getElem(x: AnyRef): T + + def initDepth = 0 + def initArrayStack: Array[Array[Iterable[T @uV]]] = new Array[Array[Iterable[T]]](6) + def initPosStack = new Array[Int](6) + def initArrayD: Array[Iterable[T @uV]] = elems + def initPosD = 0 + def initSubIter: Iterator[T] = null // to traverse collision nodes + + private[this] var depth = initDepth + private[this] var arrayStack: Array[Array[Iterable[T @uV]]] = initArrayStack + private[this] var posStack = initPosStack + private[this] var arrayD: Array[Iterable[T @uV]] = initArrayD + private[this] var posD = initPosD + private[this] var subIter = initSubIter + + private[this] def getElems(x: Iterable[T]): Array[Iterable[T]] = (x match { + case x: HashTrieMap[_, _] => x.elems + case x: HashTrieSet[_] => x.elems + }).asInstanceOf[Array[Iterable[T]]] + + private[this] def collisionToArray(x: Iterable[T]): Array[Iterable[T]] = (x match { + case x: OldHashMapCollision1[_, _] => x.kvs.map((x: (Any, Any)) => OldHashMap(x)).toArray + case x: OldHashSetCollision1[_] => x.ks.map(x => OldHashSet(x)).toArray + }).asInstanceOf[Array[Iterable[T]]] + + private[this] type SplitIterators = ((Iterator[T], Int), Iterator[T]) + + private def isTrie(x: AnyRef) = x match { + case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true + case _ => false + } + private def isContainer(x: AnyRef) = x match { + case _: OldHashMap1[_, _] | _: OldHashSet1[_] => true + case _ => false + } + + final class DupIterator(xs: Array[Iterable[T]] @uV) extends TrieIterator[T](xs) { + override def initDepth = outer.depth + override def initArrayStack: Array[Array[Iterable[T @uV]]] = outer.arrayStack + override def initPosStack = outer.posStack + override def initArrayD: Array[Iterable[T @uV]] = outer.arrayD + override def initPosD = outer.posD + override def initSubIter = outer.subIter + + final override def getElem(x: AnyRef): T = outer.getElem(x) + } + + def dupIterator: TrieIterator[T] = new DupIterator(elems) + + private[this] def newIterator(xs: Array[Iterable[T]]) = new TrieIterator(xs) { + final override def getElem(x: AnyRef): T = outer.getElem(x) + } + + private[this] def iteratorWithSize(arr: Array[Iterable[T]]): (Iterator[T], Int) = + (newIterator(arr), ((arr.map(_.size): Array[Int]): scala.collection.IterableOps[Int, scala.collection.Iterable, _]).sum) + + private[this] def arrayToIterators(arr: Array[Iterable[T]]): SplitIterators = { + val (fst, snd) = arr.splitAt(arr.length / 2) + + (iteratorWithSize(snd), newIterator(fst)) + } + private[this] def splitArray(ad: Array[Iterable[T]]): SplitIterators = + if (ad.length > 1) arrayToIterators(ad) + else ad(0) match { + case _: OldHashMapCollision1[_, _] | _: OldHashSetCollision1[_] => + arrayToIterators(collisionToArray(ad(0))) + case _ => + splitArray(getElems(ad(0))) + } + + def hasNext = (subIter ne null) || depth >= 0 + @throws[NoSuchElementException] + def next(): T = { + if (subIter ne null) { + val el = subIter.next() + if (!subIter.hasNext) + subIter = null + el + } else + next0(arrayD, posD) + } + + @tailrec private[this] def next0(elems: Array[Iterable[T]], i: Int): T = { + if (i == elems.length-1) { // reached end of level, pop stack + depth -= 1 + if (depth >= 0) { + arrayD = arrayStack(depth) + posD = posStack(depth) + arrayStack(depth) = null + } else { + arrayD = null + posD = 0 + } + } else + posD += 1 + + val m = elems(i) + + // Note: this block is over twice as 
fast written this way as it is + // as a pattern match. Haven't started looking into why that is, but + // it's pretty sad the pattern matcher is that much slower. + if (isContainer(m)) + getElem(m) // push current pos onto stack and descend + else if (isTrie(m)) { + if (depth >= 0) { + arrayStack(depth) = arrayD + posStack(depth) = posD + } + depth += 1 + arrayD = getElems(m) + posD = 0 + next0(getElems(m), 0) + } + else { + subIter = m.iterator + next() + } + // The much slower version: + // + // m match { + // case _: OldHashMap1[_, _] | _: OldHashSet1[_] => + // getElem(m) // push current pos onto stack and descend + // case _: HashTrieMap[_,_] | _: HashTrieSet[_] => + // if (depth >= 0) { + // arrayStack(depth) = arrayD + // posStack(depth) = posD + // } + // depth += 1 + // arrayD = getElems(m) + // posD = 0 + // next0(getElems(m), 0) + // case _ => + // subIter = m.iterator + // next + // } + } + + // assumption: contains 2 or more elements + // splits this iterator into 2 iterators + // returns the 1st iterator, its number of elements, and the second iterator + def split: SplitIterators = { + // 0) simple case: no elements have been iterated - simply divide arrayD + if (arrayD != null && depth == 0 && posD == 0) + return splitArray(arrayD) + + // otherwise, some elements have been iterated over + // 1) collision case: if we have a subIter, we return subIter and elements after it + if (subIter ne null) { + val buff = ArrayBuffer.empty.++=(subIter) + subIter = null + ((buff.iterator, buff.length), this) + } + else { + // otherwise find the topmost array stack element + if (depth > 0) { + // 2) topmost comes before (is not) arrayD + // steal a portion of top to create a new iterator + if (posStack(0) == arrayStack(0).length - 1) { + // 2a) only a single entry left on top + // this means we have to modify this iterator - pop topmost + val snd = Array[Iterable[T]](arrayStack(0).last) + val szsnd = snd(0).size + // modify this - pop + depth -= 1 + 1 until arrayStack.length foreach (i => arrayStack(i - 1) = arrayStack(i)) + arrayStack(arrayStack.length - 1) = Array[Iterable[T]](null) + posStack = posStack.tail ++ Array[Int](0) + // we know that `this` is not empty, since it had something on the arrayStack and arrayStack elements are always non-empty + ((newIterator(snd), szsnd), this) + } else { + // 2b) more than a single entry left on top + val (fst, snd) = arrayStack(0).splitAt(arrayStack(0).length - (arrayStack(0).length - posStack(0) + 1) / 2) + arrayStack(0) = fst + (iteratorWithSize(snd), this) + } + } else { + // 3) no topmost element (arrayD is at the top) + // steal a portion of it and update this iterator + if (posD == arrayD.length - 1) { + // 3a) positioned at the last element of arrayD + val m = arrayD(posD) + arrayToIterators( + if (isTrie(m)) getElems(m) + else collisionToArray(m) + ) + } + else { + // 3b) arrayD has more free elements + val (fst, snd) = arrayD.splitAt(arrayD.length - (arrayD.length - posD + 1) / 2) + arrayD = fst + (iteratorWithSize(snd), this) + } + } + } + } +} diff --git a/core/src/main/scala/scala/collection/mutable/FlatHashTable.scala b/core/src/main/scala/scala/collection/mutable/FlatHashTable.scala new file mode 100644 index 00000000..49ed47e0 --- /dev/null +++ b/core/src/main/scala/scala/collection/mutable/FlatHashTable.scala @@ -0,0 +1,433 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import collection.{AbstractIterator, Iterator} + +import java.lang.{Integer, ThreadLocal} + +import java.lang.Integer.rotateRight +import scala.util.hashing.byteswap32 + +/** An implementation class backing a `HashSet`. + * + * This trait is used internally. It can be mixed in with various collections relying on + * hash table as an implementation. + * + * @define coll flat hash table + * @since 2.3 + * @tparam A the type of the elements contained in the $coll. + */ +private[collection] trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { + import FlatHashTable._ + + private def tableDebug = false + + private[collection] var _loadFactor = defaultLoadFactor + + /** The actual hash table. + */ + var table: Array[AnyRef] = new Array(initialCapacity) + + /** The number of mappings contained in this hash table. + */ + protected var tableSize = 0 + + /** The next size value at which to resize (capacity * load factor). + */ + protected var threshold: Int = newThreshold(_loadFactor, initialCapacity) + + /** The array keeping track of number of elements in 32 element blocks. + */ + protected var sizemap: Array[Int] = null + + protected var seedvalue: Int = tableSizeSeed + + protected def capacity(expectedSize: Int) = HashTable.nextPositivePowerOfTwo(expectedSize) + + /** The initial size of the hash table. + */ + def initialSize: Int = 32 + + def size: Int = tableSize + + private def initialCapacity = capacity(initialSize) + + protected def randomSeed = seedGenerator.get.nextInt() + + protected def tableSizeSeed = Integer.bitCount(table.length - 1) + + /** + * Initializes the collection from the input stream. `f` will be called for each element + * read from the input stream in the order determined by the stream. This is useful for + * structures where iteration order is important (e.g. LinkedHashSet). + * + * The serialization format expected is the one produced by `serializeTo`. + */ + def init(in: java.io.ObjectInputStream, f: A => Unit): Unit = { + _loadFactor = in.readInt() + assert(_loadFactor > 0) + + val size = in.readInt() + tableSize = 0 + assert(size >= 0) + + table = new Array(capacity(sizeForThreshold(size, _loadFactor))) + threshold = newThreshold(_loadFactor, table.length) + + seedvalue = in.readInt() + + val smDefined = in.readBoolean() + if (smDefined) sizeMapInit(table.length) else sizemap = null + + var index = 0 + while (index < size) { + val elem = entryToElem(in.readObject()) + f(elem) + addElem(elem) + index += 1 + } + } + + /** + * Serializes the collection to the output stream by saving the load factor, collection + * size and collection elements. `foreach` determines the order in which the elements are saved + * to the stream. To deserialize, `init` should be used. + */ + def serializeTo(out: java.io.ObjectOutputStream) = { + out.writeInt(_loadFactor) + out.writeInt(tableSize) + out.writeInt(seedvalue) + out.writeBoolean(isSizeMapDefined) + iterator.foreach(out.writeObject) + } + + /** Finds an entry in the hash table if such an element exists. */ + def findEntry(elem: A): Option[A] = + findElemImpl(elem) match { + case null => None + case entry => Some(entryToElem(entry)) + } + + + /** Checks whether an element is contained in the hash table. 
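Lookup uses open addressing: probing starts at index(hashCode) and walks the table linearly until it finds the element or an empty slot.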
*/ + def containsElem(elem: A): Boolean = { + null != findElemImpl(elem) + } + + private def findElemImpl(elem: A): AnyRef = { + val searchEntry = elemToEntry(elem) + var h = index(searchEntry.hashCode) + var curEntry = table(h) + while (null != curEntry && curEntry != searchEntry) { + h = (h + 1) % table.length + curEntry = table(h) + } + curEntry + } + + /** Add elem if not yet in table. + * @return Returns `true` if a new elem was added, `false` otherwise. + */ + def addElem(elem: A) : Boolean = { + addEntry(elemToEntry(elem)) + } + + /** + * Add an entry (an elem converted to an entry via elemToEntry) if not yet in + * table. + * @return Returns `true` if a new elem was added, `false` otherwise. + */ + protected def addEntry(newEntry : AnyRef) : Boolean = { + var h = index(newEntry.hashCode) + var curEntry = table(h) + while (null != curEntry) { + if (curEntry == newEntry) return false + h = (h + 1) % table.length + curEntry = table(h) + //Statistics.collisions += 1 + } + table(h) = newEntry + tableSize = tableSize + 1 + nnSizeMapAdd(h) + if (tableSize >= threshold) growTable() + true + + } + + /** + * Removes an elem from the hash table returning true if the element was found (and thus removed) + * or false if it didn't exist. + */ + def removeElem(elem: A) : Boolean = { + if (tableDebug) checkConsistent() + def precedes(i: Int, j: Int) = { + val d = table.length >> 1 + if (i <= j) j - i < d + else i - j > d + } + val removalEntry = elemToEntry(elem) + var h = index(removalEntry.hashCode) + var curEntry = table(h) + while (null != curEntry) { + if (curEntry == removalEntry) { + var h0 = h + var h1 = (h0 + 1) % table.length + while (null != table(h1)) { + val h2 = index(table(h1).hashCode) + //Console.println("shift at "+h1+":"+table(h1)+" with h2 = "+h2+"? "+(h2 != h1)+precedes(h2, h0)+table.length) + if (h2 != h1 && precedes(h2, h0)) { + //Console.println("shift "+h1+" to "+h0+"!") + table(h0) = table(h1) + h0 = h1 + } + h1 = (h1 + 1) % table.length + } + table(h0) = null + tableSize -= 1 + nnSizeMapRemove(h0) + if (tableDebug) checkConsistent() + return true + } + h = (h + 1) % table.length + curEntry = table(h) + } + false + } + + def iterator: Iterator[A] = new AbstractIterator[A] { + private[this] var i = 0 + def hasNext: Boolean = { + while (i < table.length && (null == table(i))) i += 1 + i < table.length + } + def next(): A = + if (hasNext) { i += 1; entryToElem(table(i - 1)) } + else Iterator.empty.next() + } + + private def growTable() = { + val oldtable = table + table = new Array[AnyRef](table.length * 2) + tableSize = 0 + nnSizeMapReset(table.length) + seedvalue = tableSizeSeed + threshold = newThreshold(_loadFactor, table.length) + var i = 0 + while (i < oldtable.length) { + val entry = oldtable(i) + if (null != entry) addEntry(entry) + i += 1 + } + if (tableDebug) checkConsistent() + } + + private def checkConsistent() = { + for (i <- 0 until table.length) + if (table(i) != null && !containsElem(entryToElem(table(i)))) + assert(assertion = false, s"$i ${table(i)} ${table.mkString}") + } + + + /* Size map handling code */ + + /* + * The following three methods (nn*) modify a size map only if it has been + * initialized, that is, if it's not set to null. + * + * The size map logically divides the hash table into `sizeMapBucketSize` element buckets + * by keeping an integer entry for each such bucket. Each integer entry simply denotes + * the number of elements in the corresponding bucket. 
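+ * Keeping these counts is what lets the parallel hash tables built on this trait size their + * chunks when splitting, without rescanning the underlying table.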
+ * Best understood through an example, see: + * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) + * sizemap = [ 2 | 3 ] (2 entries) + * where sizeMapBucketSize == 4. + * + */ + protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { + val p = h >> sizeMapBucketBitSize + sizemap(p) += 1 + } + + protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) -= 1 + } + + protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { + val nsize = calcSizeMapSize(tableLength) + if (sizemap.length != nsize) sizemap = new Array[Int](nsize) + else java.util.Arrays.fill(sizemap, 0) + } + + private[collection] final def totalSizeMapBuckets = (table.length - 1) / sizeMapBucketSize + 1 + + protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 + + // discards the previous sizemap and only allocates a new one + protected final def sizeMapInit(tableLength: Int): Unit = { + sizemap = new Array[Int](calcSizeMapSize(tableLength)) + } + + // discards the previous sizemap and populates the new one + protected final def sizeMapInitAndRebuild(): Unit = { + // first allocate + sizeMapInit(table.length) + + // rebuild + val totalbuckets = totalSizeMapBuckets + var bucketidx = 0 + var tableidx = 0 + val tbl = table + var tableuntil = sizeMapBucketSize min tbl.length + while (bucketidx < totalbuckets) { + var currbucketsz = 0 + while (tableidx < tableuntil) { + if (tbl(tableidx) ne null) currbucketsz += 1 + tableidx += 1 + } + sizemap(bucketidx) = currbucketsz + tableuntil += sizeMapBucketSize + bucketidx += 1 + } + } + + private[collection] def printSizeMap() = { + println(sizemap.mkString("szmap: [", ", ", "]")) + } + + private[collection] def printContents() = { + println(table.mkString("[", ", ", "]")) + } + + protected def sizeMapDisable() = sizemap = null + + protected def isSizeMapDefined = sizemap ne null + + protected def alwaysInitSizeMap = false + + /* End of size map handling code */ + + protected def index(hcode: Int) = { + // version 1 (no longer used - did not work with parallel hash tables) + // improve(hcode) & (table.length - 1) + + // version 2 (allows for parallel hash table construction) + val improved = improve(hcode, seedvalue) + val ones = table.length - 1 + (improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones + + // version 3 (solves SI-5293 in most cases, but such a case would still arise for parallel hash tables) + // val hc = improve(hcode) + // val bbp = blockbitpos + // val ones = table.length - 1 + // val needed = Integer.bitCount(ones) + // val blockbits = ((hc >>> bbp) & 0x1f) << (needed - 5) + // val rest = ((hc >>> (bbp + 5)) << bbp) | (((1 << bbp) - 1) & hc) + // val restmask = (1 << (needed - 5)) - 1 + // val improved = blockbits | (rest & restmask) + // improved + } + + def clearTable(): Unit = { + var i = table.length - 1 + while (i >= 0) { table(i) = null; i -= 1 } + tableSize = 0 + nnSizeMapReset(table.length) + } + + private[collection] def hashTableContents = new FlatHashTable.Contents[A]( + _loadFactor, + table, + tableSize, + threshold, + seedvalue, + sizemap + ) + + protected def initWithContents(c: FlatHashTable.Contents[A]) = { + if (c != null) { + _loadFactor = c.loadFactor + table = c.table + tableSize = c.tableSize + threshold = c.threshold + seedvalue = c.seedvalue + sizemap = c.sizemap + } + if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() + } + +} + + +private[collection] object FlatHashTable { + + /** Creates a specific seed 
to improve hashcode of a hash table instance + * and ensure that iteration order vulnerabilities are not 'felt' in other + * hash tables. + * + * See SI-5293. + */ + final def seedGenerator = new ThreadLocal[scala.util.Random] { + override def initialValue = new scala.util.Random + } + + private object NullSentinel { + override def hashCode = 0 + override def toString = "NullSentinel" + } + + /** The load factor for the hash table; must be < 500 (0.5) + */ + def defaultLoadFactor: Int = 450 + final def loadFactorDenum = 1000 + + def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt) + + def newThreshold(_loadFactor: Int, size: Int) = { + val lf = _loadFactor + assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5") + (size.toLong * lf / loadFactorDenum ).toInt + } + + class Contents[A]( + val loadFactor: Int, + val table: Array[AnyRef], + val tableSize: Int, + val threshold: Int, + val seedvalue: Int, + val sizemap: Array[Int] + ) + + trait HashUtils[A] { + protected final def sizeMapBucketBitSize = 5 + // so that: + protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize + + protected final def improve(hcode: Int, seed: Int) = rotateRight(byteswap32(hcode), seed) + + /** + * Elems have type A, but we store AnyRef in the table. Plus we need to deal with + * null elems, which need to be stored as NullSentinel + */ + protected final def elemToEntry(elem : A) : AnyRef = + if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef] + + /** + * Does the inverse translation of elemToEntry + */ + final def entryToElem(entry : AnyRef) : A = + (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A] + } + +} diff --git a/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala b/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala index 821bc7e2..d41c0174 100644 --- a/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala +++ b/core/src/main/scala/scala/collection/parallel/CollectionConverters.scala @@ -12,221 +12,179 @@ package scala.collection.parallel +import scala.collection.immutable.{OldHashMap, OldHashSet} import scala.language.implicitConversions import scala.{collection => sc} -import scala.collection.{mutable => scm, immutable => sci, concurrent => scc} - -import scala.collection._ +import scala.collection.{immutable => sci, mutable => scm, concurrent => scc} /** Extension methods for `.par` on sequential collections. */ -object CollectionConverters extends CollectionConvertersLowPriority { +object CollectionConverters { - // Traversable & Iterable + // TODO Use IsSeqLike, IsIterableLike, etc. 
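+
+  // Illustrative usage: importing this object gives sequential collections a
+  // `.par` extension method, e.g.
+  //   import scala.collection.parallel.CollectionConverters._
+  //   val ps = List(1, 2, 3).par   // a parallel sequence (a ParVector under the hood)
+  //   val pm = Map(1 -> "a").par   // a parallel map
+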
+ // Iterable - implicit class TraversableIsParallelizable[A](private val coll: sc.Traversable[A]) extends AnyVal with CustomParallelizable[A, ParIterable[A]] { + implicit class IterableIsParallelizable[A](private val coll: sc.Iterable[A]) extends AnyVal with sc.CustomParallelizable[A, ParIterable[A]] { def seq = coll override def par = coll match { - case coll: sc.Set[_] => new SetIsParallelizable(coll.asInstanceOf[sc.Set[A]]).par - case coll: sc.Map[_, _] => new MapIsParallelizable(coll.asInstanceOf[sc.Map[_, _]]).par.asInstanceOf[ParIterable[A]] - case coll: sci.Iterable[_] => new ImmutableIterableIsParallelizable(coll.asInstanceOf[sci.Iterable[A]]).par - case coll: scm.Iterable[_] => new MutableIterableIsParallelizable(coll.asInstanceOf[scm.Iterable[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParIterable[A]]].par + case coll: sc.Set[A] => new SetIsParallelizable(coll).par + case coll: sc.Map[_, _] => new MapIsParallelizable(coll).par.asInstanceOf[ParIterable[A]] + case coll: sci.Iterable[A] => new ImmutableIterableIsParallelizable(coll).par + case coll: scm.Iterable[A] => new MutableIterableIsParallelizable(coll).par case _ => ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray, same as for scm.Iterable } } - implicit class MutableIterableIsParallelizable[A](private val coll: scm.Iterable[A]) extends AnyVal with CustomParallelizable[A, mutable.ParIterable[A]] { + implicit class MutableIterableIsParallelizable[A](private val coll: scm.Iterable[A]) extends AnyVal with sc.CustomParallelizable[A, mutable.ParIterable[A]] { def seq = coll override def par = coll match { - case coll: scm.Seq[_] => new MutableSeqIsParallelizable(coll.asInstanceOf[scm.Seq[A]]).par - case coll: scm.Set[_] => new MutableSetIsParallelizable(coll.asInstanceOf[scm.Set[A]]).par - case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll.asInstanceOf[scm.Map[_, _]]).par.asInstanceOf[mutable.ParIterable[A]] - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParIterable[A]]].par + case coll: scm.Seq[A] => new MutableSeqIsParallelizable(coll).par + case coll: scm.Set[A] => new MutableSetIsParallelizable(coll).par + case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll).par.asInstanceOf[mutable.ParIterable[A]] case _ => mutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray } } - implicit class ImmutableIterableIsParallelizable[A](private val coll: sci.Iterable[A]) extends AnyVal with CustomParallelizable[A, immutable.ParIterable[A]] { + implicit class ImmutableIterableIsParallelizable[A](private val coll: sci.Iterable[A]) extends AnyVal with sc.CustomParallelizable[A, immutable.ParIterable[A]] { def seq = coll override def par = coll match { - case coll: sci.Seq[_] => new ImmutableSeqIsParallelizable(coll.asInstanceOf[sci.Seq[A]]).par - case coll: sci.Set[_] => new ImmutableSetIsParallelizable(coll.asInstanceOf[sci.Set[A]]).par - case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll.asInstanceOf[sci.Map[_, _]]).par.asInstanceOf[immutable.ParIterable[A]] - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParIterable[A]]].par + case coll: sci.Seq[A] => new ImmutableSeqIsParallelizable(coll).par + case coll: sci.Set[A] => new ImmutableSetIsParallelizable(coll).par + case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll).par.asInstanceOf[immutable.ParIterable[A]] case _ => immutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParVector } } - 
// mutable.Seq + // Seq + implicit def seqIsParallelizable[A](coll: sc.Seq[A]): sc.Parallelizable[A, ParSeq[A]] = coll match { + case it: scm.Seq[A] => new MutableSeqIsParallelizable(it) + case it: sci.Seq[A] => new ImmutableSeqIsParallelizable(it) + case _ => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.Seq must be a scala.collection.mutable.Seq or scala.collection.immutable.Seq") + } - implicit class MutableSeqIsParallelizable[A](private val coll: scm.Seq[A]) extends AnyVal with CustomParallelizable[A, mutable.ParSeq[A]] { + implicit class MutableSeqIsParallelizable[A](private val coll: scm.Seq[A]) extends AnyVal with sc.CustomParallelizable[A, mutable.ParSeq[A]] { def seq = coll override def par = coll match { - case coll: scm.WrappedArray[_] => new WrappedArrayIsParallelizable(coll.asInstanceOf[scm.WrappedArray[A]]).par - case coll: scm.ArraySeq[_] => new MutableArraySeqIsParallelizable(coll.asInstanceOf[scm.ArraySeq[A]]).par - case coll: scm.ArrayBuffer[_] => new MutableArrayBufferIsParallelizable(coll.asInstanceOf[scm.ArrayBuffer[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParSeq[A]]].par + case coll: scm.ArraySeq[A] => new MutableArraySeqIsParallelizable(coll).par + case coll: scm.ArrayBuffer[A] => new MutableArrayBufferIsParallelizable(coll).par case _ => mutable.ParSeq.newCombiner[A].fromSequential(seq) } } - implicit class WrappedArrayIsParallelizable[T](private val coll: scm.WrappedArray[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { - def seq = coll - override def par = mutable.ParArray.handoff(coll.array) - } - - implicit class MutableArraySeqIsParallelizable[T](private val coll: scm.ArraySeq[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { + implicit class MutableArraySeqIsParallelizable[T](private val coll: scm.ArraySeq[T]) extends AnyVal with sc.CustomParallelizable[T, mutable.ParArray[T]] { def seq = coll override def par = mutable.ParArray.handoff(coll.array.asInstanceOf[Array[T]], coll.length) } - implicit class MutableArrayBufferIsParallelizable[T](private val coll: scm.ArrayBuffer[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { + implicit class MutableArrayBufferIsParallelizable[T](private val coll: scm.ArrayBuffer[T]) extends AnyVal with sc.CustomParallelizable[T, mutable.ParArray[T]] { def seq = coll override def par = mutable.ParArray.handoff[T](coll.array.asInstanceOf[Array[T]], coll.size) } // immutable.Seq - implicit class ImmutableSeqIsParallelizable[A](private val coll: sci.Seq[A]) extends AnyVal with CustomParallelizable[A, immutable.ParSeq[A]] { + implicit class ImmutableSeqIsParallelizable[A](private val coll: sci.Seq[A]) extends AnyVal with sc.CustomParallelizable[A, immutable.ParSeq[A]] { def seq = coll override def par = coll match { case coll: sci.Vector[_] => new VectorIsParallelizable(coll.asInstanceOf[sci.Vector[A]]).par case coll: sci.Range => new RangeIsParallelizable(coll).par.asInstanceOf[immutable.ParSeq[A]] - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParSeq[A]]].par case _ => immutable.ParSeq.newCombiner[A].fromSequential(seq) } } - implicit class RangeIsParallelizable(private val coll: sci.Range) extends AnyVal with CustomParallelizable[Int, immutable.ParRange] { + implicit class RangeIsParallelizable(private val coll: sci.Range) extends AnyVal with sc.CustomParallelizable[Int, immutable.ParRange] { def seq = coll override def par = new 
immutable.ParRange(coll) } - implicit class VectorIsParallelizable[T](private val coll: sci.Vector[T]) extends AnyVal with CustomParallelizable[T, immutable.ParVector[T]] { + implicit class VectorIsParallelizable[T](private val coll: sci.Vector[T]) extends AnyVal with sc.CustomParallelizable[T, immutable.ParVector[T]] { def seq = coll override def par = new immutable.ParVector(coll) } // Set - implicit class SetIsParallelizable[A](private val coll: sc.Set[A]) extends AnyVal with CustomParallelizable[A, ParSet[A]] { + implicit class SetIsParallelizable[A](private val coll: sc.Set[A]) extends AnyVal with sc.CustomParallelizable[A, ParSet[A]] { def seq = coll override def par = coll match { - case coll: sci.Set[_] => new ImmutableSetIsParallelizable(coll.asInstanceOf[sci.Set[A]]).par - case coll: scm.Set[_] => new MutableSetIsParallelizable(coll.asInstanceOf[scm.Set[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSet[A]]].par + case coll: sci.Set[A] => new ImmutableSetIsParallelizable(coll).par + case coll: scm.Set[A] => new MutableSetIsParallelizable(coll).par case _ => ParSet.newCombiner[A].fromSequential(seq) } } - implicit class ImmutableSetIsParallelizable[A](private val coll: sci.Set[A]) extends AnyVal with CustomParallelizable[A, immutable.ParSet[A]] { + implicit class ImmutableSetIsParallelizable[A](private val coll: sci.Set[A]) extends AnyVal with sc.CustomParallelizable[A, immutable.ParSet[A]] { def seq = coll override def par = coll match { - case coll: sci.HashSet[_] => new ImmutableHashSetIsParallelizable(coll.asInstanceOf[sci.HashSet[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, immutable.ParSet[A]]].par + case coll: sci.HashSet[A] => new ImmutableHashSetIsParallelizable(coll).par case _ => immutable.ParSet.newCombiner[A].fromSequential(seq) } } - implicit class MutableSetIsParallelizable[A](private val coll: scm.Set[A]) extends AnyVal with CustomParallelizable[A, mutable.ParSet[A]] { + implicit class MutableSetIsParallelizable[A](private val coll: scm.Set[A]) extends AnyVal with sc.CustomParallelizable[A, mutable.ParSet[A]] { def seq = coll override def par = coll match { - case coll: scm.HashSet[_] => new MutableHashSetIsParallelizable(coll.asInstanceOf[scm.HashSet[A]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, mutable.ParSet[A]]].par + case coll: scm.HashSet[A] => new MutableHashSetIsParallelizable(coll).par case _ => mutable.ParSet.newCombiner[A].fromSequential(seq) } } - implicit class MutableHashSetIsParallelizable[T](private val coll: scm.HashSet[T]) extends AnyVal with CustomParallelizable[T, mutable.ParHashSet[T]] { + implicit class MutableHashSetIsParallelizable[T](private val coll: scm.HashSet[T]) extends AnyVal with sc.CustomParallelizable[T, mutable.ParHashSet[T]] { def seq = coll - override def par = new mutable.ParHashSet(coll.hashTableContents) + override def par = coll.to(mutable.ParHashSet) } - implicit class ImmutableHashSetIsParallelizable[T](private val coll: sci.HashSet[T]) extends AnyVal with CustomParallelizable[T, immutable.ParHashSet[T]] { + implicit class ImmutableHashSetIsParallelizable[T](private val coll: sci.HashSet[T]) extends AnyVal with sc.CustomParallelizable[T, immutable.ParHashSet[T]] { def seq = coll - override def par = immutable.ParHashSet.fromTrie(coll) + override def par = immutable.ParHashSet.fromTrie(coll.to(OldHashSet)) // TODO Redesign immutable.ParHashSet so that conversion from sequential sci.HashSet takes constant time } // 
Map - implicit class MapIsParallelizable[K, V](private val coll: sc.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), ParMap[K, V]] { + implicit class MapIsParallelizable[K, V](private val coll: sc.Map[K, V]) extends AnyVal with sc.CustomParallelizable[(K, V), ParMap[K, V]] { def seq = coll override def par = coll match { - case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll.asInstanceOf[sci.Map[K, V]]).par - case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll.asInstanceOf[scm.Map[K, V]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), ParMap[K, V]]].par + case coll: sci.Map[K, V] => new ImmutableMapIsParallelizable(coll).par + case coll: scm.Map[K, V] => new MutableMapIsParallelizable(coll).par case _ => ParMap.newCombiner[K, V].fromSequential(seq) } } - implicit class ImmutableMapIsParallelizable[K, V](private val coll: sci.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), immutable.ParMap[K, V]] { + implicit class ImmutableMapIsParallelizable[K, V](private val coll: sci.Map[K, V]) extends AnyVal with sc.CustomParallelizable[(K, V), immutable.ParMap[K, V]] { def seq = coll override def par = coll match { - case coll: sci.HashMap[_, _] => new ImmutableHashMapIsParallelizable(coll.asInstanceOf[sci.HashMap[K, V]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), immutable.ParMap[K, V]]].par + case coll: sci.HashMap[K, V] => new ImmutableHashMapIsParallelizable(coll).par case _ => immutable.ParMap.newCombiner[K, V].fromSequential(seq) } } - implicit class MutableMapIsParallelizable[K, V](private val coll: scm.Map[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParMap[K, V]] { + implicit class MutableMapIsParallelizable[K, V](private val coll: scm.Map[K, V]) extends AnyVal with sc.CustomParallelizable[(K, V), mutable.ParMap[K, V]] { def seq = coll override def par = coll match { - case coll: scm.HashMap[_, _] => new MutableHashMapIsParallelizable(coll.asInstanceOf[scm.HashMap[K, V]]).par - case coll: scc.TrieMap[_, _] => new ConcurrentTrieMapIsParallelizable(coll.asInstanceOf[scc.TrieMap[K, V]]).par - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), mutable.ParMap[K, V]]].par + case coll: scm.HashMap[K, V] => new MutableHashMapIsParallelizable(coll).par + case coll: scc.TrieMap[K, V] => new ConcurrentTrieMapIsParallelizable(coll).par case _ => mutable.ParMap.newCombiner[K, V].fromSequential(seq) } } - implicit class ImmutableHashMapIsParallelizable[K, V](private val coll: sci.HashMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), immutable.ParHashMap[K, V]] { + implicit class ImmutableHashMapIsParallelizable[K, V](private val coll: sci.HashMap[K, V]) extends AnyVal with sc.CustomParallelizable[(K, V), immutable.ParHashMap[K, V]] { def seq = coll - override def par = immutable.ParHashMap.fromTrie(coll) + override def par = immutable.ParHashMap.fromTrie(coll.to(OldHashMap)) // TODO Redesign immutable.ParHashMap so that conversion from sequential sci.HashMap takes constant time } - implicit class MutableHashMapIsParallelizable[K, V](private val coll: scm.HashMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParHashMap[K, V]] { + implicit class MutableHashMapIsParallelizable[K, V](private val coll: scm.HashMap[K, V]) extends AnyVal with sc.CustomParallelizable[(K, V), mutable.ParHashMap[K, V]] { def seq = coll - override def par = new mutable.ParHashMap[K, V](coll.hashTableContents) + override def par = 
coll.to(mutable.ParHashMap) // TODO Redesign mutable.ParHashMap so that conversion from sequential scm.HashMap takes constant time } - implicit class ConcurrentTrieMapIsParallelizable[K, V](private val coll: scc.TrieMap[K, V]) extends AnyVal with CustomParallelizable[(K, V), mutable.ParTrieMap[K, V]] { + implicit class ConcurrentTrieMapIsParallelizable[K, V](private val coll: scc.TrieMap[K, V]) extends AnyVal with sc.CustomParallelizable[(K, V), mutable.ParTrieMap[K, V]] { def seq = coll override def par = new mutable.ParTrieMap(coll) } // Other - implicit class ArrayIsParallelizable[T](private val a: Array[T]) extends AnyVal with CustomParallelizable[T, mutable.ParArray[T]] { + implicit class ArrayIsParallelizable[T](private val a: Array[T]) extends AnyVal with sc.CustomParallelizable[T, mutable.ParArray[T]] { def seq = a // via ArrayOps override def par = mutable.ParArray.handoff(a) } } - -trait CollectionConvertersLowPriority { self: CollectionConverters.type => - - // Generic - - implicit def genTraversableLikeIsParallelizable[A, Repr](coll: sc.GenTraversableLike[A, Repr]): Parallelizable[A, ParIterable[A]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParIterable[A]]].par - case coll: sc.Traversable[_] => new TraversableIsParallelizable(coll.asInstanceOf[sc.Traversable[A]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenTraversableLike must be Parallelizable or a scala.collection.Traversable") - } - - implicit def genSeqLikeIsParallelizable[A, Repr](coll: sc.GenSeqLike[A, Repr]): Parallelizable[A, ParSeq[A]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSeq[A]]].par - case it: scm.Seq[_] => new MutableSeqIsParallelizable(it.asInstanceOf[scm.Seq[A]]) - case it: sci.Seq[_] => new ImmutableSeqIsParallelizable(it.asInstanceOf[sci.Seq[A]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenSeqLike must be Parallelizable or a scala.collection.mutable.Seq or scala.collection.immutable.Seq") - } - - implicit def genSetLikeIsParallelizable[A, Repr](coll: sc.GenSetLike[A, Repr]): Parallelizable[A, ParSet[A]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[A, ParSet[A]]].par - case it: sc.Set[_] => new SetIsParallelizable(it.asInstanceOf[sc.Set[A]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenSetLike must be Parallelizable or a scala.collection.Set") - } - - implicit def genMapLikeIsParallelizable[K, V, Repr](coll: sc.GenMapLike[K, V, Repr]): Parallelizable[(K, V), ParMap[K, V]] = coll match { - case coll: Parallelizable[_, _] => coll.asInstanceOf[Parallelizable[(K, V), ParMap[K, V]]].par - case it: sc.Map[_, _] => new MapIsParallelizable(it.asInstanceOf[sc.Map[K, V]]) - case coll => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.GenMapLike must be Parallelizable or a scala.collection.Map") - } -} diff --git a/core/src/main/scala/scala/collection/parallel/Combiner.scala b/core/src/main/scala/scala/collection/parallel/Combiner.scala index 671a3d39..9215eb9d 100644 --- a/core/src/main/scala/scala/collection/parallel/Combiner.scala +++ b/core/src/main/scala/scala/collection/parallel/Combiner.scala @@ -93,8 +93,8 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { /** Add 
all elements from a sequential collection and return the result. */ - def fromSequential(seq: TraversableOnce[Elem]): To = { - for (x <- seq) this += x + def fromSequential(seq: IterableOnce[Elem]): To = { + for (x <- seq.iterator) this += x result() } } diff --git a/core/src/main/scala/scala/collection/parallel/ParIterable.scala b/core/src/main/scala/scala/collection/parallel/ParIterable.scala index fd888c5e..09796799 100644 --- a/core/src/main/scala/scala/collection/parallel/ParIterable.scala +++ b/core/src/main/scala/scala/collection/parallel/ParIterable.scala @@ -13,7 +13,6 @@ package scala package collection.parallel -import scala.collection.GenIterable import scala.collection.generic._ import scala.collection.parallel.mutable.ParArrayCombiner @@ -29,11 +28,9 @@ import scala.collection.parallel.mutable.ParArrayCombiner * @since 2.9 */ trait ParIterable[+T] -extends GenIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - //protected[this] override def newBuilder = ParIterable.newBuilder[T] + extends GenericParTemplate[T, ParIterable] + with ParIterableLike[T, ParIterable, ParIterable[T], Iterable[T]] { + def companion: GenericParCompanion[ParIterable] = ParIterable def stringPrefix = "ParIterable" } @@ -41,7 +38,7 @@ extends GenIterable[T] /** $factoryInfo */ object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] + implicit def canBuildFrom[T]: CanCombineFrom[ParIterable[_], T, ParIterable[T]] = new GenericCanCombineFrom[T] def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] diff --git a/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala b/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala index ad71fb43..2111eb52 100644 --- a/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala +++ b/core/src/main/scala/scala/collection/parallel/ParIterableLike.scala @@ -14,24 +14,13 @@ package scala package collection.parallel import scala.language.{ higherKinds, implicitConversions } - import scala.collection.mutable.Builder import scala.collection.mutable.ArrayBuffer -import scala.collection.IterableLike -import scala.collection.Parallel -import scala.collection.CustomParallelizable +import scala.collection.{CustomParallelizable, IterableOps, Parallel} import scala.collection.generic._ -import scala.collection.GenIterableLike -import scala.collection.GenIterable -import scala.collection.GenTraversableOnce -import scala.collection.GenTraversable import immutable.HashMapCombiner import scala.reflect.ClassTag -import scala.annotation.unchecked.uncheckedVariance - -import scala.collection.parallel.ParallelCollectionImplicits._ - /** A template trait for parallel collections of type `ParIterable[T]`. 
* @@ -158,13 +147,17 @@ import scala.collection.parallel.ParallelCollectionImplicits._ * @define Coll `ParIterable` * @define coll parallel iterable */ -trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]] -extends GenIterableLike[T, Repr] +trait ParIterableLike[+T, +CC[X] <: ParIterable[X], +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableOps[T, Iterable, Sequential]] +extends IterableOnce[T] with CustomParallelizable[T, Repr] with Parallel with HasNewCombiner[T, Repr] { -self: ParIterableLike[T, Repr, Sequential] => +self => + + def size: Int + def stringPrefix: String + def companion: GenericParCompanion[CC] @transient @volatile @@ -343,11 +336,6 @@ self: ParIterableLike[T, Repr, Sequential] => def asCombiner = cb.asInstanceOf[Combiner[Elem, To]] } - protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] { - def apply(from: Sequential) = bf.apply(newCombiner.fromSequential(from)) - def apply() = bf.apply() - } - protected[this] def sequentially[S, That <: Parallel](b: Sequential => Sequential) = newCombiner.fromSequential(b(seq)) def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end) @@ -358,8 +346,6 @@ self: ParIterableLike[T, Repr, Sequential] => override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - def canEqual(other: Any) = true - /** Reduces the elements of this sequence using the specified associative binary operator. * * $undefinedorder @@ -500,26 +486,17 @@ self: ParIterableLike[T, Repr, Sequential] => reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y) } - def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.map(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.map(f)(bf2seq(bf))*/ + def map[S](f: T => S): CC[S] = { + tasksupport.executeAndWaitResult(new Map[S, CC[S]](f, combinerFactory(() => companion.newCombiner[S]), splitter) mapResult { _.resultWithTaskSupport }) + } - def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.collect(pf)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result }) - } otherwise seq.collect(pf)(bf2seq(bf))*/ + def collect[S](pf: PartialFunction[T, S]): CC[S] = { + tasksupport.executeAndWaitResult(new Collect[S, CC[S]](pf, combinerFactory(() => companion.newCombiner[S]), splitter) mapResult { _.resultWithTaskSupport }) + } - def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.flatMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result }) - 
} otherwise seq.flatMap(f)(bf2seq(bf))*/ + def flatMap[S](f: T => IterableOnce[S]): CC[S] = { + tasksupport.executeAndWaitResult(new FlatMap[S, CC[S]](f, combinerFactory(() => companion.newCombiner[S]), splitter) mapResult { _.resultWithTaskSupport }) + } /** Tests whether a predicate holds for all elements of this $coll. * @@ -605,12 +582,10 @@ self: ParIterableLike[T, Repr, Sequential] => tasksupport.executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) } - def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = { - if (that.isParallel && bf.isParallel) { + def ++[U >: T](that: IterableOnce[U]): CC[U] = that match { + case other: ParIterable[U] => // println("case both are parallel") - val other = that.asParIterable - val pbf = bf.asParallel - val cfactory = combinerFactory(() => pbf(repr)) + val cfactory = combinerFactory(() => companion.newCombiner[U]) val copythis = new Copy(cfactory, splitter) val copythat = wrap { val othtask = new other.Copy(cfactory, other.splitter) @@ -620,22 +595,15 @@ self: ParIterableLike[T, Repr, Sequential] => _.resultWithTaskSupport } tasksupport.executeAndWaitResult(task) - } else if (bf(repr).isCombiner) { + case _ => // println("case parallel builder, `that` not parallel") - val copythis = new Copy(combinerFactory(() => bf(repr).asCombiner), splitter) + val copythis = new Copy(combinerFactory(() => companion.newCombiner[U]), splitter) val copythat = wrap { - val cb = bf(repr).asCombiner - for (elem <- that.seq) cb += elem + val cb = companion.newCombiner[U] + cb ++= that cb } tasksupport.executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.resultWithTaskSupport }) - } else { - // println("case not a parallel builder") - val b = bf(repr) - this.splitter.copy2builder[U, That, Builder[U, That]](b) - for (elem <- that.seq) b += elem - setTaskSupport(b.result(), tasksupport) - } } def partition(pred: T => Boolean): (Repr, Repr) = { @@ -687,7 +655,7 @@ self: ParIterableLike[T, Repr, Sequential] => cb.resultWithTaskSupport } - override def slice(unc_from: Int, unc_until: Int): Repr = { + def slice(unc_from: Int, unc_until: Int): Repr = { val from = unc_from min size max 0 val until = unc_until min size max from if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until) @@ -718,30 +686,21 @@ self: ParIterableLike[T, Repr, Sequential] => * Note: The neutral element `z` may be applied more than once. 
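   *
   * For instance, `ParVector(1, 2, 3).scan(0)(_ + _)` yields
   * `ParVector(0, 1, 3, 6)` (illustrative example).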
* * @tparam U element type of the resulting collection - * @tparam That type of the resulting collection * @param z neutral element for the operator `op` * @param op the associative operator for the scan - * @param bf $pbfinfo - * @return a collection containing the prefix scan of the elements in the original collection - * - * @usecase def scan(z: T)(op: (T, T) => T): $Coll[T] - * @inheritdoc - * - * @return a new $coll containing the prefix scan of the elements in this $coll + * @return a new $coll containing the prefix scan of the elements in this $coll */ - def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { - if (tasksupport.parallelismLevel > 1) { - if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { - tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult { - cb => cb.resultWithTaskSupport - }) - }) else setTaskSupport((bf(repr) += z).result(), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) + def scan[U >: T](z: U)(op: (U, U) => U): CC[U] = { + if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { + tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => companion.newCombiner[U])) mapResult { + cb => cb.resultWithTaskSupport + }) + }) else setTaskSupport((companion.newCombiner[U] += z).result(), tasksupport) + } - def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanLeft(z)(op)(bf2seq(bf)), tasksupport) + def scanLeft[S](z: S)(op: (S, T) => S): Iterable[S] = seq.scanLeft(z)(op) - def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanRight(z)(op)(bf2seq(bf)), tasksupport) + def scanRight[S](z: S)(op: (T, S) => S): Iterable[S] = seq.scanRight(z)(op) /** Takes the longest prefix of elements that satisfy the predicate. * @@ -815,31 +774,46 @@ self: ParIterableLike[T, Repr, Sequential] => ) } - def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0) + def copyToArray[U >: T](xs: Array[U]): Unit = copyToArray(xs, 0) - def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start) + def copyToArray[U >: T](xs: Array[U], start: Int): Unit = copyToArray(xs, start, xs.length - start) - def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) { + def copyToArray[U >: T](xs: Array[U], start: Int, len: Int): Unit = if (len > 0) { tasksupport.executeAndWaitResult(new CopyToArray(start, len, xs, splitter)) } - def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that) + // FIXME Push down to `ParSeq`? 
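+  // e.g. (illustrative) `ParVector(1, 2, 3).sameElements(List(1, 2, 3))` is `true`,
+  // even though a parallel and a sequential collection never compare equal with `==`.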
+ def sameElements[U >: T](that: IterableOnce[U]) = seq.iterator.sameElements(that) + + def zip[U >: T, S](that: ParIterable[S]): CC[(U, S)] = { + that match { + case thatseq: ParSeq[S] => + tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => companion.newCombiner[(U, S)]), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }) + case _ => + (companion.newBuilder[(U, S)] ++= setTaskSupport(seq.zip(that.seq), tasksupport)).result() + } + } + def zip[U >: T, S](that: Iterable[S]): CC[(U, S)] = + (companion.newBuilder[(U, S)] ++= setTaskSupport(seq.zip(that), tasksupport)).result() - def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport) - def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false) + /** Zips this $coll with its indices. + * + * @tparam U the type of the first half of the returned pairs (this is always a supertype + * of the collection's element type `T`). + * @return A new collection of type $Coll containing pairs consisting of all elements of this + * $coll paired with their index. Indices start at 0. + */ + def zipWithIndex[U >: T]: CC[(U, Int)] = this zip immutable.ParRange(0, size, 1, inclusive = false) - def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq + def zipAll[S, U >: T](that: ParIterable[S], thisElem: U, thatElem: S): CC[(U, S)] = { + val thatseq = that.toSeq tasksupport.executeAndWaitResult( - new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { + new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => companion.newCombiner[(U, S)]), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport } ) - } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport) + } protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = { tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport }) @@ -849,39 +823,37 @@ self: ParIterableLike[T, Repr, Sequential] => tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport }) } - override def toArray[U >: T: ClassTag]: Array[U] = { + def toArray[U >: T: ClassTag]: Array[U] = { val arr = new Array[U](size) copyToArray(arr) arr } - override def toList: List[T] = seq.toList - - override def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq + def toList: List[T] = seq.toList - override def toStream: Stream[T] = seq.toStream + def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq - override def toIterator: Iterator[T] = splitter + @deprecated("Use `to(LazyList)` instead.", "0.1.3") + def toStream: Stream[T] = seq.toStream - // the methods below are overridden + def toIterator: Iterator[T] = splitter - override def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers? 
+ def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers? - override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] + @deprecated("Use `toIterable` instead", "0.1.3") + def toTraversable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] - override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] + def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) + def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) - override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U]) + def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U]) - override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V]) + def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V]) - override def toVector: Vector[T] = to[Vector] + def toVector: Vector[T] = to(Vector) - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) { - toParCollection[T, Col[T]](() => cbf().asCombiner) - } else seq.to(cbf) + def to[C](factory: collection.Factory[T, C]): C = factory.fromSpecific(this) /* tasks */ @@ -1068,7 +1040,7 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class FlatMap[S, That] - (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) + (f: T => IterableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], FlatMap[S, That]] { @volatile var result: Combiner[S, That] = null def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf()) @@ -1311,7 +1283,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else { val opits = othpit.psplitWithSignalling(pit.remaining) val diff = len - pit.remaining - Seq( + scala.collection.immutable.Seq( new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, diff).splitter.asInstanceOf[IterableSplitter[T]], opits(1)) ) diff --git a/core/src/main/scala/scala/collection/parallel/ParMap.scala b/core/src/main/scala/scala/collection/parallel/ParMap.scala index f0ef2022..52268c28 100644 --- a/core/src/main/scala/scala/collection/parallel/ParMap.scala +++ b/core/src/main/scala/scala/collection/parallel/ParMap.scala @@ -14,7 +14,6 @@ package scala package collection.parallel import scala.collection.Map -import scala.collection.GenMap import scala.collection.generic.ParMapFactory import scala.collection.generic.GenericParMapTemplate import scala.collection.generic.GenericParMapCompanion @@ -31,10 +30,9 @@ import scala.collection.generic.CanCombineFrom * @since 2.9 */ trait ParMap[K, +V] -extends GenMap[K, V] - with GenericParMapTemplate[K, V, ParMap] +extends GenericParMapTemplate[K, V, ParMap] with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], Map[K, V]] + with ParMapLike[K, V, ParMap, ParMap[K, V], Map[K, V]] { self => @@ -46,9 +44,6 @@ self 
=> override def stringPrefix = "ParMap" - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - def + [U >: V](kv: (K, U)): ParMap[K, U] } @@ -64,7 +59,7 @@ object ParMap extends ParMapFactory[ParMap] { * because of variance issues. */ abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] { - override def size = underlying.size + def size = underlying.size def get(key: A) = underlying.get(key) def splitter = underlying.splitter override def default(key: A): B = d(key) diff --git a/core/src/main/scala/scala/collection/parallel/ParMapLike.scala b/core/src/main/scala/scala/collection/parallel/ParMapLike.scala index 5d176dda..e9a329c1 100644 --- a/core/src/main/scala/scala/collection/parallel/ParMapLike.scala +++ b/core/src/main/scala/scala/collection/parallel/ParMapLike.scala @@ -13,11 +13,12 @@ package scala package collection.parallel -import scala.collection.MapLike -import scala.collection.GenMapLike +import scala.collection.generic.GenericParMapCompanion +import scala.collection.{IterableOnce, MapOps} import scala.collection.Map import scala.annotation.unchecked.uncheckedVariance +import scala.language.higherKinds /** A template trait for mutable parallel maps. This trait is to be mixed in * with concrete parallel maps to override the representation type. @@ -34,13 +35,58 @@ import scala.annotation.unchecked.uncheckedVariance */ trait ParMapLike[K, +V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: Map[K, V] with MapLike[K, V, Sequential]] -extends GenMapLike[K, V, Repr] - with ParIterableLike[(K, V), Repr, Sequential] + +CC[X, Y] <: ParMap[X, Y], + +Repr <: ParMapLike[K, V, ParMap, Repr, Sequential] with ParMap[K, V], + +Sequential <: Map[K, V] with MapOps[K, V, Map, Sequential]] +extends ParIterableLike[(K, V), ParIterable, Repr, Sequential] + with Equals { self => + // --- Previously inherited from GenMapLike + def get(key: K): Option[V] + + def canEqual(that: Any): Boolean = true + + /** Compares two maps structurally; i.e., checks if all mappings + * contained in this map are also contained in the other map, + * and vice versa. + * + * @param that the other map + * @return `true` if both maps contain exactly the + * same mappings, `false` otherwise. + */ + override def equals(that: Any): Boolean = that match { + case that: ParMap[b, _] => + (this eq that) || + (that canEqual this) && + (this.size == that.size) && { + try { + this forall { + case (k, v) => that.get(k.asInstanceOf[b]) match { + case Some(`v`) => + true + case _ => false + } + } + } catch { + case ex: ClassCastException => false + }} + case _ => + false + } + + // This hash code must be symmetric in the contents but ought not + // collide trivially. + override def hashCode(): Int = scala.util.hashing.MurmurHash3.unorderedHash(this, "ParMap".hashCode) + + def +[V1 >: V](kv: (K, V1)): CC[K, V1] + def updated [V1 >: V](key: K, value: V1): CC[K, V1] = this + ((key, value)) + def - (key: K): Repr + // --- + + def mapCompanion: GenericParMapCompanion[CC] + def default(key: K): V = throw new NoSuchElementException("key not found: " + key) def empty: Repr @@ -97,17 +143,19 @@ self => def contains(key : K) = self.contains(key) def splitter = keysIterator(self.splitter) def + (elem: K): ParSet[K] = - (ParSet[K]() ++ this + elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem + ParSet[K]() ++ this + elem // !!! 
concrete overrides abstract problem def - (elem: K): ParSet[K] = - (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem - override def size = self.size + ParSet[K]() ++ this - elem // !!! concrete overrides abstract problem + def size = self.size + def knownSize = self.knownSize override def foreach[U](f: K => U) = for ((k, v) <- self) f(k) override def seq = self.seq.keySet } protected class DefaultValuesIterable extends ParIterable[V] { def splitter = valuesIterator(self.splitter) - override def size = self.size + def size = self.size + def knownSize = self.knownSize override def foreach[U](f: V => U) = for ((k, v) <- self) f(v) def seq = self.seq.values } @@ -124,8 +172,9 @@ self => def splitter = filtered.splitter override def contains(key: K) = self.contains(key) && p(key) def get(key: K) = if (!p(key)) None else self.get(key) - def seq = self.seq.filterKeys(p) + def seq = self.seq.view.filterKeys(p).to(Map) def size = filtered.size + def knownSize = filtered.knownSize def + [U >: V](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv def - (key: K): ParMap[K, V] = ParMap[K, V]() ++ this - key } @@ -133,13 +182,93 @@ self => def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] { override def foreach[U](g: ((K, S)) => U): Unit = for ((k, v) <- self) g((k, f(v))) def splitter = self.splitter.map(kv => (kv._1, f(kv._2))) - override def size = self.size + def size = self.size + def knownSize = self.knownSize override def contains(key: K) = self.contains(key) def get(key: K) = self.get(key).map(f) - def seq = self.seq.mapValues(f) + def seq = self.seq.view.mapValues(f).to(Map) def + [U >: S](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv def - (key: K): ParMap[K, S] = ParMap[K, S]() ++ this - key } + /** Builds a new map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + tasksupport.executeAndWaitResult(new Map[(K2, V2), CC[K2, V2]]( + f, + combinerFactory(() => mapCompanion.newCombiner[K2, V2]), + splitter + ) mapResult { _.resultWithTaskSupport }) + + /** Builds a new collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam K2 the key type of the returned $coll. + * @tparam V2 the value type of the returned $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + tasksupport.executeAndWaitResult(new Collect[(K2, V2), CC[K2, V2]]( + pf, + combinerFactory(() => mapCompanion.newCombiner[K2, V2]), + splitter + ) mapResult { _.resultWithTaskSupport }) + + /** Builds a new map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. 
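+   *
+   * For instance (an illustrative sketch):
+   * {{{
+   *   ParMap(1 -> "a", 2 -> "b").flatMap { case (k, v) => List(k -> v, -k -> v) }
+   *   // evaluates to ParMap(1 -> "a", -1 -> "a", 2 -> "b", -2 -> "b")
+   * }}}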
+   */
+  def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] =
+    tasksupport.executeAndWaitResult(new FlatMap[(K2, V2), CC[K2, V2]](
+      f,
+      combinerFactory(() => mapCompanion.newCombiner[K2, V2]),
+      splitter
+    ) mapResult { _.resultWithTaskSupport })
+
+  /** Returns a new $coll containing the elements from the left-hand operand followed by the elements from the
+   * right-hand operand. The element type of the $coll is the most specific superclass encompassing
+   * the element types of the two operands.
+   *
+   * @param that the collection or iterator to append.
+   * @return a new $coll which contains all elements
+   *         of this $coll followed by all elements of `that`.
+   */
+  def concat[V2 >: V](that: collection.IterableOnce[(K, V2)]): CC[K, V2] = that match {
+    case other: ParIterable[(K, V2)] =>
+      // println("case both are parallel")
+      val cfactory = combinerFactory(() => mapCompanion.newCombiner[K, V2])
+      val copythis = new Copy(cfactory, splitter)
+      val copythat = wrap {
+        val othtask = new other.Copy(cfactory, other.splitter)
+        tasksupport.executeAndWaitResult(othtask)
+      }
+      val task = (copythis parallel copythat) { _ combine _ } mapResult {
+        _.resultWithTaskSupport
+      }
+      tasksupport.executeAndWaitResult(task)
+    case _ =>
+      // println("case parallel builder, `that` not parallel")
+      val copythis = new Copy(combinerFactory(() => mapCompanion.newCombiner[K, V2]), splitter)
+      val copythat = wrap {
+        val cb = mapCompanion.newCombiner[K, V2]
+        cb ++= that
+        cb
+      }
+      tasksupport.executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.resultWithTaskSupport })
+  }
+
+  /** Alias for `concat` */
+  @`inline` final def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = concat(xs)
+
   // note - should not override toMap (could be mutable)
 }
diff --git a/core/src/main/scala/scala/collection/parallel/ParSeq.scala b/core/src/main/scala/scala/collection/parallel/ParSeq.scala
index 6c0939f9..0c28bdde 100644
--- a/core/src/main/scala/scala/collection/parallel/ParSeq.scala
+++ b/core/src/main/scala/scala/collection/parallel/ParSeq.scala
@@ -13,12 +13,10 @@
 package scala
 package collection.parallel
 
-import scala.collection.generic.GenericCompanion
 import scala.collection.generic.GenericParCompanion
 import scala.collection.generic.GenericParTemplate
 import scala.collection.generic.ParFactory
 import scala.collection.generic.CanCombineFrom
-import scala.collection.GenSeq
 import scala.collection.parallel.mutable.ParArrayCombiner
 
 /** A template trait for parallel sequences.
@@ -31,12 +29,11 @@ import scala.collection.parallel.mutable.ParArrayCombiner * * @author Aleksandar Prokopec */ -trait ParSeq[+T] extends GenSeq[T] - with ParIterable[T] +trait ParSeq[+T] extends ParIterable[T] with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], Seq[T]] + with ParSeqLike[T, ParSeq, ParSeq[T], scala.collection.Seq[T]] { - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq + override def companion: GenericParCompanion[ParSeq] = ParSeq //protected[this] override def newBuilder = ParSeq.newBuilder[T] def apply(i: Int): T @@ -47,7 +44,7 @@ trait ParSeq[+T] extends GenSeq[T] } object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] + implicit def canBuildFrom[T]: CanCombineFrom[ParSeq[_], T, ParSeq[T]] = new GenericCanCombineFrom[T] def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] diff --git a/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala b/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala index 53cfc985..3ce555ac 100644 --- a/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala +++ b/core/src/main/scala/scala/collection/parallel/ParSeqLike.scala @@ -13,12 +13,12 @@ package scala package collection.parallel -import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator } +import scala.language.higherKinds + +import scala.collection.{AnyConstr, BufferedIterator, Iterator, SeqOps} import scala.collection.generic.DefaultSignalling import scala.collection.generic.AtomicIndexFlag -import scala.collection.generic.CanBuildFrom import scala.collection.generic.VolatileAbort - import scala.collection.parallel.ParallelCollectionImplicits._ /** A template trait for sequences of type `ParSeq[T]`, representing @@ -50,11 +50,29 @@ import scala.collection.parallel.ParallelCollectionImplicits._ * @author Aleksandar Prokopec * @since 2.9 */ -trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, Sequential]] -extends scala.collection.GenSeqLike[T, Repr] - with ParIterableLike[T, Repr, Sequential] { +trait ParSeqLike[+T, +CC[X] <: ParSeq[X], +Repr <: ParSeq[T], +Sequential <: scala.collection.Seq[T] with SeqOps[T, AnyConstr, Sequential]] +extends ParIterableLike[T, CC, Repr, Sequential] + with Equals { self => + def length: Int + def apply(index: Int): T + + override def hashCode() = scala.util.hashing.MurmurHash3.orderedHash(this, "ParSeq".hashCode) + + /** The equals method for arbitrary parallel sequences. Compares this + * parallel sequence to some other object. 
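+   * Note that a parallel sequence can only equal another parallel sequence: e.g.
+   * `ParSeq(1, 2) == ParSeq(1, 2)` is `true`, whereas `ParSeq(1, 2) == List(1, 2)`
+   * is `false`; use `sameElements` for such mixed comparisons (illustrative examples).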
+ * @param that The object to compare the sequence to + * @return `true` if `that` is a sequence that has the same elements as + * this sequence in the same order, `false` otherwise + */ + override def equals(that: Any): Boolean = that match { + case that: ParSeq[_] => (that eq this.asInstanceOf[AnyRef]) || (that canEqual this) && (this sameElements that) + case _ => false + } + + def canEqual(other: Any): Boolean = true + protected[this] type SuperParIterator = IterableSplitter[T] /** A more refined version of the iterator found in the `ParallelIterable` trait, @@ -66,7 +84,7 @@ self => override def iterator: PreciseSplitter[T] = splitter - override def size = length + final def size = length /** Used to iterate elements using indices */ protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] { @@ -98,6 +116,16 @@ self => override def toString = "Elements(" + start + ", " + end + ")" } + /** Tests whether this $coll contains given index. + * + * The implementations of methods `apply` and `isDefinedAt` turn a `ParSeq[T]` into + * a `PartialFunction[Int, T]`. + * + * @param idx the index to test + * @return `true` if this $coll contains an element at position `idx`, `false` otherwise. + */ + def isDefinedAt(idx: Int): Boolean = (idx >= 0) && (idx < length) + /* ParallelSeq methods */ /** Returns the length of the longest segment of elements starting at @@ -119,6 +147,46 @@ self => tasksupport.executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1 } + /** Returns the length of the longest prefix whose elements all satisfy some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the length of the longest prefix of this $coll + * such that every element of the segment satisfies the predicate `p`. + */ + def prefixLength(p: T => Boolean): Int = segmentLength(p, 0) + + /** Finds index of first occurrence of some value in this $coll. + * + * @param elem the element value to search for. + * @tparam B the type of the element `elem`. + * @return the index of the first element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf[B >: T](elem: B): Int = indexOf(elem, 0) + + /** Finds index of first occurrence of some value in this $coll after or at some start index. + * + * @param elem the element value to search for. + * @tparam B the type of the element `elem`. + * @param from the start index + * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf[B >: T](elem: B, from: Int): Int = indexWhere(elem == _, from) + + + /** Finds index of first element satisfying some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the index of the first element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(p: T => Boolean): Int = indexWhere(p, 0) + /** Finds the first element satisfying some predicate. * * $indexsignalling @@ -137,6 +205,38 @@ self => tasksupport.executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx)) } + + /** Finds index of last occurrence of some value in this $coll. + * + * $willNotTerminateInf + * + * @param elem the element value to search for. + * @tparam B the type of the element `elem`. 
+ * @return the index of the last element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf[B >: T](elem: B): Int = lastIndexWhere(elem == _) + + /** Finds index of last occurrence of some value in this $coll before or at a given end index. + * + * @param elem the element value to search for. + * @param end the end index. + * @tparam B the type of the element `elem`. + * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf[B >: T](elem: B, end: Int): Int = lastIndexWhere(elem == _, end) + + /** Finds index of last element satisfying some predicate. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the index of the last element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: T => Boolean): Int = lastIndexWhere(p, length - 1) + /** Finds the last element satisfying some predicate. * * $indexsignalling @@ -159,14 +259,11 @@ self => tasksupport.executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.resultWithTaskSupport }) } - def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { + def reverseMap[S](f: T => S): CC[S] = { tasksupport.executeAndWaitResult( - new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.resultWithTaskSupport } + new ReverseMap[S, CC[S]](f, () => companion.newCombiner[S], splitter) mapResult { _.resultWithTaskSupport } ) - } else setTaskSupport(seq.reverseMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.reverseMap(f)(bf2seq(bf))*/ + } /** Tests whether this $coll contains the given sequence at a given index. 
   /** Tests whether this $coll contains the given sequence at a given index.
    *
@@ -177,22 +274,28 @@ self =>
    * @tparam S the type of the elements of `that` sequence
    * @param that the sequence to test
    * @param offset the starting offset for the search
    * @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise
    */
-  def startsWith[S](that: GenSeq[S], offset: Int): Boolean = that ifParSeq { pthat =>
-    if (offset < 0 || offset >= length) offset == length && pthat.length == 0
-    else if (pthat.length == 0) true
-    else if (pthat.length > length - offset) false
-    else {
-      val ctx = new DefaultSignalling with VolatileAbort
-      tasksupport.executeAndWaitResult(
-        new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter)
-      )
-    }
-  } otherwise seq.startsWith(that, offset)
+  def startsWith[S >: T](that: IterableOnce[S], offset: Int = 0): Boolean = that match {
+    case pt: ParSeq[S] =>
+      if (offset < 0 || offset >= length) offset == length && pt.isEmpty
+      else if (pt.isEmpty) true
+      else if (pt.length > length - offset) false
+      else {
+        val ctx = new DefaultSignalling with VolatileAbort
+        tasksupport.executeAndWaitResult(
+          new SameElements[S](splitter.psplitWithSignalling(offset, pt.length)(1) assign ctx, pt.splitter)
+        )
+      }
+    case _ => seq.startsWith(that, offset)
+  }
 
-  override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat =>
-    val ctx = new DefaultSignalling with VolatileAbort
-    length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
-  } otherwise seq.sameElements(that)
+  override def sameElements[U >: T](that: IterableOnce[U]): Boolean = {
+    that match {
+      case pthat: ParSeq[U] =>
+        val ctx = new DefaultSignalling with VolatileAbort
+        length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
+      case _ => super.sameElements(that)
+    }
+  }
 
   /** Tests whether this $coll ends with the given parallel sequence.
    *
@@ -202,37 +305,49 @@ self =>
    * @param that the sequence to test
    * @return `true` if this $coll has `that` as a suffix, `false` otherwise
    */
-  def endsWith[S](that: GenSeq[S]): Boolean = that ifParSeq { pthat =>
+  def endsWith[S >: T](that: ParSeq[S]): Boolean = {
     if (that.length == 0) true
     else if (that.length > length) false
     else {
       val ctx = new DefaultSignalling with VolatileAbort
       val tlen = that.length
-      tasksupport.executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter))
+      tasksupport.executeAndWaitResult(new SameElements[S](splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, that.splitter))
     }
-  } otherwise seq.endsWith(that)
+  }
+
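Both startsWith and sameElements now pattern-match on the argument: a parallel sequence is compared with the abortable SameElements task, anything else falls back to the sequential path. An illustrative sketch of the dispatch:

    import scala.collection.parallel.immutable.ParVector

    val par = ParVector(1, 2, 3)
    assert(par.sameElements(ParVector(1, 2, 3))) // both parallel: compared chunk-wise
    assert(par.sameElements(List(1, 2, 3)))      // sequential argument: iterator-based fallback
    assert(par.startsWith(ParVector(1, 2)))      // offset defaults to 0
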
+  /** Tests whether this $coll ends with the given collection.
+   *
+   *  $abortsignalling
+   *
+   * @tparam S the type of the elements of `that`
+   * @param that the collection to test
+   * @return `true` if this $coll has `that` as a suffix, `false` otherwise
+   */
+  def endsWith[S >: T](that: Iterable[S]): Boolean = seq.endsWith(that)
 
-  def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+  def patch[U >: T](from: Int, patch: scala.collection.Seq[U], replaced: Int): CC[U] = patch_sequential(from, patch, replaced)
+
+  def patch[U >: T](from: Int, patch: ParSeq[U], replaced: Int): CC[U] = {
     val realreplaced = replaced min (length - from)
-    if (patch.isParSeq && bf(repr).isCombiner && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
+    if ((size - realreplaced + patch.size) > MIN_FOR_COPY) {
       val that = patch.asParSeq
       val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced)
-      val cfactory = combinerFactory(() => bf(repr).asCombiner)
-      val copystart = new Copy[U, That](cfactory, pits(0))
+      val cfactory = combinerFactory(() => companion.newCombiner[U])
+      val copystart = new Copy[U, CC[U]](cfactory, pits(0))
       val copymiddle = wrap {
-        val tsk = new that.Copy[U, That](cfactory, that.splitter)
+        val tsk = new that.Copy[U, CC[U]](cfactory, that.splitter)
         tasksupport.executeAndWaitResult(tsk)
       }
-      val copyend = new Copy[U, That](cfactory, pits(2))
+      val copyend = new Copy[U, CC[U]](cfactory, pits(2))
       tasksupport.executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
         _.resultWithTaskSupport
       })
     } else patch_sequential(from, patch.seq, replaced)
   }
 
-  private def patch_sequential[U >: T, That](fromarg: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+  private def patch_sequential[U >: T](fromarg: Int, patch: scala.collection.Seq[U], r: Int): CC[U] = {
     val from = 0 max fromarg
-    val b = bf(repr)
+    val b = companion.newBuilder[U]
     val repl = (r min (length - from)) max 0
     val pits = splitter.psplitWithSignalling(from, repl, length - from - repl)
     b ++= pits(0)
@@ -241,37 +356,55 @@ self =>
     setTaskSupport(b.result(), tasksupport)
   }
 
-  def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
+  def updated[U >: T](index: Int, elem: U): CC[U] = {
     tasksupport.executeAndWaitResult(
-      new Updated(index, elem, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult {
+      new Updated(index, elem, combinerFactory(() => companion.newCombiner[U]), splitter) mapResult {
        _.resultWithTaskSupport
       }
     )
-  } else setTaskSupport(seq.updated(index, elem)(bf2seq(bf)), tasksupport)
-  /*bf ifParallel { pbf =>
-    tasksupport.executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result })
-  } otherwise seq.updated(index, elem)(bf2seq(bf))*/
+  }
 
-  def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+  def +:[U >: T](elem: U): CC[U] = {
     patch(0, mutable.ParArray(elem), 0)
   }
 
-  def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+  def :+[U >: T](elem: U): CC[U] = {
     patch(length, mutable.ParArray(elem), 0)
   }
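updated, +: and :+ are all expressed through patch, which takes the parallel three-way-copy path only for a ParSeq argument above the MIN_FOR_COPY threshold and otherwise builds sequentially. A small sketch of the user-visible behaviour (values illustrative):

    import scala.collection.parallel.immutable.ParVector

    val xs = ParVector(1, 2, 3)
    assert(xs.updated(1, 20) sameElements ParVector(1, 20, 3))
    assert(((0 +: xs) :+ 4) sameElements ParVector(0, 1, 2, 3, 4))
    // parallel overload: replace two elements starting at index 1 with a single 9
    assert(xs.patch(1, ParVector(9), 2) sameElements ParVector(1, 9))
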
 
-  def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
-    patch(length, new immutable.Repetition(elem, len - length), 0)
-  } else patch(length, Nil, 0)
-
-  override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
-    val thatseq = that.asParSeq
-    tasksupport.executeAndWaitResult(
-      new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
-        _.resultWithTaskSupport
-      }
-    )
-  } else super.zip(that)(bf)
+  /** Produces a new sequence which contains all elements of this $coll and also all elements of
+   *  a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
+   *
+   *  Another way to express this
+   *  is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
+   *  `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
+   *
+   *  $willNotTerminateInf
+   *
+   *  @param that    the sequence to add.
+   *  @tparam B      the element type of the returned $coll.
+   *  @return        a new $coll which contains all elements of this $coll
+   *                 followed by all elements of `that`.
+   */
+  def union[B >: T](that: scala.collection.Seq[B]): CC[B] = this ++ that
+
+  def union[B >: T](that: ParSeq[B]): CC[B] = this ++ that
+
+  def padTo[U >: T](len: Int, elem: U): CC[U] = if (length < len) {
+    patch(length, new immutable.Repetition(elem, len - length), 0)
+  } else patch(length, ParSeq.newBuilder[U].result(), 0)
+
+  override def zip[U >: T, S](that: ParIterable[S]): CC[(U, S)] = /*if (bf(repr).isCombiner && that.isParSeq)*/ {
+    that match {
+      case thatseq: ParSeq[S] =>
+        tasksupport.executeAndWaitResult(
+          new Zip(length min thatseq.length, combinerFactory(() => companion.newCombiner[(U, S)]), splitter, thatseq.splitter) mapResult {
+            _.resultWithTaskSupport
+          }
+        )
+      case _ => super.zip(that)
+    }
+  }
 
   /** Tests whether every element of this $coll relates to the
    * corresponding element of another parallel sequence by satisfying a test predicate.
@@ -285,12 +418,14 @@ self =>
    *         `p(x, y)` is `true` for all corresponding elements `x` of this $coll
    *         and `y` of `that`, otherwise `false`
    */
-  def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat =>
+  def corresponds[S](that: ParSeq[S])(p: (T, S) => Boolean): Boolean = {
     val ctx = new DefaultSignalling with VolatileAbort
-    length == pthat.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter))
-  } otherwise seq.corresponds(that)(p)
+    length == that.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, that.splitter))
+  }
+
+  def diff[U >: T](that: ParSeq[U]): Repr = diff(that.seq)
 
-  def diff[U >: T](that: GenSeq[U]): Repr = sequentially {
+  def diff[U >: T](that: scala.collection.Seq[U]): Repr = sequentially {
     _ diff that
   }
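Both union overloads are plain aliases for ++, and zip truncates to the shorter of the two sequences. An illustrative sketch (values arbitrary):

    import scala.collection.parallel.immutable.ParVector

    val xs = ParVector(1, 2, 2)
    val ys = ParVector(2, 3)
    // order-preserving multi-set union: duplicates are kept
    assert((xs union ys) sameElements ParVector(1, 2, 2, 2, 3))
    // both sides parallel: zipped by the Zip task, truncated to length 2
    assert((xs zip ys) sameElements ParVector((1, 2), (2, 3)))
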
@@ -315,7 +450,7 @@ self =>
    * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
    * in the result, but any following occurrences will be omitted.
    */
-  def intersect[U >: T](that: GenSeq[U]) = sequentially {
+  def intersect[U >: T](that: scala.collection.Seq[U]) = sequentially {
     _ intersect that
   }
 
diff --git a/core/src/main/scala/scala/collection/parallel/ParSet.scala b/core/src/main/scala/scala/collection/parallel/ParSet.scala
index a49561cf..246c1d24 100644
--- a/core/src/main/scala/scala/collection/parallel/ParSet.scala
+++ b/core/src/main/scala/scala/collection/parallel/ParSet.scala
@@ -26,17 +26,16 @@ import scala.collection.generic._
  *  @since 2.9
  */
 trait ParSet[T]
-  extends GenSet[T]
-  with GenericParTemplate[T, ParSet]
+  extends GenericParTemplate[T, ParSet]
   with ParIterable[T]
-  with ParSetLike[T, ParSet[T], Set[T]]
+  with ParSetLike[T, ParSet, ParSet[T], Set[T]]
 { self =>
 
   override def empty: ParSet[T] = mutable.ParHashSet[T]()
 
   //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T]
 
-  override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
+  override def companion: GenericParCompanion[ParSet] = ParSet
 
   override def stringPrefix = "ParSet"
 }
@@ -44,5 +43,5 @@ trait ParSet[T]
 object ParSet extends ParSetFactory[ParSet] {
   def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T]
 
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T]
+  implicit def canBuildFrom[T]: CanCombineFrom[ParSet[_], T, ParSet[T]] = new GenericCanCombineFrom[T]
 }
diff --git a/core/src/main/scala/scala/collection/parallel/ParSetLike.scala b/core/src/main/scala/scala/collection/parallel/ParSetLike.scala
index 24568bde..ace489a9 100644
--- a/core/src/main/scala/scala/collection/parallel/ParSetLike.scala
+++ b/core/src/main/scala/scala/collection/parallel/ParSetLike.scala
@@ -13,10 +13,8 @@ package scala
 package collection.parallel
 
-import scala.collection.SetLike
-import scala.collection.GenSetLike
-import scala.collection.GenSet
-import scala.collection.Set
+import scala.collection.{Set, SetOps}
+import scala.language.higherKinds
 
 /** A template trait for parallel sets. This trait is mixed in with concrete
  *  parallel sets to override the representation type.
@@ -31,21 +29,115 @@ import scala.collection.Set
  *  @since 2.9
  */
 trait ParSetLike[T,
-                 +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
-                 +Sequential <: Set[T] with SetLike[T, Sequential]]
-extends GenSetLike[T, Repr]
-  with ParIterableLike[T, Repr, Sequential]
+                 +CC[X] <: ParIterable[X],
+                 +Repr <: ParSet[T],
+                 +Sequential <: Set[T] with SetOps[T, Set, Sequential]]
+extends ParIterableLike[T, CC, Repr, Sequential]
+  with (T => Boolean)
+  with Equals
 { self =>
 
+  // --- Members previously inherited from GenSetLike
+  def contains(elem: T): Boolean
+  final def apply(elem: T): Boolean = contains(elem)
+  def +(elem: T): Repr
+  def -(elem: T): Repr
+
+  /** Computes the intersection between this set and another set.
+   *
+   *  @param that    the set to intersect with.
+   *  @return        a new set consisting of all elements that are both in this
+   *  set and in the given set `that`.
+   */
+  def intersect(that: ParSet[T]): Repr = this filter that
+  def intersect(that: Set[T]): Repr = this filter that
+
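Because ParSetLike now mixes in T => Boolean, a parallel set is itself a predicate, which is exactly what lets intersect be written as `this filter that`. A brief, illustrative sketch:

    import scala.collection.parallel.immutable.ParSet

    val evens = ParSet(0, 2, 4, 6)
    val small = ParSet(0, 1, 2, 3)
    assert(small.count(evens) == 2)              // a ParSet used directly as a predicate
    assert((small intersect evens) == ParSet(0, 2))
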
+  /** Computes the intersection between this set and another set.
+   *
+   *  '''Note:''' Same as `intersect`.
+   *  @param that    the set to intersect with.
+   *  @return        a new set consisting of all elements that are both in this
+   *  set and in the given set `that`.
+   */
+  def &(that: ParSet[T]): Repr = this intersect that
+  def &(that: Set[T]): Repr = this intersect that
+
+  /** Computes the union between this set and another set.
+   *
+   *  '''Note:''' Same as `union`.
+   *  @param that    the set to form the union with.
+   *  @return        a new set consisting of all elements that are in this
+   *  set or in the given set `that`.
+   */
+  def | (that: ParSet[T]): Repr = this union that
+  def | (that: Set[T]): Repr = this union that
+
+  /** The difference of this set and another set.
+   *
+   *  '''Note:''' Same as `diff`.
+   *  @param that    the set of elements to exclude.
+   *  @return        a set containing those elements of this
+   *                 set that are not also contained in the given set `that`.
+   */
+  def &~(that: ParSet[T]): Repr = this diff that
+  def &~(that: Set[T]): Repr = this diff that
+
+  /** Tests whether this set is a subset of another set.
+   *
+   *  @param that    the set to test.
+   *  @return        `true` if this set is a subset of `that`, i.e. if
+   *                 every element of this set is also an element of `that`.
+   */
+  def subsetOf(that: ParSet[T]): Boolean = this.forall(that)
+
+  /** Compares this set with another object for equality.
+   *
+   *  '''Note:''' This operation contains an unchecked cast: if `that`
+   *  is a set, it will assume with an unchecked cast
+   *  that it has the same element type as this set.
+   *  Any subsequent ClassCastException is treated as a `false` result.
+   *  @param that    the other object
+   *  @return        `true` if `that` is a set which contains the same elements
+   *                 as this set.
+   */
+  override def equals(that: Any): Boolean = that match {
+    case that: ParSet[_] =>
+      (this eq that) ||
+      (that canEqual this) &&
+      (this.size == that.size) &&
+      (try this subsetOf that.asInstanceOf[ParSet[T]]
+       catch { case ex: ClassCastException => false })
+    case _ =>
+      false
+  }
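The symbolic aliases and equals above mirror the sequential Set API: equality is a size check plus a subset check, so element order never matters, which is consistent with the unordered hash defined just below. Illustrative sketch:

    import scala.collection.parallel.immutable.ParSet

    val a = ParSet(1, 2, 3)
    val b = ParSet(3, 4)
    assert((a & b) == ParSet(3))
    assert((a | b) == ParSet(1, 2, 3, 4))
    assert((a &~ b) == ParSet(1, 2))
    assert(ParSet(3) subsetOf a)
    assert(a == ParSet(3, 2, 1) && a.## == ParSet(3, 2, 1).##)
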
+
+  // Careful! Don't write a Set's hashCode like:
+  //   override def hashCode() = this map (_.hashCode) sum
+  // Calling map on a set drops duplicates: any hashcode collisions would
+  // then be dropped before they can be added.
+  // Hash should be symmetric in set entries, but without trivial collisions.
+  override def hashCode() = scala.util.hashing.MurmurHash3.unorderedHash(this, "ParSet".hashCode)
+
+  def canEqual(other: Any): Boolean = true
+  // ---
+
   def empty: Repr
 
   // note: should not override toSet (could be mutable)
 
-  def union(that: GenSet[T]): Repr = sequentially {
+  def union(that: Set[T]): Repr = sequentially {
     _ union that
   }
 
-  def diff(that: GenSet[T]): Repr = sequentially {
+  def union(that: ParSet[T]): Repr = sequentially {
+    _ union that.seq
+  }
+
+  def diff(that: Set[T]): Repr = sequentially {
     _ diff that
   }
+
+  def diff(that: ParSet[T]): Repr = sequentially {
+    _ diff that.seq
+  }
 }
diff --git a/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala b/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala
index b63fc54c..727e6111 100644
--- a/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala
+++ b/core/src/main/scala/scala/collection/parallel/RemainsIterator.scala
@@ -17,7 +17,6 @@ import scala.collection.generic.Signalling
 import scala.collection.generic.DelegatedSignalling
 import scala.collection.generic.IdleSignalling
 import scala.collection.mutable.Builder
-import scala.collection.GenTraversableOnce
 import scala.collection.parallel.immutable.repetition
 
 private[collection] trait RemainsIterator[+T] extends Iterator[T] {
@@ -89,15 +88,6 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
     r
   }
 
-  override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = {
-    var i = from
-    val until = from + len
-    while (i < until && hasNext) {
-      array(i) = next()
-      i += 1
-    }
-  }
-
   def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = {
     var i = howmany - 1
     var u: U = next()
@@ -127,12 +117,9 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
     cb
   }
 
-  def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
-    //val cb = pbf(repr)
+  def flatmap2combiner[S, That](f: T => IterableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
     while (hasNext) {
-      val traversable = f(next()).seq
-      if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
-      else cb ++= traversable
+      cb ++= f(next())
     }
     cb
   }
@@ -301,7 +288,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
     total
   }
 
-  override def indexWhere(pred: T => Boolean): Int = {
+  def indexWhere(pred: T => Boolean): Int = {
     var i = 0
     var loop = true
     while (hasNext && loop) {
@@ -656,7 +643,7 @@ self =>
   override def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
 
   def reverse: SeqSplitter[T] = {
-    val pa = mutable.ParArray.fromTraversables(self).reverse
+    val pa = mutable.ParArray.fromIterables(self).reverse
     new pa.ParArrayIterator {
       override def reverse = self
     }
diff --git a/core/src/main/scala/scala/collection/parallel/Tasks.scala b/core/src/main/scala/scala/collection/parallel/Tasks.scala
index 9237ed4f..c19217cf 100644
--- a/core/src/main/scala/scala/collection/parallel/Tasks.scala
+++ b/core/src/main/scala/scala/collection/parallel/Tasks.scala
@@ -27,7 +27,7 @@ trait Task[R, +Tp] {
    *  Optionally is provided with the result from the previous completed task
    *  or `None` if there was no previous task (or the previous task is uncompleted or unknown).
    */
-  def leaf(result: Option[R])
+  def leaf(result: Option[R]): Unit
 
   /** A result that can be accessed once the task is completed.
    */
   var result: R
@@ -97,11 +97,11 @@ trait Tasks {
     def split: Seq[WrappedTask[R, Tp]]
     /** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */
-    def compute()
+    def compute(): Unit
     /** Start task. */
-    def start()
+    def start(): Unit
     /** Wait for task to finish. */
-    def sync()
+    def sync(): Unit
     /** Try to cancel the task.
      *  @return `true` if cancellation is successful.
      */
@@ -197,7 +197,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
       var curr = this
       var chain = "chain: "
      while (curr != null) {
-        chain += curr + " ---> "
+        chain += curr.toString + " ---> "
        curr = curr.next
      }
      println(chain)
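Tasks.scala only tightens signatures here, but it is the internal layer behind the public tasksupport knob, which remains the way to pick the executor for a given collection. A hedged reminder (pool size arbitrary):

    import java.util.concurrent.ForkJoinPool
    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.collection.parallel.immutable.ParVector

    val xs = ParVector(1, 2, 3)
    xs.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
    val doubled = xs.map(_ * 2) // scheduled on the 4-thread pool
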
 
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala
index a76a6f91..7ee4d9d4 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala
@@ -13,18 +13,16 @@ package scala
 package collection.parallel.immutable
 
-import scala.collection.parallel.ParMapLike
-import scala.collection.parallel.Combiner
-import scala.collection.parallel.IterableSplitter
+import scala.collection.parallel.{IterableSplitter, Combiner, Task}
 import scala.collection.mutable.UnrolledBuffer.Unrolled
 import scala.collection.mutable.UnrolledBuffer
 import scala.collection.generic.ParMapFactory
 import scala.collection.generic.CanCombineFrom
 import scala.collection.generic.GenericParMapTemplate
 import scala.collection.generic.GenericParMapCompanion
-import scala.collection.immutable.{ HashMap, TrieIterator }
+import scala.collection.immutable.{OldHashMap, TrieIterator}
 import scala.annotation.unchecked.uncheckedVariance
-import scala.collection.parallel.Task
+import scala.collection.Hashing
 
 /** Immutable parallel hash map, based on hash tries.
  *
@@ -43,16 +41,16 @@
  *  @define Coll `immutable.ParHashMap`
  *  @define coll immutable parallel hash map
  */
-@SerialVersionUID(1L)
-class ParHashMap[K, +V] private[immutable] (private[this] val trie: HashMap[K, V])
+@SerialVersionUID(3L)
+class ParHashMap[K, +V] private[immutable] (private[this] val trie: OldHashMap[K, V])
 extends ParMap[K, V]
    with GenericParMapTemplate[K, V, ParHashMap]
-   with ParMapLike[K, V, ParHashMap[K, V], HashMap[K, V]]
+   with ParMapLike[K, V, ParHashMap, ParHashMap[K, V], OldHashMap[K, V]]
    with Serializable
 {
 self =>
 
-  def this() = this(HashMap.empty[K, V])
+  def this() = this(OldHashMap.empty[K, V])
 
   override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap
 
@@ -72,6 +70,8 @@ self =>
 
   override def size = trie.size
 
+  def knownSize = trie.size
+
   protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match {
     case Some(old) => old
     case None => newc
@@ -81,7 +81,7 @@ self =>
   extends IterableSplitter[(K, V)] {
     var i = 0
     def dup = triter match {
-      case t: TrieIterator[_] =>
+      case t: TrieIterator[(K, V)] =>
         dupFromIterator(t.dupIterator)
       case _ =>
         val buff = triter.toBuffer
@@ -126,7 +126,7 @@ self =>
     println("Parallel hash trie")
     println("Top level inner trie type: " + trie.getClass)
     trie match {
-      case hm: HashMap.HashMap1[k, v] =>
+      case hm: OldHashMap.OldHashMap1[K, V] =>
         println("single node type")
         println("key stored: " + hm.getKey)
         println("hash of key: " + hm.getHash)
@@ -151,7 +151,7 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
     new CanCombineFromMap[K, V]
   }
 
-  def fromTrie[K, V](t: HashMap[K, V]) = new ParHashMap(t)
+  def fromTrie[K, V](t: OldHashMap[K, V]) = new ParHashMap(t)
 
   var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0)
 }
@@ -160,11 +160,11 @@ private[parallel] abstract class HashMapCombiner[K, V]
 extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
 //self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
   import HashMapCombiner._
-  val emptyTrie = HashMap.empty[K, V]
+  val emptyTrie = OldHashMap.empty[K, V]
 
-  def +=(elem: (K, V)) = {
+  def addOne(elem: (K, V)) = {
     sz += 1
-    val hc = emptyTrie.computeHash(elem._1)
+    val hc = Hashing.computeHash(elem._1)
     val pos = hc & 0x1f
     if (buckets(pos) eq null) {
       // initialize bucket
@@ -177,7 +177,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
 
   def result = {
     val bucks = buckets.filter(_ != null).map(_.headPtr)
-    val root = new Array[HashMap[K, V]](bucks.length)
+    val root = new Array[OldHashMap[K, V]](bucks.length)
 
     combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
 
@@ -192,14 +192,14 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
     if (sz == 0) new ParHashMap[K, V]
     else if (sz == 1) new ParHashMap[K, V](root(0))
     else {
-      val trie = new HashMap.HashTrieMap(bitmap, root, sz)
+      val trie = new OldHashMap.HashTrieMap(bitmap, root, sz)
       new ParHashMap[K, V](trie)
     }
   }
 
   def groupByKey[Repr](cbf: () => Combiner[V, Repr]): ParHashMap[K, Repr] = {
     val bucks = buckets.filter(_ != null).map(_.headPtr)
-    val root = new Array[HashMap[K, AnyRef]](bucks.length)
+    val root = new Array[OldHashMap[K, AnyRef]](bucks.length)
 
     combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length))
 
@@ -212,9 +212,9 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
     val sz = root.foldLeft(0)(_ + _.size)
 
     if (sz == 0) new ParHashMap[K, Repr]
-    else if (sz == 1) new ParHashMap[K, Repr](root(0).asInstanceOf[HashMap[K, Repr]])
+    else if (sz == 1) new ParHashMap[K, Repr](root(0).asInstanceOf[OldHashMap[K, Repr]])
     else {
-      val trie = new HashMap.HashTrieMap(bitmap, root.asInstanceOf[Array[HashMap[K, Repr]]], sz)
+      val trie = new OldHashMap.HashTrieMap(bitmap, root.asInstanceOf[Array[OldHashMap[K, Repr]]], sz)
       new ParHashMap[K, Repr](trie)
     }
   }
@@ -226,7 +226,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
 
   /* tasks */
 
-  class CreateTrie(bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, V]], offset: Int, howmany: Int)
+  class CreateTrie(bucks: Array[Unrolled[(K, V)]], root: Array[OldHashMap[K, V]], offset: Int, howmany: Int)
   extends Task[Unit, CreateTrie] {
     @volatile var result = ()
     def leaf(prev: Option[Unit]) = {
@@ -238,8 +238,8 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
       }
       result = result
     }
-    private def createTrie(elems: Unrolled[(K, V)]): HashMap[K, V] = {
-      var trie = new HashMap[K, V]
+    private def createTrie(elems: Unrolled[(K, V)]): OldHashMap[K, V] = {
+      var trie = OldHashMap.empty[K, V]
 
       var unrolled = elems
       var i = 0
@@ -248,7 +248,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
         val chunksz = unrolled.size
         while (i < chunksz) {
           val kv = chunkarr(i)
-          val hc = trie.computeHash(kv._1)
+          val hc = Hashing.computeHash(kv._1)
           trie = trie.updated0(kv._1, hc, rootbits, kv._2, kv, null)
           i += 1
         }
@@ -265,20 +265,20 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
     def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
   }
 
-  class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int)
+  class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[OldHashMap[K, AnyRef]], offset: Int, howmany: Int)
   extends Task[Unit, CreateGroupedTrie[Repr]] {
     @volatile var result = ()
     def leaf(prev: Option[Unit]) = {
       var i = offset
       val until = offset + howmany
       while (i < until) {
-        root(i) = createGroupedTrie(bucks(i)).asInstanceOf[HashMap[K, AnyRef]]
+        root(i) = createGroupedTrie(bucks(i)).asInstanceOf[OldHashMap[K, AnyRef]]
         i += 1
       }
       result = result
     }
-    private def createGroupedTrie(elems: Unrolled[(K, V)]): HashMap[K, Repr] = {
-      var trie = new HashMap[K, Combiner[V, Repr]]
+    private def createGroupedTrie(elems: Unrolled[(K, V)]): OldHashMap[K, Repr] = {
+      var trie = OldHashMap.empty[K, Combiner[V, Repr]]
 
       var unrolled = elems
       var i = 0
@@ -287,7 +287,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
         val chunksz = unrolled.size
         while (i < chunksz) {
           val kv = chunkarr(i)
-          val hc = trie.computeHash(kv._1)
+          val hc = Hashing.computeHash(kv._1)
 
           // check to see if already present
           val cmb: Combiner[V, Repr] = trie.get0(kv._1, hc, rootbits) match {
@@ -304,23 +304,23 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
         unrolled = unrolled.next
       }
 
-      evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]]
+      evaluateCombiners(trie).asInstanceOf[OldHashMap[K, Repr]]
     }
-    private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match {
-      case hm1: HashMap.HashMap1[_, _] =>
+    private def evaluateCombiners(trie: OldHashMap[K, Combiner[V, Repr]]): OldHashMap[K, Repr] = trie match {
+      case hm1: OldHashMap.OldHashMap1[_, _] =>
         val evaledvalue = hm1.value.result
-        new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null)
-      case hmc: HashMap.HashMapCollision1[_, _] =>
+        new OldHashMap.OldHashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null)
+      case hmc: OldHashMap.OldHashMapCollision1[_, _] =>
         val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) }
-        new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs)
-      case htm: HashMap.HashTrieMap[k, v] =>
+        new OldHashMap.OldHashMapCollision1[K, Repr](hmc.hash, evaledkvs)
+      case htm: OldHashMap.HashTrieMap[k, v] =>
         var i = 0
         while (i < htm.elems.length) {
-          htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]]
+          htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[OldHashMap[k, v]]
           i += 1
         }
-        htm.asInstanceOf[HashMap[K, Repr]]
-      case empty => empty.asInstanceOf[HashMap[K, Repr]]
+        htm.asInstanceOf[OldHashMap[K, Repr]]
+      case empty => empty.asInstanceOf[OldHashMap[K, Repr]]
     }
     def split = {
       val fp = howmany / 2
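In the combiner above, addOne files each pair into one of 32 unrolled-buffer buckets keyed by the low five bits of the improved hash, so every bucket corresponds to one root position of the final hash trie and can be turned into a subtrie by an independent CreateTrie task. The masking arithmetic, shown in isolation:

    // Low 5 bits select one of 2^5 = 32 root buckets,
    // matching the branching factor of the hash trie.
    def bucketOf(hash: Int): Int = hash & 0x1f

    assert(bucketOf(0x20) == 0)  // 100000b: low five bits are zero
    assert(bucketOf(0x3f) == 31) // 111111b: the last bucket
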
 
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala
index 9e784ff5..ae3ea957 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala
@@ -15,17 +15,14 @@
 package collection.parallel.immutable
 
+import scala.collection.Hashing
 import scala.collection.parallel.ParSetLike
 import scala.collection.parallel.Combiner
 import scala.collection.parallel.IterableSplitter
 import scala.collection.mutable.UnrolledBuffer.Unrolled
 import scala.collection.mutable.UnrolledBuffer
-import scala.collection.generic.ParSetFactory
-import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.GenericParTemplate
-import scala.collection.generic.GenericParCompanion
-import scala.collection.generic.GenericCompanion
-import scala.collection.immutable.{ HashSet, TrieIterator }
+import scala.collection.generic.{CanCombineFrom, GenericParCompanion, GenericParTemplate, ParSetFactory}
+import scala.collection.immutable.{OldHashSet, TrieIterator}
 import scala.collection.parallel.Task
 
@@ -47,17 +44,17 @@ import scala.collection.parallel.Task
  *  @define coll immutable parallel hash set
  */
 @SerialVersionUID(1L)
-class ParHashSet[T] private[immutable] (private[this] val trie: HashSet[T])
+class ParHashSet[T] private[immutable] (private[this] val trie: OldHashSet[T])
 extends ParSet[T]
    with GenericParTemplate[T, ParHashSet]
-   with ParSetLike[T, ParHashSet[T], HashSet[T]]
+   with ParSetLike[T, ParHashSet, ParHashSet[T], OldHashSet[T]]
    with Serializable
 {
 self =>
 
-  def this() = this(HashSet.empty[T])
+  def this() = this(OldHashSet.empty[T])
 
-  override def companion: GenericCompanion[ParHashSet] with GenericParCompanion[ParHashSet] = ParHashSet
+  override def companion: GenericParCompanion[ParHashSet] = ParHashSet
 
   override def empty: ParHashSet[T] = new ParHashSet[T]
 
@@ -71,7 +68,8 @@ self =>
 
   def contains(e: T): Boolean = trie.contains(e)
 
-  override def size = trie.size
+  def size = trie.size
+  def knownSize = trie.size
 
   protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match {
     case Some(old) => old
     case None => newc
@@ -129,10 +127,10 @@ self =>
 object ParHashSet extends ParSetFactory[ParHashSet] {
   def newCombiner[T]: Combiner[T, ParHashSet[T]] = HashSetCombiner[T]
 
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] =
+  implicit def canBuildFrom[T]: CanCombineFrom[ParHashSet[_], T, ParHashSet[T]] =
     new GenericCanCombineFrom[T]
 
-  def fromTrie[T](t: HashSet[T]) = new ParHashSet(t)
+  def fromTrie[T](t: OldHashSet[T]) = new ParHashSet(t)
 }
 
@@ -140,11 +138,11 @@
 private[immutable] abstract class HashSetCombiner[T]
 extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
 //self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
   import HashSetCombiner._
-  val emptyTrie = HashSet.empty[T]
+  val emptyTrie = OldHashSet.empty[T]
 
-  def +=(elem: T) = {
+  def addOne(elem: T) = {
     sz += 1
-    val hc = emptyTrie.computeHash(elem)
+    val hc = Hashing.computeHash(elem)
     val pos = hc & 0x1f
     if (buckets(pos) eq null) {
       // initialize bucket
@@ -155,9 +153,9 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
     this
   }
 
-  def result = {
+  def result() = {
     val bucks = buckets.filter(_ != null).map(_.headPtr)
-    val root = new Array[HashSet[T]](bucks.length)
+    val root = new Array[OldHashSet[T]](bucks.length)
 
     combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
 
@@ -172,14 +170,14 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
     if (sz == 0) new ParHashSet[T]
     else if (sz == 1) new ParHashSet[T](root(0))
     else {
-      val trie = new HashSet.HashTrieSet(bitmap, root, sz)
+      val trie = new OldHashSet.HashTrieSet(bitmap, root, sz)
       new ParHashSet[T](trie)
     }
   }
 
   /* tasks */
 
-  class CreateTrie(bucks: Array[Unrolled[Any]], root: Array[HashSet[T]], offset: Int, howmany: Int)
+  class CreateTrie(bucks: Array[Unrolled[Any]], root: Array[OldHashSet[T]], offset: Int, howmany: Int)
   extends Task[Unit, CreateTrie] {
     var result = ()
     def leaf(prev: Option[Unit]) = {
@@ -190,8 +188,8 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
         i += 1
       }
     }
-    private def createTrie(elems: Unrolled[Any]): HashSet[T] = {
-      var trie = new HashSet[T]
+    private def createTrie(elems: Unrolled[Any]): OldHashSet[T] = {
+      var trie = OldHashSet.empty[T]
 
       var unrolled = elems
       var i = 0
@@ -200,7 +198,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
         val chunksz = unrolled.size
         while (i < chunksz) {
           val v = chunkarr(i).asInstanceOf[T]
-          val hc = trie.computeHash(v)
+          val hc = Hashing.computeHash(v)
           trie = trie.updated0(v, hc, rootbits) // internal API, private[collection]
           i += 1
         }
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala
index fa1e2152..d39a7f3e 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala
@@ -30,13 +30,11 @@ import scala.collection.parallel.Combiner
  *  @since 2.9
  */
 trait ParIterable[+T]
-extends scala.collection.GenIterable[T]
-   with scala.collection.parallel.ParIterable[T]
+extends scala.collection.parallel.ParIterable[T]
    with GenericParTemplate[T, ParIterable]
-   with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
-   with Immutable
+   with ParIterableLike[T, ParIterable, ParIterable[T], scala.collection.immutable.Iterable[T]]
 {
-  override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
+  override def companion: GenericParCompanion[ParIterable] = ParIterable
 
   // if `immutable.ParIterableLike` is introduced, please move these 4 methods there
   override def toIterable: ParIterable[T] = this
   override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
@@ -45,7 +43,7 @@
 /** $factoryInfo
  */
 object ParIterable extends ParFactory[ParIterable] {
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] =
+  implicit def canBuildFrom[T]: CanCombineFrom[ParIterable[_], T, ParIterable[T]] =
     new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T]
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala
index 2537da71..9ec23007 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala
@@ -18,9 +18,10 @@ import scala.collection.generic.ParMapFactory
 import scala.collection.generic.GenericParMapTemplate
 import scala.collection.generic.GenericParMapCompanion
 import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.ParMapLike
 import scala.collection.parallel.Combiner
 
+import scala.language.higherKinds
+
 /** A template trait for immutable parallel maps.
  *
  *  $sideeffects
@@ -32,11 +33,10 @@ import scala.collection.parallel.Combiner
  *  @since 2.9
  */
 trait ParMap[K, +V]
-extends scala.collection/*.immutable*/.GenMap[K, V]
-   with GenericParMapTemplate[K, V, ParMap]
+extends GenericParMapTemplate[K, V, ParMap]
    with parallel.ParMap[K, V]
    with ParIterable[(K, V)]
-   with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]]
+   with ParMapLike[K, V, ParMap, ParMap[K, V], scala.collection.immutable.Map[K, V]]
 {
 self =>
 
@@ -46,12 +46,6 @@ self =>
 
   override def stringPrefix = "ParMap"
 
-  override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]]
-
-  override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
-
-  def + [U >: V](kv: (K, U)): ParMap[K, U]
-
   /** The same map with a given default function.
    *  Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`.
    *
@@ -73,6 +67,29 @@ self =>
 }
 
+trait ParMapLike[
+  K,
+  +V,
+  +CC[X, Y] <: ParMap[X, Y],
+  +Repr <: ParMapLike[K, V, ParMap, Repr, Sequential] with ParMap[K, V],
+  +Sequential <: Map[K, V] with MapOps[K, V, Map, Sequential]]
+  extends parallel.ParMapLike[K, V, CC, Repr, Sequential]
+    with parallel.ParIterableLike[(K, V), ParIterable, Repr, Sequential] {
+
+  def mapCompanion: GenericParMapCompanion[CC]
+
+  def empty: Repr
+
+  override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]]
+
+  override def updated [U >: V](key: K, value: U): CC[K, U] = this + ((key, value))
+
+  def + [U >: V](kv: (K, U)): CC[K, U]
+
+  def - (key: K): Repr
+
+}
+
 object ParMap extends ParMapFactory[ParMap] {
 
@@ -84,6 +101,7 @@ object ParMap extends ParMapFactory[ParMap] {
 
   class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V)
   extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
+    def knownSize = underlying.knownSize
     override def empty = new WithDefault(underlying.empty, d)
     override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d)
     override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2)
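With updated and + relocated onto the new ParMapLike, the usual immutable-map idioms keep returning parallel maps, and withDefault still only affects apply. A brief sketch with illustrative values:

    import scala.collection.parallel.immutable.ParMap

    val m  = ParMap("a" -> 1)
    val m2 = m.updated("b", 2)          // same as m + ("b" -> 2), still a ParMap
    val dflt = m2.withDefault(_ => 0)
    assert(dflt("missing") == 0)        // default applies to apply, not to get/contains
    assert(dflt.get("missing").isEmpty)
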
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala
index 14448125..84b97eb0 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala
@@ -44,6 +44,7 @@ self =>
   override def seq = range
 
   @inline final def length = range.length
+  @inline final def knownSize = range.knownSize
 
   @inline final def apply(idx: Int) = range.apply(idx)
 
@@ -116,7 +117,7 @@ self =>
 object ParRange {
   def apply(start: Int, end: Int, step: Int, inclusive: Boolean) = new ParRange(
-    if (inclusive) new Range.Inclusive(start, end, step)
-    else new Range(start, end, step)
+    if (inclusive) Range.inclusive(start, end, step)
+    else Range(start, end, step)
   )
 }
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala
index 31f33950..3241caab 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala
@@ -14,11 +14,7 @@ package scala
 package collection
 package parallel.immutable
 
-import scala.collection.generic.GenericParTemplate
-import scala.collection.generic.GenericCompanion
-import scala.collection.generic.GenericParCompanion
-import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.ParFactory
+import scala.collection.generic.{CanCombineFrom, GenericParCompanion, GenericParTemplate, ParFactory}
 import scala.collection.parallel.ParSeqLike
 import scala.collection.parallel.Combiner
 
@@ -28,13 +24,12 @@ import scala.collection.parallel.Combiner
  *  @define coll mutable parallel sequence
  */
 trait ParSeq[+T]
-extends scala.collection/*.immutable*/.GenSeq[T]
-   with scala.collection.parallel.ParSeq[T]
+extends scala.collection.parallel.ParSeq[T]
    with ParIterable[T]
    with GenericParTemplate[T, ParSeq]
-   with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]]
+   with ParSeqLike[T, ParSeq, ParSeq[T], scala.collection.immutable.Seq[T]]
 {
-  override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
+  override def companion: GenericParCompanion[ParSeq] = ParSeq
   override def toSeq: ParSeq[T] = this
 }
 
@@ -43,7 +38,7 @@
  *  @define coll mutable parallel sequence
  */
 object ParSeq extends ParFactory[ParSeq] {
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
+  implicit def canBuildFrom[T]: CanCombineFrom[ParSeq[_], T, ParSeq[T]] = new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
   def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala
index f509dde5..b8a893d1 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala
@@ -24,16 +24,15 @@ import scala.collection.parallel.Combiner
  *  @define coll mutable parallel set
  */
 trait ParSet[T]
-extends scala.collection/*.immutable*/.GenSet[T]
-   with GenericParTemplate[T, ParSet]
+extends GenericParTemplate[T, ParSet]
    with parallel.ParSet[T]
    with ParIterable[T]
-   with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]]
+   with ParSetLike[T, ParSet, ParSet[T], scala.collection.immutable.Set[T]]
 { self =>
 
   override def empty: ParSet[T] = ParHashSet[T]()
 
-  override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
+  override def companion: GenericParCompanion[ParSet] = ParSet
 
   override def stringPrefix = "ParSet"
 
@@ -48,5 +47,5 @@ self =>
 object ParSet extends ParSetFactory[ParSet] {
   def newCombiner[T]: Combiner[T, ParSet[T]] = HashSetCombiner[T]
 
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T]
+  implicit def canBuildFrom[T]: CanCombineFrom[ParSet[_], T, ParSet[T]] = new GenericCanCombineFrom[T]
 }
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala b/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala
index d71c0d67..a9c894b1 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala
@@ -42,7 +42,7 @@ import immutable.VectorIterator
 class ParVector[+T](private[this] val vector: Vector[T])
 extends ParSeq[T]
    with GenericParTemplate[T, ParVector]
-   with ParSeqLike[T, ParVector[T], Vector[T]]
+   with ParSeqLike[T, ParVector, ParVector[T], Vector[T]]
    with Serializable
 {
   override def companion = ParVector
 
@@ -52,6 +52,7 @@ extends ParSeq[T]
   def apply(idx: Int) = vector.apply(idx)
 
   def length = vector.length
+  def knownSize = vector.knownSize
 
   def splitter: SeqSplitter[T] = {
     val pit = new ParVectorIterator(vector.startIndex, vector.endIndex)
@@ -63,22 +64,27 @@ extends ParSeq[T]
 
   override def toVector: Vector[T] = vector
 
+  // TODO Implement ParVectorIterator without extending VectorIterator, which will eventually
+  // become private final. Inlining the contents of the current VectorIterator is not as easy
+  // as it seems because it relies a lot on Vector internals.
+  // Duplicating the whole Vector data structure seems to be the safest way, but we will lose
+  // interoperability with the standard Vector.
   class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
     def remaining: Int = remainingElementCount
     def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
-    def split: Seq[ParVectorIterator] = {
+    def split: scala.collection.immutable.Seq[ParVectorIterator] = {
       val rem = remaining
       if (rem >= 2) psplit(rem / 2, rem - rem / 2)
-      else Seq(this)
+      else scala.collection.immutable.Seq(this)
     }
-    def psplit(sizes: Int*): Seq[ParVectorIterator] = {
+    def psplit(sizes: Int*): scala.Seq[ParVectorIterator] = {
       var remvector = remainingVector
-      val splitted = new ArrayBuffer[Vector[T]]
+      val splitted = List.newBuilder[Vector[T]]
       for (sz <- sizes) {
         splitted += remvector.take(sz)
         remvector = remvector.drop(sz)
       }
-      splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
+      splitted.result().map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
     }
   }
 }
@@ -88,7 +94,7 @@ extends ParSeq[T]
  *  @define coll immutable parallel vector
 */
 object ParVector extends ParFactory[ParVector] {
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] =
+  implicit def canBuildFrom[T]: CanCombineFrom[ParVector[_], T, ParVector[T]] =
     new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T]
@@ -103,7 +109,7 @@ private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[
 
   def size: Int = sz
 
-  def +=(elem: T): this.type = {
+  def addOne(elem: T): this.type = {
     vectors.last += elem
     sz += 1
     this
diff --git a/core/src/main/scala/scala/collection/parallel/immutable/package.scala b/core/src/main/scala/scala/collection/parallel/immutable/package.scala
index 2a4e65ae..5130698c 100644
--- a/core/src/main/scala/scala/collection/parallel/immutable/package.scala
+++ b/core/src/main/scala/scala/collection/parallel/immutable/package.scala
@@ -27,6 +27,7 @@ package immutable {
   self =>
 
     def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException("" + idx)
+    def knownSize = length
 
     override def seq: collection.immutable.Seq[T] = new collection.AbstractSeq[T] with collection.immutable.Seq[T] with CustomParallelizable[T, ParSeq[T]] {
       override def length: Int = self.length
       override def apply(idx: Int): T = self.apply(idx)
diff --git a/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala b/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala
index c0052d54..1f63c7fb 100644
--- a/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -13,7 +13,7 @@ package scala
 package collection.parallel.mutable
 
-import scala.collection.generic.Growable
+import scala.collection.mutable.Growable
 import scala.collection.generic.Sizing
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.parallel.Combiner
@@ -30,7 +30,7 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
 //self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
   val chain: ArrayBuffer[Buff]
   val lastbuff = chain.last
-  def +=(elem: Elem) = { lastbuff += elem; this }
+  def addOne(elem: Elem) = { lastbuff += elem; this }
   def result: To = allocateAndCopy
   def clear() = { chain.clear() }
   def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
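The combiners in this patch are renamed from += to addOne, and Growable in 2.13 keeps += as a final alias, so existing call sites still compile. A hedged sketch of using a combiner directly:

    import scala.collection.parallel.immutable.ParVector

    val c = ParVector.newCombiner[Int] // a Combiner is a Builder with a parallel combine step
    c += 1                             // += delegates to addOne
    c.addOne(2)
    assert(c.result() sameElements ParVector(1, 2))
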
 
diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala
index 31c663f7..56928f85 100644
--- a/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala
+++ b/core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala
@@ -16,10 +16,8 @@ package collection.parallel.mutable
 
 import scala.collection.generic.GenericParTemplate
-import scala.collection.generic.GenericCompanion
 import scala.collection.generic.GenericParCompanion
 import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.CanBuildFrom
 import scala.collection.generic.ParFactory
 import scala.collection.parallel.Combiner
 import scala.collection.parallel.SeqSplitter
@@ -28,7 +26,6 @@ import scala.collection.parallel.Task
 import scala.collection.parallel.CHECK_RATE
 import scala.collection.mutable.ArraySeq
 import scala.collection.mutable.Builder
-import scala.collection.GenTraversableOnce
 import scala.reflect.ClassTag
 
 /** Parallel sequence holding elements in a linear array.
@@ -55,30 +52,39 @@ import scala.reflect.ClassTag
  *
 */
 @SerialVersionUID(1L)
-class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T])
+class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T], sz: Int)
 extends ParSeq[T]
    with GenericParTemplate[T, ParArray]
-   with ParSeqLike[T, ParArray[T], ArraySeq[T]]
+   with ParSeqLike[T, ParArray, ParArray[T], ArraySeq[T]]
    with Serializable
 {
 self =>
 
   @transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]]
 
-  override def companion: GenericCompanion[ParArray] with GenericParCompanion[ParArray] = ParArray
+  override def companion: GenericParCompanion[ParArray] = ParArray
 
-  def this(sz: Int) = this {
+  def this(arr: ArraySeq[T]) = this(arr, arr.length)
+
+  def this(sz: Int) = this({
     require(sz >= 0)
-    new ArraySeq[T](sz)
-  }
+    ArraySeq.make(new Array[Any](sz)).asInstanceOf[ArraySeq[T]]
+  }, sz)
 
-  def apply(i: Int) = array(i).asInstanceOf[T]
+  def apply(i: Int) = {
+    if (i >= sz) throw new IndexOutOfBoundsException(i.toString)
+    array(i).asInstanceOf[T]
+  }
 
-  def update(i: Int, elem: T) = array(i) = elem
+  def update(i: Int, elem: T) = {
+    if (i >= sz) throw new IndexOutOfBoundsException(i.toString)
+    array(i) = elem
+  }
 
-  def length = arrayseq.length
+  def length = sz
+  def knownSize = sz
 
-  override def seq = arrayseq
+  def seq = (if (length == arrayseq.length) arrayseq else arrayseq.take(length)): ArraySeq[T]
 
   protected[parallel] def splitter: ParArrayIterator = {
     val pit = new ParArrayIterator
@@ -306,10 +312,11 @@ self =>
       this
     }
 
-    override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = {
+    override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Int = {
       val totallen = (self.length - i) min len min (array.length - from)
       Array.copy(arr, i, array, from, totallen)
       i += totallen
+      totallen
     }
 
     override def prefixLength(pred: T => Boolean): Int = {
@@ -366,10 +373,11 @@ self =>
       pos
     }
 
-    override def sameElements(that: Iterator[_]): Boolean = {
+    override def sameElements[B >: T](that: IterableOnce[B]): Boolean = {
       var same = true
-      while (i < until && that.hasNext) {
-        if (arr(i) != that.next) {
+      val thatIt = that.iterator
+      while (i < until && thatIt.hasNext) {
+        if (arr(i) != thatIt.next()) {
          i = until
          same = false
        }
@@ -413,12 +421,11 @@ self =>
      }
    }
 
-    override def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
+    override def flatmap2combiner[S, That](f: T => IterableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
       //val cb = pbf(self.repr)
       while (i < until) {
-        val traversable = f(arr(i).asInstanceOf[T])
-        if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
-        else cb ++= traversable.seq
+        val it = f(arr(i).asInstanceOf[T])
+        cb ++= it
         i += 1
       }
       cb
@@ -577,25 +584,23 @@ self =>
 
   /* operations */
 
-  private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]]
-
-  override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) {
+  override def map[S](f: T => S) = {
     // reserve an array
-    val targarrseq = new ArraySeq[S](length)
-    val targetarr = targarrseq.array.asInstanceOf[Array[Any]]
+    val targetarr = new Array[Any](length)
+    val targarrseq = ArraySeq.make(targetarr).asInstanceOf[ArraySeq[S]]
 
     // fill it in parallel
     tasksupport.executeAndWaitResult(new Map[S](f, targetarr, 0, length))
 
     // wrap it into a parallel array
-    (new ParArray[S](targarrseq)).asInstanceOf[That]
-  } else super.map(f)(bf)
+    new ParArray[S](targarrseq)
+  }
 
-  override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That =
-    if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) {
+  override def scan[U >: T](z: U)(op: (U, U) => U) =
+    if (tasksupport.parallelismLevel > 1) {
       // reserve an array
-      val targarrseq = new ArraySeq[U](length + 1)
-      val targetarr = targarrseq.array.asInstanceOf[Array[Any]]
+      val targetarr = new Array[Any](length + 1)
+      val targarrseq = ArraySeq.make(targetarr).asInstanceOf[ArraySeq[U]]
       targetarr(0) = z
 
       // do a parallel prefix scan
@@ -604,8 +609,8 @@ self =>
       })
 
       // wrap the array into a parallel array
-      (new ParArray[U](targarrseq)).asInstanceOf[That]
-    } else super.scan(z)(op)(cbf)
+      new ParArray[U](targarrseq)
+    } else super.scan(z)(op)
 
   /* tasks */
 
@@ -688,7 +693,7 @@ self =>
  *  @define coll parallel array
 */
 object ParArray extends ParFactory[ParArray] {
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParArray[T]] = new GenericCanCombineFrom[T]
+  implicit def canBuildFrom[T]: CanCombineFrom[ParArray[_], T, ParArray[T]] = new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParArray[T]] = newCombiner
   def newCombiner[T]: Combiner[T, ParArray[T]] = ParArrayCombiner[T]
@@ -700,10 +705,8 @@ object ParArray extends ParFactory[ParArray] {
   */
   def handoff[T](arr: Array[T], sz: Int): ParArray[T] = wrapOrRebuild(arr, sz)
 
-  private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match {
-    case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz))
-    case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz))
-  }
+  private def wrapOrRebuild[T](arr: AnyRef, sz: Int) =
+    new ParArray[T](ArraySeq.make(scala.runtime.ScalaRunTime.toObjectArray(arr)).asInstanceOf[ArraySeq[T]], sz)
 
   def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = {
     val newarr = new Array[T](arr.length)
@@ -711,10 +714,13 @@ object ParArray extends ParFactory[ParArray] {
     handoff(newarr)
   }
 
-  def fromTraversables[T](xss: GenTraversableOnce[T]*) = {
+  @deprecated("fromTraversables has been renamed to fromIterables", "0.1.3")
+  @inline final def fromTraversables[T](xss: IterableOnce[T]*): ParArray[T] = fromIterables(xss: _*)
+
+  def fromIterables[T](xss: IterableOnce[T]*): ParArray[T] = {
     val cb = ParArrayCombiner[T]()
     for (xs <- xss) {
-      cb ++= xs.seq
+      cb ++= xs
     }
     cb.result
   }
 }
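map above fills a pre-allocated array in parallel, and scan (given an associative operator and parallelism > 1) runs the parallel prefix computation; the observable results match the sequential versions. Illustrative values:

    import scala.collection.parallel.mutable.ParArray

    val xs = ParArray(1, 2, 3, 4)
    // scan includes the zero element, so the result has length + 1 entries
    assert(xs.scan(0)(_ + _) sameElements ParArray(0, 1, 3, 6, 10))
    assert(xs.map(_ * 2) sameElements ParArray(2, 4, 6, 8))
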
 
diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala
index 77240bb9..86e92637 100644
--- a/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -30,7 +30,6 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
 
   abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int)
   extends IterableSplitter[T] with SizeMapUtils {
-    import scala.collection.DebugUtils._
 
     private[this] var traversed = 0
     private[this] val itertable = table
@@ -68,9 +67,10 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
       val sndtotal = remaining - fsttotal
       val sndit = newIterator(sndidx, snduntil, sndtotal)
 
-      Seq(fstit, sndit)
-    } else Seq(this)
+      scala.Seq(fstit, sndit)
+    } else scala.Seq(this)
 
+    import DebugUtils._
     override def debugInformation = buildString {
       append =>
       append("Parallel flat hash table iterator")
diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala
index 2634b914..eb805d0f 100644
--- a/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala
@@ -15,7 +15,7 @@ package collection.parallel
 package mutable
 
 import scala.collection.generic._
-import scala.collection.mutable.DefaultEntry
+import scala.collection.parallel.mutable.ParHashMap.DefaultEntry
 import scala.collection.mutable.HashEntry
 import scala.collection.mutable.HashTable
 import scala.collection.mutable.UnrolledBuffer
@@ -36,18 +36,18 @@ import scala.collection.parallel.Task
  *  @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]]
  *  section on Parallel Hash Tables for more information.
 */
-@SerialVersionUID(1L)
-class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]])
+@SerialVersionUID(3L)
+class ParHashMap[K, V] private[collection] (contents: ParHashTable.Contents[K, DefaultEntry[K, V]])
 extends ParMap[K, V]
    with GenericParMapTemplate[K, V, ParHashMap]
-   with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]]
-   with ParHashTable[K, DefaultEntry[K, V]]
+   with ParMapLike[K, V, ParHashMap, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]]
+   with ParHashTable[K, V, DefaultEntry[K, V]]
    with Serializable
 {
 self =>
 initWithContents(contents)
 
-  type Entry = scala.collection.mutable.DefaultEntry[K, V]
+  type Entry = DefaultEntry[K, V]
 
   def this() = this(null)
 
@@ -57,13 +57,14 @@ self =>
 
   protected[this] override def newCombiner = ParHashMapCombiner[K, V]
 
-  override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents)
+  // TODO Redesign ParHashMap so that it can be converted to a mutable.HashMap in constant time
+  def seq = scala.collection.mutable.HashMap.from(this)
 
   def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
 
-  override def size = tableSize
+  def knownSize = tableSize
 
-  override def clear() = clearTable()
+  def clear() = clearTable()
 
   def get(key: K): Option[V] = {
     val e = findEntry(key)
@@ -85,13 +86,13 @@ self =>
     else None
   }
 
-  def += (kv: (K, V)): this.type = {
+  def addOne(kv: (K, V)): this.type = {
     val e = findOrAddEntry(kv._1, kv._2)
     if (e ne null) e.value = kv._2
     this
   }
 
-  def -=(key: K): this.type = { removeEntry(key); this }
+  def subtractOne(key: K): this.type = { removeEntry(key); this }
 
   override def stringPrefix = "ParHashMap"
 
@@ -103,8 +104,8 @@ self =>
     new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
   }
 
-  protected def createNewEntry[V1](key: K, value: V1): Entry = {
-    new Entry(key, value.asInstanceOf[V])
+  def createNewEntry(key: K, value: V): Entry = {
+    new Entry(key, value)
   }
 
   private def writeObject(out: java.io.ObjectOutputStream): Unit = {
@@ -115,7 +116,7 @@ self =>
   }
 
   private def readObject(in: java.io.ObjectInputStream): Unit = {
-    init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
+    init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject().asInstanceOf[V]))
   }
 
   private[parallel] override def brokenInvariants = {
@@ -158,6 +159,10 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
   def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = ParHashMapCombiner.apply[K, V]
 
   implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V]
+
+  final class DefaultEntry[K, V](val key: K, var value: V) extends HashEntry[K, DefaultEntry[K, V]] with Serializable {
+    override def toString: String = s"DefaultEntry($key -> $value)"
+  }
 }
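With addOne/subtractOne in place, the mutable parallel map is still driven through += and -= (the final Growable/Shrinkable aliases), while seq, per the TODO above, now copies into a fresh mutable.HashMap rather than sharing the table. Illustrative usage:

    import scala.collection.parallel.mutable.ParHashMap

    val m = ParHashMap[String, Int]()
    m += ("one" -> 1)                 // routes to addOne
    m += ("two" -> 2)
    m -= "two"                        // routes to subtractOne
    assert(m.get("one").contains(1))
    val snapshot = m.seq              // linear-time copy into a mutable.HashMap
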
(ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024 // construct table val table = new AddingHashTable(size, tableLoadFactor, seedvalue) val bucks = buckets.map(b => if (b ne null) b.headPtr else null) @@ -192,10 +197,10 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau } else { // construct a normal table and fill it sequentially // TODO parallelize by keeping separate sizemaps and merging them - object table extends HashTable[K, DefaultEntry[K, V]] { + object table extends HashTable[K, DefaultEntry[K, V], DefaultEntry[K, V]] with WithContents[K, DefaultEntry[K, V], DefaultEntry[K, V]] { type Entry = DefaultEntry[K, V] def insertEntry(e: Entry): Unit = { super.findOrAddEntry(e.key, e) } - def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry] + def createNewEntry(key: K, entry: Entry): Entry = entry sizeMapInit(table.length) } var i = 0 @@ -218,7 +223,9 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau * and true if the key was successfully inserted. It does not update the number of elements * in the table. */ - private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] { + private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, V, DefaultEntry[K, V]] + with WithContents[K, V, DefaultEntry[K, V]] { + import HashTable._ _loadFactor = lf table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems))) @@ -227,7 +234,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau threshold = newThreshold(_loadFactor, table.length) sizeMapInit(table.length) def setSize(sz: Int) = tableSize = sz - def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = { + def insertEntry(/*block: Int, */e: DefaultEntry[K, V]): Boolean = { var h = index(elemHashCode(e.key)) val olde = table(h).asInstanceOf[DefaultEntry[K, V]] @@ -248,7 +255,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau true } else false } - protected def createNewEntry[X](key: K, x: X) = ??? + def createNewEntry(key: K, x: V) = ??? 
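// The `+=` -> `addOne` and `result` -> `result()` renamings in this file follow
// the Scala 2.13 Growable/Builder contract, where the symbolic operators are
// final aliases of the named methods. A minimal sketch of that contract with a
// hypothetical CountingCombiner (illustration only, not part of this patch):
//
//   import scala.collection.mutable.{ArrayBuffer, Growable}
//
//   final class CountingCombiner[T] extends Growable[T] {
//     private val elems = ArrayBuffer.empty[T]
//     def addOne(elem: T): this.type = { elems += elem; this }
//     def clear(): Unit = elems.clear()
//     def result(): List[T] = elems.toList  // builders now take empty parens
//   }
//
//   val c = new CountingCombiner[Int]
//   c += 1          // still compiles: `+=` is a final alias of addOne
//   c.addOne(2)
//   assert(c.result() == List(1, 2))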
} /* tasks */ diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala index 758e0f12..62bd41fa 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala @@ -41,7 +41,7 @@ import scala.collection.parallel.Task class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T]) extends ParSet[T] with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]] + with ParSetLike[T, ParHashSet, ParHashSet[T], scala.collection.mutable.HashSet[T]] with ParFlatHashTable[T] with Serializable { @@ -58,18 +58,19 @@ extends ParSet[T] override def iterator = splitter - override def size = tableSize - def clear() = clearTable() - override def seq = new scala.collection.mutable.HashSet(hashTableContents) + // TODO Redesign ParHashSet so that it can be converted to a mutable.HashSet in constant time + def seq = scala.collection.mutable.HashSet.from(this) + + def knownSize = tableSize - def +=(elem: T) = { + def addOne(elem: T) = { addElem(elem) this } - def -=(elem: T) = { + def subtractOne(elem: T) = { removeElem(elem) this } @@ -113,7 +114,7 @@ extends ParSet[T] * @define coll parallel hash set */ object ParHashSet extends ParSetFactory[ParHashSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = new GenericCanCombineFrom[T] + implicit def canBuildFrom[T]: CanCombineFrom[ParHashSet[_], T, ParHashSet[T]] = new GenericCanCombineFrom[T] override def newBuilder[T]: Combiner[T, ParHashSet[T]] = newCombiner @@ -128,7 +129,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] { private val nonmasklen = ParHashSetCombiner.nonmasklength private val seedvalue = 27 - def +=(elem: T) = { + def addOne(elem: T) = { val entry = elemToEntry(elem) sz += 1 val hc = improve(entry.hashCode, seedvalue) diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala index 9a52ff18..5385799c 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala @@ -21,7 +21,8 @@ import scala.collection.parallel.IterableSplitter * enriching the data structure by fulfilling certain requirements * for their parallel construction and iteration. 
*/ -trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] { +trait ParHashTable[K, V, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, V, Entry] + with WithContents[K, V, Entry] { override def alwaysInitSizeMap = true @@ -77,7 +78,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec def dup = newIterator(idx, until, totalsize, es) - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { + def split: scala.Seq[IterableSplitter[T]] = if (remaining > 1) { if (until > idx) { // there is at least one more slot for the next iterator // divide the rest of the table @@ -95,7 +96,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec val fes = es val ftotal = totalsize - stotal - Seq( + scala.Seq( newIterator(fidx, funtil, ftotal, fes), newIterator(sidx, suntil, stotal, ses) ) @@ -106,7 +107,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) arrpit.split } - } else Seq(this.asInstanceOf[IterRepr]) + } else scala.Seq(this.asInstanceOf[IterRepr]) private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = { val buff = mutable.ArrayBuffer[Entry]() @@ -144,4 +145,53 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec c } } + +} + +trait WithContents[K, V, Entry >: Null <: HashEntry[K, Entry]] { this: scala.collection.mutable.HashTable[K, V, Entry] => + + protected def initWithContents(c: ParHashTable.Contents[K, Entry]) = { + if (c != null) { + _loadFactor = c.loadFactor + table = c.table + tableSize = c.tableSize + threshold = c.threshold + seedvalue = c.seedvalue + sizemap = c.sizemap + } + if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() + } + + private[collection] def hashTableContents = new ParHashTable.Contents( + _loadFactor, + table, + tableSize, + threshold, + seedvalue, + sizemap + ) } + +private[collection] object ParHashTable { + class Contents[A, Entry >: Null <: HashEntry[A, Entry]]( + val loadFactor: Int, + val table: Array[HashEntry[A, Entry]], + val tableSize: Int, + val threshold: Int, + val seedvalue: Int, + val sizemap: Array[Int] + ) { + import scala.collection.DebugUtils._ + private[collection] def debugInformation = buildString { + append => + append("Hash table contents") + append("-------------------") + append("Table: [" + arrayString(table, 0, table.length) + "]") + append("Table size: " + tableSize) + append("Load factor: " + loadFactor) + append("Seedvalue: " + seedvalue) + append("Threshold: " + threshold) + append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]") + } + } +} \ No newline at end of file diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala index 5cb5cf20..450f0a14 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala @@ -28,12 +28,10 @@ import scala.collection.parallel.{ ParIterableLike, Combiner } * @author Aleksandar Prokopec * @since 2.9 */ -trait ParIterable[T] extends scala.collection.GenIterable[T] - with scala.collection.parallel.ParIterable[T] +trait ParIterable[T] extends scala.collection.parallel.ParIterable[T] with GenericParTemplate[T, ParIterable] - with 
ParIterableLike[T, ParIterable[T], Iterable[T]] - with Mutable { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable + with ParIterableLike[T, ParIterable, ParIterable[T], Iterable[T]] { + override def companion: GenericParCompanion[ParIterable] = ParIterable //protected[this] override def newBuilder = ParIterable.newBuilder[T] // if `mutable.ParIterableLike` is introduced, please move these methods there @@ -47,7 +45,7 @@ trait ParIterable[T] extends scala.collection.GenIterable[T] /** $factoryInfo */ object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] + implicit def canBuildFrom[T]: CanCombineFrom[ParIterable[_], T, ParIterable[T]] = new GenericCanCombineFrom[T] def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala index 27093089..27fcfe4f 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala @@ -28,11 +28,10 @@ import scala.collection.parallel.Combiner * @since 2.9 */ trait ParMap[K, V] -extends GenMap[K, V] - with parallel.ParMap[K, V] +extends parallel.ParMap[K, V] with ParIterable[(K, V)] with GenericParMapTemplate[K, V, ParMap] - with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]] + with ParMapLike[K, V, ParMap, ParMap[K, V], mutable.Map[K, V]] { protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] @@ -43,8 +42,6 @@ extends GenMap[K, V] def seq: scala.collection.mutable.Map[K, V] - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - /** The same map with a given default function. * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. * @@ -74,8 +71,9 @@ object ParMap extends ParMapFactory[ParMap] { class WithDefault[K, V](underlying: ParMap[K, V], d: K => V) extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] { - override def += (kv: (K, V)) = {underlying += kv; this} - def -= (key: K) = {underlying -= key; this} + def knownSize = underlying.knownSize + def addOne(kv: (K, V)) = {underlying += kv; this} + def subtractOne(key: K) = {underlying -= key; this} override def empty = new WithDefault(underlying.empty, d) override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala index 28f76fc5..bed711d3 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala @@ -14,10 +14,8 @@ package scala package collection.parallel package mutable -import scala.collection.generic._ import scala.collection.mutable.Cloneable -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable +import scala.language.higherKinds /** A template trait for mutable parallel maps. 
This trait is to be mixed in * with concrete parallel maps to override the representation type. @@ -34,25 +32,24 @@ import scala.collection.generic.Shrinkable */ trait ParMapLike[K, V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]] -extends scala.collection.GenMapLike[K, V, Repr] - with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential] - with Growable[(K, V)] - with Shrinkable[K] + +CC[X, Y] <: ParMap[X, Y], + +Repr <: ParMapLike[K, V, ParMap, Repr, Sequential] with ParMap[K, V], + +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapOps[K, V, scala.collection.mutable.Map, Sequential]] +extends scala.collection.parallel.ParIterableLike[(K, V), ParIterable, Repr, Sequential] + with scala.collection.parallel.ParMapLike[K, V, CC, Repr, Sequential] + with scala.collection.mutable.Growable[(K, V)] + with scala.collection.mutable.Shrinkable[K] with Cloneable[Repr] { // note: should not override toMap def put(key: K, value: V): Option[V] - def +=(kv: (K, V)): this.type - - def -=(key: K): this.type - - def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[ParMap[K, U]] += kv + def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[CC[K, U]] += kv def -(key: K) = this.clone() -= key def clear(): Unit + + override def clone(): Repr = empty ++= this } diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala index 29d2889b..afa04345 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala @@ -14,7 +14,6 @@ package scala package collection.parallel.mutable import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion import scala.collection.generic.GenericParCompanion import scala.collection.generic.CanCombineFrom import scala.collection.generic.ParFactory @@ -26,13 +25,12 @@ import scala.collection.parallel.Combiner * @define Coll `mutable.ParSeq` * @define coll mutable parallel sequence */ -trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T] - with ParIterable[T] +trait ParSeq[T] extends ParIterable[T] with scala.collection.parallel.ParSeq[T] with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] { + with ParSeqLike[T, ParSeq, ParSeq[T], scala.collection.mutable.Seq[T]] { self => - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq + override def companion: GenericParCompanion[ParSeq] = ParSeq //protected[this] override def newBuilder = ParSeq.newBuilder[T] def update(i: Int, elem: T): Unit @@ -48,7 +46,7 @@ self => * @define coll mutable parallel sequence */ object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] + implicit def canBuildFrom[T]: CanCombineFrom[ParSeq[_], T, ParSeq[T]] = new GenericCanCombineFrom[T] def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala index bef46205..2e8a7d82 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala +++ 
b/core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala @@ -21,14 +21,13 @@ import scala.collection.parallel.Combiner * @author Aleksandar Prokopec */ trait ParSet[T] -extends scala.collection/*.mutable*/.GenSet[T] - with ParIterable[T] +extends ParIterable[T] with scala.collection.parallel.ParSet[T] with GenericParTemplate[T, ParSet] - with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]] + with ParSetLike[T, ParSet, ParSet[T], scala.collection.mutable.Set[T]] { self => - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet + override def companion: GenericParCompanion[ParSet] = ParSet override def empty: ParSet[T] = ParHashSet() def seq: scala.collection.mutable.Set[T] } @@ -39,7 +38,7 @@ self => * @define coll mutable parallel set */ object ParSet extends ParSetFactory[ParSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] + implicit def canBuildFrom[T]: CanCombineFrom[ParSet[_], T, ParSet[T]] = new GenericCanCombineFrom[T] override def newBuilder[T]: Combiner[T, ParSet[T]] = ParHashSet.newBuilder diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala index 9a35a522..6dc7f586 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala @@ -15,9 +15,9 @@ package collection package parallel.mutable import scala.collection.mutable.Cloneable -import scala.collection.GenSetLike -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable +import scala.language.higherKinds +import scala.collection.mutable.Growable +import scala.collection.mutable.Shrinkable /** A template trait for mutable parallel sets. This trait is mixed in with concrete * parallel sets to override the representation type. 
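In the hunk that follows, `+` and `-` keep their copying semantics: each clones the receiver and mutates the copy through the new `addOne`/`subtractOne` members, with `clone()` itself now defined as `empty ++= this`. A minimal sketch of the same clone-then-mutate pattern on a plain 2.13 mutable set (illustration only, not part of this patch):

import scala.collection.mutable

val s = mutable.Set(1, 2, 3)
val t = s.clone() += 4               // copy first, then addOne on the copy
assert(s == mutable.Set(1, 2, 3))    // the receiver is left unchanged
assert(t == mutable.Set(1, 2, 3, 4))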
@@ -32,11 +32,11 @@ import scala.collection.generic.Shrinkable * @since 2.9 */ trait ParSetLike[T, - +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], - +Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]] -extends GenSetLike[T, Repr] - with scala.collection.parallel.ParIterableLike[T, Repr, Sequential] - with scala.collection.parallel.ParSetLike[T, Repr, Sequential] + +CC[X] <: ParIterable[X], + +Repr <: ParSetLike[T, CC, Repr, Sequential] with ParSet[T], + +Sequential <: mutable.Set[T] with mutable.SetOps[T, mutable.Set, Sequential]] +extends scala.collection.parallel.ParIterableLike[T, CC, Repr, Sequential] + with scala.collection.parallel.ParSetLike[T, CC, Repr, Sequential] with Growable[T] with Shrinkable[T] with Cloneable[Repr] @@ -44,13 +44,14 @@ extends GenSetLike[T, Repr] self => override def empty: Repr - def +=(elem: T): this.type + def addOne(elem: T): this.type - def -=(elem: T): this.type + def subtractOne(elem: T): this.type def +(elem: T) = this.clone() += elem def -(elem: T) = this.clone() -= elem + override def clone(): Repr = empty ++= this // note: should not override toSet } diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala b/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala index 57dee26b..3eb7c60c 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala @@ -40,7 +40,7 @@ import scala.collection.concurrent.TrieMapIterator final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V]) extends ParMap[K, V] with GenericParMapTemplate[K, V, ParTrieMap] - with ParMapLike[K, V, ParTrieMap[K, V], TrieMap[K, V]] + with ParMapLike[K, V, ParTrieMap, ParTrieMap[K, V], TrieMap[K, V]] with ParTrieMapCombiner[K, V] with Serializable { @@ -68,12 +68,12 @@ extends ParMap[K, V] def remove(key: K): Option[V] = ctrie.remove(key) - def +=(kv: (K, V)): this.type = { + def addOne(kv: (K, V)): this.type = { ctrie.+=(kv) this } - def -=(key: K): this.type = { + def subtractOne(key: K): this.type = { ctrie.-=(key) this } @@ -90,6 +90,8 @@ extends ParMap[K, V] } } + def knownSize = -1 + override def stringPrefix = "ParTrieMap" /* tasks */ diff --git a/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala index a4a5c53c..9c16f101 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala @@ -26,14 +26,14 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) def allocateAndCopy = if (chain.size > 1) { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] + val array = new Array[Any](size) + val arrayseq = ArraySeq.make(array).asInstanceOf[ArraySeq[T]] combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size)) new ParArray(arrayseq) } else { // optimisation if there is only 1 array - new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size)) + new ParArray(ArraySeq.make(chain(0).internalArray).asInstanceOf[ArraySeq[T]], size) } override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain @@ -84,7 +84,8 @@ trait ResizableParArrayCombiner[T] 
extends LazyCombiner[T, ParArray[T], ExposedA object ResizableParArrayCombiner { def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = { - new { val chain = c } with ResizableParArrayCombiner[T] // was: with EnvironmentPassingCombiner[T, ParArray[T]] + class ResizableParArrayCombinerC[A](val chain: ArrayBuffer[ExposedArrayBuffer[A]]) extends ResizableParArrayCombiner[A] // was: with EnvironmentPassingCombiner[T, ParArray[T]] + new ResizableParArrayCombinerC[T](c) } def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T]) } diff --git a/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala index 88f3b38b..b6a63c8e 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala @@ -26,14 +26,14 @@ extends Combiner[T, ParArray[T]] { // because size is doubling, random access is O(logn)! val buff = new DoublingUnrolledBuffer[Any] - def +=(elem: T) = { + def addOne(elem: T) = { buff += elem this } def result = { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] + val array = new Array[Any](size) + val arrayseq = ArraySeq.make(array).asInstanceOf[ArraySeq[T]] combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size)) diff --git a/core/src/main/scala/scala/collection/parallel/mutable/package.scala b/core/src/main/scala/scala/collection/parallel/mutable/package.scala index 0094bfd0..b289df23 100644 --- a/core/src/main/scala/scala/collection/parallel/mutable/package.scala +++ b/core/src/main/scala/scala/collection/parallel/mutable/package.scala @@ -14,7 +14,6 @@ package scala package collection.parallel import scala.collection.mutable.ArrayBuffer -import scala.collection.mutable.ArraySeq import scala.collection.generic.Sizing package object mutable { @@ -73,9 +72,4 @@ package mutable { } } - private[mutable] class ExposedArraySeq[T](arr: Array[AnyRef], sz: Int) extends ArraySeq[T](sz) { - override val array = arr - override val length = sz - override def stringPrefix = "ArraySeq" - } } diff --git a/core/src/main/scala/scala/collection/parallel/package.scala b/core/src/main/scala/scala/collection/parallel/package.scala index 567645a2..3ea99b31 100644 --- a/core/src/main/scala/scala/collection/parallel/package.scala +++ b/core/src/main/scala/scala/collection/parallel/package.scala @@ -13,8 +13,6 @@ package scala package collection -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.CanCombineFrom import scala.collection.parallel.mutable.ParArray import scala.collection.mutable.UnrolledBuffer import scala.annotation.unchecked.uncheckedVariance @@ -43,22 +41,22 @@ package object parallel { def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = { c match { - case pc: ParIterableLike[_, _, _] => pc.tasksupport = t + case pc: ParIterableLike[_, _, _, _] => pc.tasksupport = t case _ => // do nothing } c } /** Adds toParArray method to collection classes. 
*/ - implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.GenTraversableOnce[T]) { + implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.IterableOnce[T]) { def toParArray = { val t = asGto(c) if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]] else { - val it = t.toIterator + val it = t.iterator val cb = mutable.ParArrayCombiner[T]() - while (it.hasNext) cb += it.next - cb.result + while (it.hasNext) cb += it.next() + cb.result() } } } @@ -68,14 +66,7 @@ package object parallel { package parallel { /** Implicit conversions used in the implementation of parallel collections. */ private[collection] object ParallelCollectionImplicits { - implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] { - def isParallel = bf.isInstanceOf[Parallel] - def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]] - def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new Otherwise[R] { - def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody - } - } - implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] { + implicit def traversable2ops[T](t: scala.collection.IterableOnce[T]) = new TraversableOps[T] { def isParallel = t.isInstanceOf[Parallel] def isParIterable = t.isInstanceOf[ParIterable[_]] def asParIterable = t.asInstanceOf[ParIterable[T]] @@ -87,16 +78,6 @@ package parallel { } } - trait FactoryOps[From, Elem, To] { - trait Otherwise[R] { - def otherwise(notbody: => R): R - } - - def isParallel: Boolean - def asParallel: CanCombineFrom[From, Elem, To] - def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R): Otherwise[R] - } - trait TraversableOps[T] { trait Otherwise[R] { def otherwise(notbody: => R): R @@ -139,13 +120,13 @@ package parallel { } def remaining = until - index def dup = new BufferSplitter(buffer, index, until, signalDelegate) - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { + def split: scala.Seq[IterableSplitter[T]] = if (remaining > 1) { val divsz = (until - index) / 2 - Seq( + scala.Seq( new BufferSplitter(buffer, index, index + divsz, signalDelegate), new BufferSplitter(buffer, index + divsz, until, signalDelegate) ) - } else Seq(this) + } else scala.Seq(this) private[parallel] override def debugInformation = { buildString { append => diff --git a/junit/src/test/scala/MiscTest.scala b/junit/src/test/scala/MiscTest.scala index 043c8c40..adb085fa 100644 --- a/junit/src/test/scala/MiscTest.scala +++ b/junit/src/test/scala/MiscTest.scala @@ -1,10 +1,10 @@ import collection._ - import scala.collection.parallel.CollectionConverters._ - import org.junit.Test import org.junit.Assert._ +import scala.collection.parallel.ParSeq + class MiscTest { @Test def si4459: Unit = { @@ -13,28 +13,22 @@ class MiscTest { } } - def foo(arg: GenSeq[_]): String = arg.map(x => x).mkString(",") + def foo(arg: ParSeq[_]): String = arg.map(x => x).mkString(",") @Test def si4608: Unit = { ((1 to 100) sliding 10).toList.par.map{_.map{i => i * i}}.flatten } - @Test - def si4723: Unit = { - assertTrue(Nil == collection.parallel.ParSeq()) - assertTrue(collection.parallel.ParSeq() == Nil) - } - @Test def si4761: Unit = { - val gs = for (x <- (1 to 5)) yield { if (x % 2 == 0) List(1).seq else List(1).par } + val gs = for (x <- (1 to 5)) yield { if (x % 2 == 0) List(1) else List(1).par } assertEquals("Vector(1, 1, 1, 1, 1)", gs.flatten.toString) - assertEquals("Vector(Vector(1, 1, 
1, 1, 1))", gs.transpose.toString) +// assertEquals("Vector(Vector(1, 1, 1, 1, 1))", gs.transpose.toString) - val s = Stream(Vector(1).par, Vector(2).par) + val s = LazyList(Vector(1).par, Vector(2).par) assertEquals("List(1, 2)", s.flatten.toList.toString) - assertEquals("List(List(1, 2))", s.transpose.map(_.toList).toList.toString) +// assertEquals("List(List(1, 2))", s.transpose.map(_.toList).toList.toString) } @Test @@ -77,7 +71,7 @@ class MiscTest { assert(ex.getSuppressed.size > 0) assert(ex.getSuppressed.forall(_.isInstanceOf[MultipleOf37Exception])) assert(ex.i == 37) - assert(ex.getSuppressed.map(_.asInstanceOf[MultipleOf37Exception].i).toList == List(74, 148, 259, 518)) + assert(ex.getSuppressed.map(_.asInstanceOf[MultipleOf37Exception].i).forall(_ % 37 == 0)) case _: Throwable => assert(false) } @@ -92,7 +86,8 @@ class MiscTest { def check[T](i: Int, f: Int => T): Unit = { val gseq = seqarr(i).toSeq.groupBy(f) val gpar = pararr(i).groupBy(f) - assertEquals(gseq, gpar) + assertTrue(gseq.forall { case (k, vs) => gpar.get(k).exists(_.sameElements(vs)) }) + assertTrue(gpar.forall { case (k, vs) => gseq.get(k).exists(_.sameElements(vs)) }) } for (i <- 0 until 20) check(i, _ > 0) @@ -110,9 +105,9 @@ class MiscTest { @Test def si6467: Unit = { - assertEquals(List(1, 2, 3, 4).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234") + assertEquals(List(1, 2, 3, 4).foldLeft(new java.lang.StringBuffer)(_ append _).toString, "1234") assertEquals(List(1, 2, 3, 4).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234") - assertEquals(Seq(0 until 100: _*).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString) + assertEquals(Seq(0 until 100: _*).foldLeft(new java.lang.StringBuffer)(_ append _).toString, (0 until 100).mkString) assertEquals(Seq(0 until 100: _*).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString) } diff --git a/junit/src/test/scala/scala/SerializationStabilityTest.scala b/junit/src/test/scala/scala/SerializationStabilityTest.scala index 9551ff44..d053444f 100644 --- a/junit/src/test/scala/scala/SerializationStabilityTest.scala +++ b/junit/src/test/scala/scala/SerializationStabilityTest.scala @@ -30,8 +30,7 @@ object SerializationStability extends App { def patch(file: File, line: Int, prevResult: String, result: String): Unit = { amend(file) { content => - // Predef.augmentString = work around scala/bug#11125 on JDK 11 - augmentString(content).lines.toList.zipWithIndex.map { + content.linesIterator.toList.zipWithIndex.map { case (content, i) if i == line - 1 => val newContent = content.replaceAllLiterally(quote(prevResult), quote(result)) if (newContent != content) @@ -51,8 +50,7 @@ object SerializationStability extends App { val newComment = s" // Generated on $timestamp with Scala ${scala.util.Properties.versionString})" amend(file) { content => - // Predef.augmentString = work around scala/bug#11125 on JDK 11 - augmentString(content).lines.toList.map { + content.linesIterator.toList.map { f => f.replaceAll("""^ +// Generated on.*""", newComment) }.mkString("\n") } @@ -83,21 +81,21 @@ object SerializationStability extends App { } } - // Generated on 20170112-12:37:58 with Scala version 2.13.0-20170111-165407-6b8cc67) + // Generated on 20190112-18:04:29 with Scala version 2.13.0-20190109-081947-e69ecf1) overwrite.foreach(updateComment) // check(new collection.concurrent.TrieMap[Any, Any]())( 
"rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmNvbmN1cnJlbnQuVHJpZU1hcKckxpgOIYHPAwAETAALZXF1YWxpdHlvYmp0ABJMc2NhbGEvbWF0aC9FcXVpdjtMAApoYXNoaW5nb2JqdAAcTHNjYWxhL3V0aWwvaGFzaGluZy9IYXNoaW5nO0wABHJvb3R0ABJMamF2YS9sYW5nL09iamVjdDtMAAtyb290dXBkYXRlcnQAOUxqYXZhL3V0aWwvY29uY3VycmVudC9hdG9taWMvQXRvbWljUmVmZXJlbmNlRmllbGRVcGRhdGVyO3hwc3IAMnNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwJE1hbmdsZWRIYXNoaW5nhTBoJQ/mgb0CAAB4cHNyABhzY2FsYS5tYXRoLkVxdWl2JCRhbm9uJDLBbyx4dy/qGwIAAHhwc3IANHNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwU2VyaWFsaXphdGlvbkVuZCSbjdgbbGCt2gIAAHhweA==") // not sure why this one needs stable serialization. import collection.parallel - check(parallel.immutable.ParHashMap(1 -> 2))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoTWFwAAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9IYXNoTWFwO3hwcHBzcgA1c2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuSGFzaE1hcCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAgMAAHhwdwQAAAABc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAcAAAACeA==") - check(parallel.immutable.ParHashSet(1, 2, 3))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoU2V0AAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9IYXNoU2V0O3hwcHBzcgA1c2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuSGFzaFNldCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAgMAAHhwdwQAAAADc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAcAAAACc3EAfgAHAAAAA3g=") + check(parallel.immutable.ParHashMap(1 -> 2))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoTWFwAAAAAAAAAAMCAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJ0xzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9PbGRIYXNoTWFwO3hwcHBzcgAyc2NhbGEuY29sbGVjdGlvbi5nZW5lcmljLkRlZmF1bHRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAwMAAUwAB2ZhY3Rvcnl0ABpMc2NhbGEvY29sbGVjdGlvbi9GYWN0b3J5O3hwc3IAJXNjYWxhLmNvbGxlY3Rpb24uTWFwRmFjdG9yeSRUb0ZhY3RvcnkAAAAAAAAAAwIAAUwAB2ZhY3Rvcnl0AB1Mc2NhbGEvY29sbGVjdGlvbi9NYXBGYWN0b3J5O3hwc3IAJnNjYWxhLnJ1bnRpbWUuTW9kdWxlU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAECAAFMAAttb2R1bGVDbGFzc3QAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAmc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaE1hcCQAAAAAAAAAAwMAAHhwdwQAAAABc3IADHNjYWxhLlR1cGxlMgH73c0i5zR6AgACTAACXzF0ABJMamF2YS9sYW5nL09iamVjdDtMAAJfMnEAfgAReHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AEwAAAAJ4") + check(parallel.immutable.ParHashSet(1, 2, 3))( 
"rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoU2V0AAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJ0xzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9PbGRIYXNoU2V0O3hwcHBzcgAyc2NhbGEuY29sbGVjdGlvbi5nZW5lcmljLkRlZmF1bHRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAwMAAUwAB2ZhY3Rvcnl0ABpMc2NhbGEvY29sbGVjdGlvbi9GYWN0b3J5O3hwc3IAKnNjYWxhLmNvbGxlY3Rpb24uSXRlcmFibGVGYWN0b3J5JFRvRmFjdG9yeQAAAAAAAAADAgABTAAHZmFjdG9yeXQAIkxzY2FsYS9jb2xsZWN0aW9uL0l0ZXJhYmxlRmFjdG9yeTt4cHNyACZzY2FsYS5ydW50aW1lLk1vZHVsZVNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAgABTAALbW9kdWxlQ2xhc3N0ABFMamF2YS9sYW5nL0NsYXNzO3hwdnIAJnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLk9sZEhhc2hTZXQkAAAAAAAAAAMDAAB4cHcEAAAAA3NyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAQAAAAAnNxAH4AEAAAAAN4") // TODO SI-8576 Uninitialized field under -Xcheckinit // check(new parallel.immutable.ParRange(new Range(0, 1, 2)))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJSYW5nZQAAAAAAAAABAgAETAAXUGFyUmFuZ2VJdGVyYXRvciRtb2R1bGV0AEBMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9pbW11dGFibGUvUGFyUmFuZ2UkUGFyUmFuZ2VJdGVyYXRvciQ7TAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO0wABXJhbmdldAAiTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL1JhbmdlO3hwcHBwc3IAIHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLlJhbmdlabujVKsVMg0CAAdJAANlbmRaAAdpc0VtcHR5SQALbGFzdEVsZW1lbnRJABBudW1SYW5nZUVsZW1lbnRzSQAFc3RhcnRJAARzdGVwSQAPdGVybWluYWxFbGVtZW50eHAAAAABAAAAAAAAAAABAAAAAAAAAAIAAAAC") // TODO SI-8576 unstable under -Xcheckinit // check(parallel.mutable.ParArray(1, 2, 3))( "rO0ABXNyACpzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFyQXJyYXkAAAAAAAAAAQMABEwAF1BhckFycmF5SXRlcmF0b3IkbW9kdWxldAA+THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvbXV0YWJsZS9QYXJBcnJheSRQYXJBcnJheUl0ZXJhdG9yJDtMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAIYXJyYXlzZXF0ACNMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0FycmF5U2VxO3hwcHBwc3IAMXNjYWxhLmNvbGxlY3Rpb24ucGFyYWxsZWwubXV0YWJsZS5FeHBvc2VkQXJyYXlTZXGx2OTefAodSQIAAkkABmxlbmd0aFsABWFycmF5dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTZXEVPD3SKEkOcwIAAkkABmxlbmd0aFsABWFycmF5cQB+AAd4cAAAAAN1cgATW0xqYXZhLmxhbmcuT2JqZWN0O5DOWJ8QcylsAgAAeHAAAAADcHBwAAAAA3VxAH4ACgAAABBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ADQAAAAJzcQB+AA0AAAADcHBwcHBwcHBwcHBwcHg=") - check(parallel.mutable.ParHashMap(1 -> 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaE1hcAAAAAAAAAABAwACTAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO3hwcHB3DQAAAu4AAAABAAAABAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJ4") - check(parallel.mutable.ParHashSet(1, 2, 3))( 
"rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaFNldAAAAAAAAAABAwACTAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO3hwcHB3DQAAAcIAAAADAAAAGwFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJzcQB+AAQAAAADeA==") + check(parallel.mutable.ParHashMap(1 -> 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaE1hcAAAAAAAAAADAwAISQALX2xvYWRGYWN0b3JJAAlzZWVkdmFsdWVJAAl0YWJsZVNpemVJAAl0aHJlc2hvbGRMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7WwAHc2l6ZW1hcHQAAltJWwAFdGFibGV0ACVbTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9IYXNoRW50cnk7eHB3DQAAAu4AAAABAAAABAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABgAAAAJ4") + check(parallel.mutable.ParHashSet(1, 2, 3))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaFNldAAAAAAAAAABAwAISQALX2xvYWRGYWN0b3JJAAlzZWVkdmFsdWVJAAl0YWJsZVNpemVJAAl0aHJlc2hvbGRMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7WwAHc2l6ZW1hcHQAAltJWwAFdGFibGV0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB3DQAAAcIAAAADAAAAGwFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABgAAAAJzcQB+AAYAAAADeA==") } class SerializationStabilityTest { diff --git a/junit/src/test/scala/scala/collection/CollectionConversionsTest.scala b/junit/src/test/scala/scala/collection/CollectionConversionsTest.scala index b8d37e5e..0b12369f 100644 --- a/junit/src/test/scala/scala/collection/CollectionConversionsTest.scala +++ b/junit/src/test/scala/scala/collection/CollectionConversionsTest.scala @@ -14,20 +14,20 @@ class CollectionConversionsTest { val testVector = Vector(1,2,3) val testBuffer = Buffer(1,2,3) - val testGenSeq = GenSeq(1,2,3) - val testSeq = Seq(1,2,3) - val testStream = Stream(1,2,3) + val testSeq = scala.Seq(1,2,3) + val testLazyList = LazyList(1,2,3) val testArray = Array(1,2,3) val testParVector = ParVector(1,2,3) + val testParSeq = parallel.ParSeq(1,2,3) val testParArray = ParArray(1,2,3) @Test def testAll: Unit = { - testConversion("iterator", (1 to 3).iterator) + testConversionIterator("iterator", (1 to 3).iterator) testConversion("Vector", Vector(1,2,3)) testConversion("List", List(1,2,3)) testConversion("Buffer", Buffer(1,2,3)) - testConversion("ParVector", ParVector(1,2,3)) - testConversion("ParArray", ParArray(1,2,3)) + testConversionParIterable("ParVector", ParVector(1,2,3)) + testConversionParIterable("ParArray", ParArray(1,2,3)) testConversion("Set", Set(1,2,3)) testConversion("SetView", Set(1,2,3).view) testConversion("BufferView", Buffer(1,2,3).view) @@ -52,26 +52,108 @@ class CollectionConversionsTest { ok } - def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = { - val tmp = col + def testConversion[A: ClassTag](name: String, col: => Iterable[A]): Unit = { out ++= ("-- Testing " + name + " ---\n") if(!( printResult("[Direct] Vector ", col.toVector, testVector) && - printResult("[Copy] Vector ", col.to[Vector], testVector) && + printResult("[Copy] Vector ", col.to(Vector), testVector) && 
printResult("[Direct] Buffer ", col.toBuffer, testBuffer) && - printResult("[Copy] Buffer ", col.to[Buffer], testBuffer) && - printResult("[Direct] GenSeq ", col.toSeq, testGenSeq) && - printResult("[Copy] GenSeq ", col.to[GenSeq], testGenSeq) && - printResult("[Copy] Seq ", col.to[Seq], testSeq) && - printResult("[Direct] Stream ", col.toStream, testStream) && - printResult("[Copy] Stream ", col.to[Stream], testStream) && + printResult("[Copy] Buffer ", col.to(Buffer), testBuffer) && + printResult("[Direct] Seq ", col.toSeq, testSeq) && + printResult("[Copy] Seq ", col.to(scala.Seq), testSeq) && + printResult("[Copy] Stream ", col.to(LazyList), testLazyList) && printResult("[Direct] Array ", col.toArray, testArray) && - printResult("[Copy] Array ", col.to[Array], testArray) && - printResult("[Copy] ParVector", col.to[ParVector], testParVector) && - printResult("[Copy] ParArray ", col.to[ParArray], testParArray) + printResult("[Copy] Array ", col.to(Array), testArray) && + printResult("[Copy] ParVector", col.to(ParVector), testParVector) && + printResult("[Copy] ParArray ", col.to(ParArray), testParArray) )) { print(out) fail("Not all tests successful") } } + + def testConversionIterator[A: ClassTag](name: String, col: => Iterator[A]): Unit = { + out ++= ("-- Testing " + name + " ---\n") + if(!( + printResult("[Direct] Vector ", col.toVector, testVector) && + printResult("[Copy] Vector ", col.to(Vector), testVector) && + printResult("[Direct] Buffer ", col.toBuffer, testBuffer) && + printResult("[Copy] Buffer ", col.to(Buffer), testBuffer) && + printResult("[Direct] Seq ", col.toSeq, testSeq) && + printResult("[Copy] Seq ", col.to(scala.Seq), testSeq) && + printResult("[Copy] Stream ", col.to(LazyList), testLazyList) && + printResult("[Direct] Array ", col.toArray, testArray) && + printResult("[Copy] Array ", col.to(Array), testArray) && + printResult("[Copy] ParVector", col.to(ParVector), testParVector) && + printResult("[Copy] ParArray ", col.to(ParArray), testParArray) + )) { + print(out) + fail("Not all tests successful") + } + } + + def testConversionParIterable[A: ClassTag](name: String, col: => parallel.ParIterable[A]): Unit = { + out ++= ("-- Testing " + name + " ---\n") + if(!( + printResult("[Direct] Vector ", col.toVector, testVector) && + printResult("[Copy] Vector ", col.to(Vector), testVector) && + printResult("[Direct] Buffer ", col.toBuffer, testBuffer) && + printResult("[Copy] Buffer ", col.to(Buffer), testBuffer) && + printResult("[Direct] ParSeq ", col.toSeq, testParSeq) && + printResult("[Copy] Seq ", col.to(scala.Seq), testSeq) && + printResult("[Copy] Stream ", col.to(LazyList), testLazyList) && + printResult("[Direct] Array ", col.toArray, testArray) && + printResult("[Copy] Array ", col.to(Array), testArray) && + printResult("[Copy] ParVector", col.to(ParVector), testParVector) && + printResult("[Copy] ParArray ", col.to(ParArray), testParArray) + )) { + print(out) + fail("Not all tests successful") + } + } + + // Tests checking that implicit conversions are correctly triggered for various types of collections + def testImplicitConverters(): Unit = { + import scala.{collection => sc} + import scala.collection.{mutable => scm, immutable => sci} + + import scala.collection.parallel.CollectionConverters._ + + // Iterable + val xs1 = sc.Iterable(1, 2, 3).par + val xs1T: sc.parallel.ParIterable[Int] = xs1 + // Seq + val xs2 = sc.Seq(1, 2, 3).par + val xs2T: sc.parallel.ParSeq[Int] = xs2 + val xs3 = scala.Seq(1, 2, 3).par + val xs3T: sc.parallel.immutable.ParSeq[Int] = 
xs3 + val xs4 = sci.Seq(1, 2, 3).par + val xs4T: sc.parallel.immutable.ParSeq[Int] = xs4 + val xs5 = List(1, 2, 3).par + val xs5T: sc.parallel.immutable.ParSeq[Int] = xs5 + val xs6 = Vector(1, 2, 3).par + val xs6T: sc.parallel.immutable.ParVector[Int] = xs6 + val xs7 = scm.Seq(1, 2, 3).par + val xs7T: sc.parallel.mutable.ParSeq[Int] = xs7 + val xs8 = scm.ArrayBuffer(1, 2, 3).par + val xs8T: sc.parallel.mutable.ParArray[Int] = xs8 + val xs9 = Array(1, 2, 3).par + val xs9T: sc.parallel.mutable.ParArray[Int] = xs9 + // Set + val xs10 = sc.Set(1, 2, 3).par + val xs10T: sc.parallel.ParSet[Int] = xs10 + val xs11 = sci.Set(1, 2, 3).par + val xs11T: sc.parallel.immutable.ParSet[Int] = xs11 + val xs12 = scm.Set(1, 2, 3).par + val xs12T: sc.parallel.mutable.ParSet[Int] = xs12 + // Map + val xs13 = sc.Map(1 -> 0, 2 -> 0).par + val xs13T: sc.parallel.ParMap[Int, Int] = xs13 + val xs14 = sci.Map(1 -> 0, 2 -> 0).par + val xs14T: sc.parallel.immutable.ParMap[Int, Int] = xs14 + val xs15 = scm.Map(1 -> 0, 2 -> 0).par + val xs15T: sc.parallel.mutable.ParMap[Int, Int] = xs15 + // TODO concurrent.TrieMap + } + } diff --git a/junit/src/test/scala/scala/collection/NewBuilderTest.scala b/junit/src/test/scala/scala/collection/NewBuilderTest.scala index 72c88f45..4c8959e5 100644 --- a/junit/src/test/scala/scala/collection/NewBuilderTest.scala +++ b/junit/src/test/scala/scala/collection/NewBuilderTest.scala @@ -22,101 +22,85 @@ class NewBuilderTest { assertTrue(s"$mapped (of class ${mapped.getClass} is not a in instance of ${expected}", isInstance) } - test[sc.GenTraversable[_] ]((sc.GenTraversable(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.Traversable[_] ]((sc.Traversable(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.GenIterable[_] ]((sc.GenIterable(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.Iterable[_] ]((sc.Iterable(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.GenSeq[_] ]((sc.GenSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.Seq[_] ]((sc.Seq(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.LinearSeq[_] ]((sc.LinearSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.LinearSeq[_] ]((sc.LinearSeq(1): sc.Seq[Int] ).map(x => x)) - test[sc.IndexedSeq[_] ]((sc.IndexedSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.IndexedSeq[_] ]((sc.IndexedSeq(1): sc.Seq[Int] ).map(x => x)) - test[sc.GenSet[_] ]((sc.GenSet(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.Set[_] ]((sc.Set(1): sc.GenTraversable[Int]).map(x => x)) - test[sc.GenMap[_, _] ]((sc.GenMap(1 -> 1): sc.GenMap[Int, Int] ).map(x => x)) - test[sc.Map[_, _] ]((sc.Map(1 -> 1): sc.GenMap[Int, Int] ).map(x => x)) - - test[scm.Traversable[_] ]((scm.Traversable(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.Iterable[_] ]((scm.Iterable(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.LinearSeq[_] ]((scm.LinearSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.LinearSeq[_] ]((scm.LinearSeq(1): sc.Seq[Int] ).map(x => x)) - test[scm.MutableList[_] ]((scm.MutableList(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.MutableList[_] ]((scm.MutableList(1): sc.Seq[Int] ).map(x => x)) - test[scm.Queue[_] ]((scm.Queue(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.Queue[_] ]((scm.Queue(1): sc.Seq[Int] ).map(x => x)) - test[scm.ArrayStack[_] ]((scm.ArrayStack(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.ArrayStack[_] ]((scm.ArrayStack(1): sc.Seq[Int] ).map(x => x)) - test[scm.ArraySeq[_] ]((scm.ArraySeq(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.ArraySeq[_] ]((scm.ArraySeq(1): sc.Seq[Int] ).map(x => 
x)) - - test[scm.Buffer[_] ]((scm.Buffer(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.Buffer[_] ]((scm.Buffer(1): sc.Seq[Int] ).map(x => x)) - test[scm.IndexedSeq[_] ]((scm.IndexedSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.IndexedSeq[_] ]((scm.IndexedSeq(1): sc.Seq[Int] ).map(x => x)) - test[scm.ArrayBuffer[_] ]((scm.ArrayBuffer(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.ArrayBuffer[_] ]((scm.ArrayBuffer(1): sc.Seq[Int] ).map(x => x)) - test[scm.ListBuffer[_] ]((scm.ListBuffer(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.ListBuffer[_] ]((scm.ListBuffer(1): sc.Seq[Int] ).map(x => x)) - test[scm.Seq[_] ]((scm.Seq(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.Seq[_] ]((scm.Seq(1): sc.Seq[Int] ).map(x => x)) - test[scm.ResizableArray[_] ]((scm.ResizableArray(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.ResizableArray[_] ]((scm.ResizableArray(1): sc.Seq[Int] ).map(x => x)) - test[scm.Set[_] ]((scm.Set(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.Set[_] ]((scm.Set(1): sc.Set[Int] ).map(x => x)) - test[scm.HashSet[_] ]((scm.HashSet(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.HashSet[_] ]((scm.HashSet(1): sc.Set[Int] ).map(x => x)) - test[scm.LinkedHashSet[_] ]((scm.LinkedHashSet(1): sc.GenTraversable[Int]).map(x => x)) - test[scm.LinkedHashSet[_] ]((scm.LinkedHashSet(1): sc.Set[Int] ).map(x => x)) - - test[sci.Traversable[_] ]((sci.Traversable(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.Iterable[_] ]((sci.Iterable(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.LinearSeq[_] ]((sci.LinearSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.LinearSeq[_] ]((sci.LinearSeq(1): sc.Seq[Int] ).map(x => x)) - test[sci.List[_] ]((sci.List(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.List[_] ]((sci.List(1): sc.Seq[Int] ).map(x => x)) - test[sci.Stream[_] ]((sci.Stream(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.Stream[_] ]((sci.Stream(1): sc.Seq[Int] ).map(x => x)) - test[sci.Queue[_] ]((sci.Queue(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.Queue[_] ]((sci.Queue(1): sc.Seq[Int] ).map(x => x)) - test[sci.IndexedSeq[_] ]((sci.IndexedSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.IndexedSeq[_] ]((sci.IndexedSeq(1): sc.Seq[Int] ).map(x => x)) - test[sci.Vector[_] ]((sci.Vector(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.Vector[_] ]((sci.Vector(1): sc.Seq[Int] ).map(x => x)) - test[sci.Seq[_] ]((sci.Seq(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.Seq[_] ]((sci.Seq(1): sc.Seq[Int] ).map(x => x)) - test[sci.Set[_] ]((sci.Set(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.Set[_] ]((sci.Set(1): sc.Set[Int] ).map(x => x)) - test[sci.ListSet[_] ]((sci.ListSet(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.ListSet[_] ]((sci.ListSet(1): sc.Set[Int] ).map(x => x)) - test[sci.HashSet[_] ]((sci.HashSet(1): sc.GenTraversable[Int]).map(x => x)) - test[sci.HashSet[_] ]((sci.HashSet(1): sc.Set[Int] ).map(x => x)) - - test[scp.ParIterable[_] ]((scp.ParIterable(1): sc.GenTraversable[Int]).map(x => x)) - test[scp.ParSeq[_] ]((scp.ParSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[scp.ParSeq[_] ]((scp.ParSeq(1): sc.GenSeq[Int] ).map(x => x)) - test[scp.ParSet[_] ]((scp.ParSet(1): sc.GenTraversable[Int]).map(x => x)) - test[scp.ParSet[_] ]((scp.ParSet(1): sc.GenSet[Int] ).map(x => x)) - - test[scpm.ParIterable[_] ]((scpm.ParIterable(1): sc.GenTraversable[Int]).map(x => x)) - test[scpm.ParSeq[_] ]((scpm.ParSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[scpm.ParSeq[_] ]((scpm.ParSeq(1): 
sc.GenSeq[Int] ).map(x => x)) - test[scpm.ParArray[_] ]((scpm.ParArray(1): sc.GenTraversable[Int]).map(x => x)) - test[scpm.ParArray[_] ]((scpm.ParArray(1): sc.GenSeq[Int] ).map(x => x)) - test[scpm.ParSet[_] ]((scpm.ParSet(1): sc.GenTraversable[Int]).map(x => x)) - test[scpm.ParSet[_] ]((scpm.ParSet(1): sc.GenSet[Int] ).map(x => x)) - test[scpm.ParHashSet[_] ]((scpm.ParHashSet(1): sc.GenTraversable[Int]).map(x => x)) - test[scpm.ParHashSet[_] ]((scpm.ParHashSet(1): sc.GenSet[Int] ).map(x => x)) - - test[scpi.ParIterable[_] ]((scpi.ParIterable(1): sc.GenTraversable[Int]).map(x => x)) - test[scpi.ParSeq[_] ]((scpi.ParSeq(1): sc.GenTraversable[Int]).map(x => x)) - test[scpi.ParSeq[_] ]((scpi.ParSeq(1): sc.GenSeq[Int] ).map(x => x)) - test[scpi.ParVector[_] ]((scpi.ParVector(1): sc.GenTraversable[Int]).map(x => x)) - test[scpi.ParVector[_] ]((scpi.ParVector(1): sc.GenSeq[Int] ).map(x => x)) - test[scpi.ParSet[_] ]((scpi.ParSet(1): sc.GenTraversable[Int]).map(x => x)) - test[scpi.ParSet[_] ]((scpi.ParSet(1): sc.GenSet[Int] ).map(x => x)) - test[scpi.ParHashSet[_] ]((scpi.ParHashSet(1): sc.GenTraversable[Int]).map(x => x)) - test[scpi.ParHashSet[_] ]((scpi.ParHashSet(1): sc.GenSet[Int] ).map(x => x)) + test[sc.Iterable[_] ]((sc.Iterable(1): sc.Iterable[Int]).map(x => x)) + test[sc.Seq[_] ]((sc.Seq(1): sc.Iterable[Int]).map(x => x)) + test[sc.LinearSeq[_] ]((sc.LinearSeq(1): sc.Iterable[Int]).map(x => x)) + test[sc.LinearSeq[_] ]((sc.LinearSeq(1): sc.Seq[Int] ).map(x => x)) + test[sc.IndexedSeq[_] ]((sc.IndexedSeq(1): sc.Iterable[Int]).map(x => x)) + test[sc.IndexedSeq[_] ]((sc.IndexedSeq(1): sc.Seq[Int] ).map(x => x)) + test[sc.Set[_] ]((sc.Set(1): sc.Iterable[Int]).map(x => x)) + test[sc.Map[_, _] ]((sc.Map(1 -> 1): sc.Map[Int, Int]).map(x => x)) + + test[scm.Iterable[_] ]((scm.Iterable(1): sc.Iterable[Int]).map(x => x)) + test[scm.Queue[_] ]((scm.Queue(1): sc.Iterable[Int]).map(x => x)) + test[scm.Queue[_] ]((scm.Queue(1): sc.Seq[Int] ).map(x => x)) + test[scm.ArraySeq[_] ]((scm.ArraySeq(1): sc.Iterable[Int]).map(x => x)) + test[scm.ArraySeq[_] ]((scm.ArraySeq(1): sc.Seq[Int] ).map(x => x)) + + test[scm.Buffer[_] ]((scm.Buffer(1): sc.Iterable[Int]).map(x => x)) + test[scm.Buffer[_] ]((scm.Buffer(1): sc.Seq[Int] ).map(x => x)) + test[scm.IndexedSeq[_] ]((scm.IndexedSeq(1): sc.Iterable[Int]).map(x => x)) + test[scm.IndexedSeq[_] ]((scm.IndexedSeq(1): sc.Seq[Int] ).map(x => x)) + test[scm.ArrayBuffer[_] ]((scm.ArrayBuffer(1): sc.Iterable[Int]).map(x => x)) + test[scm.ArrayBuffer[_] ]((scm.ArrayBuffer(1): sc.Seq[Int] ).map(x => x)) + test[scm.ListBuffer[_] ]((scm.ListBuffer(1): sc.Iterable[Int]).map(x => x)) + test[scm.ListBuffer[_] ]((scm.ListBuffer(1): sc.Seq[Int] ).map(x => x)) + test[scm.Seq[_] ]((scm.Seq(1): sc.Iterable[Int]).map(x => x)) + test[scm.Seq[_] ]((scm.Seq(1): sc.Seq[Int] ).map(x => x)) + test[scm.Set[_] ]((scm.Set(1): sc.Iterable[Int]).map(x => x)) + test[scm.Set[_] ]((scm.Set(1): sc.Set[Int] ).map(x => x)) + test[scm.HashSet[_] ]((scm.HashSet(1): sc.Iterable[Int]).map(x => x)) + test[scm.HashSet[_] ]((scm.HashSet(1): sc.Set[Int] ).map(x => x)) + test[scm.LinkedHashSet[_] ]((scm.LinkedHashSet(1): sc.Iterable[Int]).map(x => x)) + test[scm.LinkedHashSet[_] ]((scm.LinkedHashSet(1): sc.Set[Int] ).map(x => x)) + + test[sci.Iterable[_] ]((sci.Iterable(1): sc.Iterable[Int]).map(x => x)) + test[sci.LinearSeq[_] ]((sci.LinearSeq(1): sc.Iterable[Int]).map(x => x)) + test[sci.LinearSeq[_] ]((sci.LinearSeq(1): sc.Seq[Int] ).map(x => x)) + test[sci.List[_] ]((sci.List(1): 
sc.Iterable[Int]).map(x => x)) + test[sci.List[_] ]((sci.List(1): sc.Seq[Int] ).map(x => x)) + test[sci.LazyList[_] ]((sci.LazyList(1): sc.Iterable[Int]).map(x => x)) + test[sci.LazyList[_] ]((sci.LazyList(1): sc.Seq[Int] ).map(x => x)) + test[sci.Queue[_] ]((sci.Queue(1): sc.Iterable[Int]).map(x => x)) + test[sci.Queue[_] ]((sci.Queue(1): sc.Seq[Int] ).map(x => x)) + test[sci.IndexedSeq[_] ]((sci.IndexedSeq(1): sc.Iterable[Int]).map(x => x)) + test[sci.IndexedSeq[_] ]((sci.IndexedSeq(1): sc.Seq[Int] ).map(x => x)) + test[sci.Vector[_] ]((sci.Vector(1): sc.Iterable[Int]).map(x => x)) + test[sci.Vector[_] ]((sci.Vector(1): sc.Seq[Int] ).map(x => x)) + test[sci.Seq[_] ]((sci.Seq(1): sc.Iterable[Int]).map(x => x)) + test[sci.Seq[_] ]((sci.Seq(1): sc.Seq[Int] ).map(x => x)) + test[sci.Set[_] ]((sci.Set(1): sc.Iterable[Int]).map(x => x)) + test[sci.Set[_] ]((sci.Set(1): sc.Set[Int] ).map(x => x)) + test[sci.ListSet[_] ]((sci.ListSet(1): sc.Iterable[Int]).map(x => x)) + test[sci.ListSet[_] ]((sci.ListSet(1): sc.Set[Int] ).map(x => x)) + test[sci.HashSet[_] ]((sci.HashSet(1): sc.Iterable[Int]).map(x => x)) + test[sci.HashSet[_] ]((sci.HashSet(1): sc.Set[Int] ).map(x => x)) + + test[scp.ParIterable[_] ]((scp.ParIterable(1): scp.ParIterable[Int]).map(x => x)) + test[scp.ParSeq[_] ]((scp.ParSeq(1): scp.ParIterable[Int]).map(x => x)) + test[scp.ParSeq[_] ]((scp.ParSeq(1): scp.ParSeq[Int] ).map(x => x)) + test[scp.ParSet[_] ]((scp.ParSet(1): scp.ParIterable[Int]).map(x => x)) + test[scp.ParSet[_] ]((scp.ParSet(1): scp.ParSet[Int] ).map(x => x)) + + test[scpm.ParIterable[_] ]((scpm.ParIterable(1): scp.ParIterable[Int]).map(x => x)) + test[scpm.ParSeq[_] ]((scpm.ParSeq(1): scp.ParIterable[Int]).map(x => x)) + test[scpm.ParSeq[_] ]((scpm.ParSeq(1): scp.ParSeq[Int] ).map(x => x)) + test[scpm.ParArray[_] ]((scpm.ParArray(1): scp.ParIterable[Int]).map(x => x)) + test[scpm.ParArray[_] ]((scpm.ParArray(1): scp.ParSeq[Int] ).map(x => x)) + test[scpm.ParSet[_] ]((scpm.ParSet(1): scp.ParIterable[Int]).map(x => x)) + test[scpm.ParSet[_] ]((scpm.ParSet(1): scp.ParSet[Int] ).map(x => x)) + test[scpm.ParHashSet[_] ]((scpm.ParHashSet(1): scp.ParIterable[Int]).map(x => x)) + test[scpm.ParHashSet[_] ]((scpm.ParHashSet(1): scp.ParSet[Int] ).map(x => x)) + + test[scpi.ParIterable[_] ]((scpi.ParIterable(1): scp.ParIterable[Int]).map(x => x)) + test[scpi.ParSeq[_] ]((scpi.ParSeq(1): scp.ParIterable[Int]).map(x => x)) + test[scpi.ParSeq[_] ]((scpi.ParSeq(1): scp.ParSeq[Int] ).map(x => x)) + test[scpi.ParVector[_] ]((scpi.ParVector(1): scp.ParIterable[Int]).map(x => x)) + test[scpi.ParVector[_] ]((scpi.ParVector(1): scp.ParSeq[Int] ).map(x => x)) + test[scpi.ParSet[_] ]((scpi.ParSet(1): scp.ParIterable[Int]).map(x => x)) + test[scpi.ParSet[_] ]((scpi.ParSet(1): scp.ParSet[Int] ).map(x => x)) + test[scpi.ParHashSet[_] ]((scpi.ParHashSet(1): scp.ParIterable[Int]).map(x => x)) + test[scpi.ParHashSet[_] ]((scpi.ParHashSet(1): scp.ParSet[Int] ).map(x => x)) // These go through `GenMap.canBuildFrom`. There is no simple fix for Map like there is for Set. // A Map does not provide access to its companion object at runtime. 
diff --git a/junit/src/test/scala/scala/collection/concurrent/ctries_new/ConcurrentMapSpec.scala b/junit/src/test/scala/scala/collection/concurrent/ctries_new/ConcurrentMapSpec.scala
index 5c5af495..14e90375 100644
--- a/junit/src/test/scala/scala/collection/concurrent/ctries_new/ConcurrentMapSpec.scala
+++ b/junit/src/test/scala/scala/collection/concurrent/ctries_new/ConcurrentMapSpec.scala
@@ -69,7 +69,7 @@ class ConcurrentMapSpec extends Spec {
           var k = Int.MaxValue
           do {
             if (k != Int.MaxValue) repeats += 1
-            k = ct.lookup(new Wrap(j))
+            k = ct.getOrElse(new Wrap(j), 0)
           } while (!ct.replace(new Wrap(j), k, -k))
         }
         //println("Thread %d repeats: %d".format(index, repeats))
@@ -98,7 +98,7 @@ class ConcurrentMapSpec extends Spec {
         for (i <- 0 until sz) {
           val j = (offs + i) % sz
           ct.putIfAbsent(new Wrap(j), j)
-          assert(ct.lookup(new Wrap(j)) == j)
+          assert(ct.getOrElse(new Wrap(j), null) == j)
         }
       }
     }
diff --git a/junit/src/test/scala/scala/collection/concurrent/ctries_new/IteratorSpec.scala b/junit/src/test/scala/scala/collection/concurrent/ctries_new/IteratorSpec.scala
index e306cf6b..5c62a58e 100644
--- a/junit/src/test/scala/scala/collection/concurrent/ctries_new/IteratorSpec.scala
+++ b/junit/src/test/scala/scala/collection/concurrent/ctries_new/IteratorSpec.scala
@@ -128,7 +128,7 @@ class IteratorSpec extends Spec {
     def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]): Unit = {
       if (a != b) {
-        println(a.size + " vs " + b.size)
+        println(a.size.toString + " vs " + b.size)
       }
       assert(a == b)
     }
diff --git a/junit/src/test/scala/scala/collection/concurrent/ctries_new/LNodeSpec.scala b/junit/src/test/scala/scala/collection/concurrent/ctries_new/LNodeSpec.scala
index c4e9532a..101c0e4c 100644
--- a/junit/src/test/scala/scala/collection/concurrent/ctries_new/LNodeSpec.scala
+++ b/junit/src/test/scala/scala/collection/concurrent/ctries_new/LNodeSpec.scala
@@ -35,18 +35,18 @@ class LNodeSpec extends Spec {
     "put elements with the same hash codes if absent" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
-      for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+      for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i)
       for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i))
       for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None)
-      for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i)
+      for (i <- initsz until secondsz) assert(ct.getOrElse(new DumbHash(i), null) == i)
     }

     "replace elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
-      for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+      for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i)
       for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i))
-      for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i)
+      for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == -i)
       for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true)
     }
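These specs previously went through TrieMap's `lookup`; the updated tests use the plain Map API instead. A minimal standalone illustration of the replacement, assuming only the standard library:

  import scala.collection.concurrent.TrieMap

  object TrieMapLookupSketch {
    def main(args: Array[String]): Unit = {
      val ct = TrieMap.empty[String, Int]
      ct.put("a", 1)
      // The public Map API replaces the old `lookup`:
      assert(ct.getOrElse("a", 0) == 1) // present key
      assert(ct.getOrElse("b", 0) == 0) // absent key falls back to the default
      assert(ct.get("b").isEmpty)       // or use the Option-returning `get`
    }
  }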
diff --git a/junit/src/test/scala/scala/collection/concurrent/ctries_old/ConcurrentMapSpec.scala b/junit/src/test/scala/scala/collection/concurrent/ctries_old/ConcurrentMapSpec.scala
index 260479d5..2fbc685a 100644
--- a/junit/src/test/scala/scala/collection/concurrent/ctries_old/ConcurrentMapSpec.scala
+++ b/junit/src/test/scala/scala/collection/concurrent/ctries_old/ConcurrentMapSpec.scala
@@ -69,7 +69,7 @@ class ConcurrentMapSpec extends Spec {
           var k = Int.MaxValue
           do {
             if (k != Int.MaxValue) repeats += 1
-            k = ct.lookup(new Wrap(j))
+            k = ct.getOrElse(new Wrap(j), 0)
           } while (!ct.replace(new Wrap(j), k, -k))
         }
         //println("Thread %d repeats: %d".format(index, repeats))
@@ -98,7 +98,7 @@ class ConcurrentMapSpec extends Spec {
         for (i <- 0 until sz) {
           val j = (offs + i) % sz
           ct.putIfAbsent(new Wrap(j), j)
-          assert(ct.lookup(new Wrap(j)) == j)
+          assert(ct.getOrElse(new Wrap(j), null) == j)
         }
       }
     }
diff --git a/junit/src/test/scala/scala/collection/concurrent/ctries_old/IteratorSpec.scala b/junit/src/test/scala/scala/collection/concurrent/ctries_old/IteratorSpec.scala
index 271de508..9e4108d3 100644
--- a/junit/src/test/scala/scala/collection/concurrent/ctries_old/IteratorSpec.scala
+++ b/junit/src/test/scala/scala/collection/concurrent/ctries_old/IteratorSpec.scala
@@ -129,7 +129,7 @@ class IteratorSpec extends Spec {
     def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]): Unit = {
       if (a != b) {
-        println(a.size + " vs " + b.size)
+        println(a.size.toString + " vs " + b.size)
         // println(a)
         // println(b)
         // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
diff --git a/junit/src/test/scala/scala/collection/concurrent/ctries_old/LNodeSpec.scala b/junit/src/test/scala/scala/collection/concurrent/ctries_old/LNodeSpec.scala
index 38236fbd..bc7c900b 100644
--- a/junit/src/test/scala/scala/collection/concurrent/ctries_old/LNodeSpec.scala
+++ b/junit/src/test/scala/scala/collection/concurrent/ctries_old/LNodeSpec.scala
@@ -36,18 +36,18 @@ class LNodeSpec extends Spec {
     "put elements with the same hash codes if absent" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
-      for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+      for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i)
      for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i))
      for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None)
-      for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i)
+      for (i <- initsz until secondsz) assert(ct.getOrElse(new DumbHash(i), null) == i)
     }

     "replace elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
-      for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+      for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i)
       for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i))
-      for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i)
+      for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == -i)
       for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true)
     }
diff --git a/junit/src/test/scala/scala/collection/immutable/ParallelConsistencyTest.scala b/junit/src/test/scala/scala/collection/immutable/ParallelConsistencyTest.scala
deleted file mode 100644
index da963624..00000000
--- a/junit/src/test/scala/scala/collection/immutable/ParallelConsistencyTest.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.collection.immutable
-
-import org.junit.Test
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-
-@RunWith(classOf[JUnit4])
-class ParallelConsistencyTest {
-
-  private val theSeq = Seq(1,2,3)
-
-  // This collection will throw an exception if you do anything but call .length or .seq
-  private val mustCallSeq: collection.GenSeq[Int] = new collection.parallel.ParSeq[Int] {
-    def length = 3
-
-    // This method is surely sequential & safe -- want all access to go through here
-    def seq = theSeq
-
-    def notSeq = throw new Exception("Access to parallel collection not via .seq")
-
-    // These methods could possibly be used dangerously explicitly or internally
-    // (apply could also be used safely; if it is, do test with mustCallSeq)
-    def apply(i: Int) = notSeq
-    def splitter = notSeq
-  }
-
-  // Test Vector ++ with a small parallel collection concatenation (SI-9072).
-  @Test
-  def testPlusPlus(): Unit = {
-    assert((Vector.empty ++ mustCallSeq) == theSeq, "Vector ++ unsafe with parallel vectors")
-  }
-
-  // SI-9126, 1 of 2
-  @Test
-  def testTranspose(): Unit = {
-    assert(List(mustCallSeq).transpose.flatten == theSeq, "Transposing inner parallel collection unsafe")
-  }
-
-  // SI-9126, 2 of 2
-  @Test
-  def testList_flatMap(): Unit = {
-    assert(List(1).flatMap(_ => mustCallSeq) == theSeq, "List#flatMap on inner parallel collection unsafe")
-  }
-}
diff --git a/junit/src/test/scala/scala/collection/parallel/ParMapTest.scala b/junit/src/test/scala/scala/collection/parallel/ParMapTest.scala
index 87b7de6b..ad87a440 100644
--- a/junit/src/test/scala/scala/collection/parallel/ParMapTest.scala
+++ b/junit/src/test/scala/scala/collection/parallel/ParMapTest.scala
@@ -10,7 +10,7 @@ class ParMapTest {

   @Test
   def test: Unit = {
-    val gm: GenMap[Int, Int] = GenMap(0 -> 0, 1 -> 1).par
+    val gm: ParMap[Int, Int] = Map(0 -> 0, 1 -> 1).par

     // ops
     assertTrue(gm.isDefinedAt(1))
@@ -19,9 +19,9 @@ class ParMapTest {
     assertTrue(gm.getOrElse(2, 3) == 3)
     assertTrue(gm.keysIterator.toSet == Set(0, 1))
     assertTrue(gm.valuesIterator.toSet == Set(0, 1))
-    assertTrue(gm.keySet == Set(0, 1))
-    assertTrue(gm.keys.toSet == Set(0, 1))
-    assertTrue(gm.values.toSet == Set(0, 1))
+    assertTrue(gm.keySet == ParSet(0, 1))
+    assertTrue(gm.keys.toSet == ParSet(0, 1))
+    assertTrue(gm.values.toSet == ParSet(0, 1))
     try {
       gm.default(-1)
       assertTrue(false)
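A compact illustration of the comparison rules exercised by ParMapTest above. This is a sketch, assuming the scala-parallel-collections module on 2.13, where `.par` comes from `CollectionConverters`:

  import scala.collection.parallel.CollectionConverters._
  import scala.collection.parallel.ParSet

  object ParMapEqualitySketch {
    def main(args: Array[String]): Unit = {
      val gm = Map(0 -> 0, 1 -> 1).par   // ParMap[Int, Int]
      // keySet is now a ParSet, and parallel collections no longer
      // compare equal to sequential ones:
      assert(gm.keySet == ParSet(0, 1))
      assert(gm.keySet != Set(0, 1))
      // Converting back to a sequential view restores the old comparison:
      assert(gm.keySet.seq == Set(0, 1))
    }
  }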
diff --git a/junit/src/test/scala/scala/collection/parallel/ParSeqConversionsTest.scala b/junit/src/test/scala/scala/collection/parallel/ParSeqConversionsTest.scala
index e2d1ce9c..efff53d2 100644
--- a/junit/src/test/scala/scala/collection/parallel/ParSeqConversionsTest.scala
+++ b/junit/src/test/scala/scala/collection/parallel/ParSeqConversionsTest.scala
@@ -11,82 +11,46 @@ class ParSeqConversionsTest {

   @Test
   def testConversions: Unit = {
-    // seq conversions
-    assertSeq(scala.collection.parallel.mutable.ParArray(1, 2, 3))
-    assertSeq(scala.collection.parallel.mutable.ParHashMap(1 -> 2, 2 -> 3))
-    assertSeq(scala.collection.parallel.mutable.ParHashSet(1, 2, 3))
-    assertSeq(scala.collection.parallel.immutable.ParRange(1, 50, 1, false))
-    assertSeq(scala.collection.parallel.immutable.ParHashMap(1 -> 2, 2 -> 4))
-    assertSeq(scala.collection.parallel.immutable.ParHashSet(1, 2, 3))
-
-    // par conversions
-    assertPar(Array(1, 2, 3))
-    assertPar(scala.collection.mutable.ArrayBuffer(1, 2, 3))
-    assertPar(scala.collection.mutable.ArraySeq(1, 2, 3))
-    assertPar(scala.collection.mutable.WrappedArray.make[Int](Array(1, 2, 3)))
-    assertPar(scala.collection.mutable.HashMap(1 -> 1, 2 -> 2))
-    assertPar(scala.collection.mutable.HashSet(1, 2, 3))
-    assertPar(scala.collection.immutable.Range(1, 50, 1))
-    assertPar(scala.collection.immutable.HashMap(1 -> 1, 2 -> 2))
-    assertPar(scala.collection.immutable.HashSet(1, 2, 3))
-
     // par.to* and to*.par tests
-    assertToPar(List(1 -> 1, 2 -> 2, 3 -> 3))
-    assertToPar(Stream(1 -> 1, 2 -> 2))
-    assertToPar(Array(1 -> 1, 2 -> 2))
-    assertToPar(scala.collection.mutable.PriorityQueue(1 -> 1, 2 -> 2, 3 -> 3))
-    assertToPar(scala.collection.mutable.ArrayBuffer(1 -> 1, 2 -> 2))
-    assertToPar(scala.collection.mutable.ArraySeq(1 -> 3))
-    assertToPar(scala.collection.mutable.WrappedArray.make[(Int, Int)](Array(1 -> 3)))
-    assertToPar(scala.collection.mutable.HashMap(1 -> 3))
-    assertToPar(scala.collection.mutable.HashSet(1 -> 3))
-    assertToPar(scala.collection.immutable.HashMap(1 -> 3))
-    assertToPar(scala.collection.immutable.HashSet(1 -> 3))
     assertToPar(scala.collection.parallel.mutable.ParArray(1 -> 1, 2 -> 2, 3 -> 3))
+    assertToPar(scala.collection.parallel.immutable.ParVector(1 -> 1, 2 -> 2, 3 -> 3))
     assertToPar(scala.collection.parallel.mutable.ParHashMap(1 -> 2))
     assertToPar(scala.collection.parallel.mutable.ParHashSet(1 -> 2))
     assertToPar(scala.collection.parallel.immutable.ParHashMap(1 -> 2))
     assertToPar(scala.collection.parallel.immutable.ParHashSet(1 -> 3))

-    assertToParWoMap(scala.collection.immutable.Range(1, 10, 2))
+    assertToParWoMap(scala.collection.parallel.immutable.ParRange(1, 10, 2, false))
+    assertToParWoMap(scala.collection.parallel.immutable.ParVector(1, 2, 3))
+    assertToParWoMap(scala.collection.parallel.mutable.ParArray(1, 2, 3))

     // seq and par again conversions
     assertSeqPar(scala.collection.parallel.mutable.ParArray(1, 2, 3))
+    assertSeqPar(scala.collection.parallel.immutable.ParVector(1, 2, 3))
+    assertSeqPar(scala.collection.parallel.immutable.ParRange(1, 50, 1, false))
   }

   def assertSeqPar[T](pc: scala.collection.parallel.ParIterable[T]) = pc.seq.par == pc

-  def assertSeq[T](pc: scala.collection.parallel.ParIterable[T]) = assertTrue(pc.seq == pc)
-
-  def assertPar[T, P](xs: scala.collection.GenIterable[T]) = assertTrue(xs == xs.par)
-
-  def assertToPar[K, V](xs: scala.collection.GenTraversable[(K, V)]): Unit = {
-    xs match {
-      case _: Seq[_] =>
-        assertTrue(xs.toIterable.par == xs)
-        assertTrue(xs.par.toIterable == xs)
-      case _ =>
-    }
-
+  def assertToPar[K, V](xs: scala.collection.parallel.ParIterable[(K, V)]): Unit = {
     assertTrue(xs.toSeq.par == xs.toSeq)
     assertTrue(xs.par.toSeq == xs.toSeq)

-    assertTrue(xs.toSet.par == xs.toSet)
-    assertTrue(xs.par.toSet == xs.toSet)
+//    assertTrue(xs.toSet.par == xs.toSet)
+//    assertTrue(xs.par.toSet == xs.toSet)

-    assertTrue(xs.toMap.par == xs.toMap)
-    assertTrue(xs.par.toMap == xs.toMap)
+//    assertTrue(xs.toMap.par == xs.toMap)
+//    assertTrue(xs.par.toMap == xs.toMap)
   }

-  def assertToParWoMap[T](xs: scala.collection.GenSeq[T]): Unit = {
-    assertTrue(xs.toIterable.par == xs.toIterable)
-    assertTrue(xs.par.toIterable == xs.toIterable)
+  def assertToParWoMap[T](xs: scala.collection.parallel.ParSeq[T]): Unit = {
+//    assertTrue(xs.toIterable.par == xs.toIterable)
+//    assertTrue(xs.par.toIterable == xs.toIterable)

     assertTrue(xs.toSeq.par == xs.toSeq)
     assertTrue(xs.par.toSeq == xs.toSeq)

-    assertTrue(xs.toSet.par == xs.toSet)
-    assertTrue(xs.par.toSet == xs.toSet)
+//    assertTrue(xs.toSet.par == xs.toSet)
+//    assertTrue(xs.par.toSet == xs.toSet)
   }
 }
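A quick sketch of the round-trip conversions being asserted here, in both directions (sequential to parallel and back). This assumes scala-parallel-collections on 2.13 and is illustrative rather than a copy of the test:

  import scala.collection.parallel.CollectionConverters._
  import scala.collection.parallel.immutable.ParVector

  object RoundTripSketch {
    def main(args: Array[String]): Unit = {
      val pv = ParVector(1, 2, 3)
      // parallel -> sequential gives back a plain Vector
      assert(pv.seq == Vector(1, 2, 3))
      // and .par returns to a parallel collection with the same elements
      assert(pv.seq.par.iterator.sameElements(pv.iterator))
    }
  }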
diff --git a/junit/src/test/scala/scala/runtime/ScalaRunTimeTest.scala b/junit/src/test/scala/scala/runtime/ScalaRunTimeTest.scala
deleted file mode 100644
index 851ed3ad..00000000
--- a/junit/src/test/scala/scala/runtime/ScalaRunTimeTest.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package scala.runtime
-
-import org.junit.Assert._
-import org.junit.Test
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-
-/** Tests for the runtime object ScalaRunTime */
-@RunWith(classOf[JUnit4])
-class ScalaRunTimeTest {
-  @Test
-  def testStringOf(): Unit = {
-    import ScalaRunTime.stringOf
-    import scala.collection._
-    import parallel.ParIterable
-
-    assertEquals("null", stringOf(null))
-    assertEquals( "\"\"", stringOf(""))
-
-    assertEquals("abc", stringOf("abc"))
-    assertEquals("\" abc\"", stringOf(" abc"))
-    assertEquals("\"abc \"", stringOf("abc "))
-
-    assertEquals("""Array()""", stringOf(Array.empty[AnyRef]))
-    assertEquals("""Array()""", stringOf(Array.empty[Int]))
-    assertEquals("""Array(1, 2, 3)""", stringOf(Array(1, 2, 3)))
-    assertEquals("""Array(a, "", " c", null)""", stringOf(Array("a", "", " c", null)))
-    assertEquals("""Array(Array("", 1, Array(5)), Array(1))""",
-        stringOf(Array(Array("", 1, Array(5)), Array(1))))
-
-    val map = Map(1->"", 2->"a", 3->" a", 4->null)
-    assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a, 3 -> " a", 4 -> null)""", stringOf(map))
-    assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a)""", stringOf(map, 2))
-
-    val iterable = Iterable("a", "", " c", null)
-    assertEquals(s"""${iterable.stringPrefix}(a, "", " c", null)""", stringOf(iterable))
-    assertEquals(s"""${iterable.stringPrefix}(a, "")""", stringOf(iterable, 2))
-
-    val parIterable = ParIterable("a", "", " c", null)
-    assertEquals(s"""${parIterable.stringPrefix}(a, "", " c", null)""", stringOf(parIterable))
-    assertEquals(s"""${parIterable.stringPrefix}(a, "")""", stringOf(parIterable, 2))
-
-    val traversable = new Traversable[Int] {
-      def foreach[U](f: Int => U): Unit = (0 to 3).foreach(f)
-    }
-    assertEquals(s"${traversable.stringPrefix}(0, 1, 2, 3)", stringOf(traversable))
-    assertEquals(s"${traversable.stringPrefix}(0, 1)", stringOf(traversable, 2))
-
-    val tuple1 = Tuple1(0)
-    assertEquals("(0,)", stringOf(tuple1))
-    assertEquals("(0,)", stringOf(tuple1, 0))
-    assertEquals("(Array(0),)", stringOf(Tuple1(Array(0))))
-
-    val tuple2 = Tuple2(0, 1)
-    assertEquals("(0,1)", stringOf(tuple2))
-    assertEquals("(0,1)", stringOf(tuple2, 0))
-    assertEquals("(Array(0),1)", stringOf((Array(0), 1)))
-
-    val tuple3 = Tuple3(0, 1, 2)
-    assertEquals("(0,1,2)", stringOf(tuple3))
-    assertEquals("(0,1,2)", stringOf(tuple3, 0))
-    assertEquals("(Array(0),1,2)", stringOf((Array(0), 1, 2)))
-
-    val x = new Object {
-      override def toString(): String = "this is the stringOf string"
-    }
-    assertEquals(stringOf(x), "this is the stringOf string")
-    assertEquals(stringOf(x, 2), "this is the stringOf string")
-  }
-}
diff --git a/scalacheck/src/test/scala/IntOperators.scala b/scalacheck/src/test/scala/IntOperators.scala
index 4e7182cc..6c36fbd9 100644
--- a/scalacheck/src/test/scala/IntOperators.scala
+++ b/scalacheck/src/test/scala/IntOperators.scala
@@ -50,7 +50,7 @@ trait IntOperators extends Operators[Int] {
     (Int.MinValue, math.max(_, _)),
     (Int.MaxValue, math.min(_, _))
   )
-  def addAllTraversables = List(
+  def addAllIterables = List(
     List[Int](),
     List(1),
     List(1, 2),
diff --git a/scalacheck/src/test/scala/Operators.scala b/scalacheck/src/test/scala/Operators.scala
index 7b5e7f57..0d899a3c 100644
--- a/scalacheck/src/test/scala/Operators.scala
+++ b/scalacheck/src/test/scala/Operators.scala
@@ -8,7 +8,7 @@ trait Operators[T] {
   def findPredicates: List[T => Boolean]
   def mapFunctions: List[T => T]
   def partialMapFunctions: List[PartialFunction[T, T]]
-  def flatMapFunctions: List[T => Traversable[T]]
+  def flatMapFunctions: List[T => Iterable[T]]
   def filterPredicates: List[T => Boolean]
   def filterNotPredicates: List[T => Boolean]
   def partitionPredicates: List[T => Boolean]
@@ -16,7 +16,7 @@ trait Operators[T] {
   def dropWhilePredicates: List[T => Boolean]
   def spanPredicates: List[T => Boolean]
   def foldArguments: List[(T, (T, T) => T)]
-  def addAllTraversables: List[Traversable[T]]
+  def addAllIterables: List[Iterable[T]]
   def newArray(sz: Int): Array[T]
   def groupByFunctions: List[T => T]
 }
diff --git a/scalacheck/src/test/scala/PairOperators.scala b/scalacheck/src/test/scala/PairOperators.scala
index f1318498..9a5b10c1 100644
--- a/scalacheck/src/test/scala/PairOperators.scala
+++ b/scalacheck/src/test/scala/PairOperators.scala
@@ -43,8 +43,8 @@ trait PairOperators[K, V] extends Operators[(K, V)] {
   def flatMapFunctions = for {
     (kfm, vfm) <- koperators.flatMapFunctions zip voperators.flatMapFunctions
-  } yield new Function1[(K, V), Traversable[(K, V)]] {
-    def apply(kv: (K, V)) = kfm(kv._1).toIterable zip vfm(kv._2).toIterable
+  } yield new Function1[(K, V), Iterable[(K, V)]] {
+    def apply(kv: (K, V)) = kfm(kv._1) zip vfm(kv._2)
   }

   def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.filterPredicates)
@@ -65,9 +65,9 @@ trait PairOperators[K, V] extends Operators[(K, V)] {
     def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
   })

-  def addAllTraversables = for {
-    (kt, vt) <- koperators.addAllTraversables zip voperators.addAllTraversables
-  } yield kt.toIterable zip vt.toIterable
+  def addAllIterables = for {
+    (kt, vt) <- koperators.addAllIterables zip voperators.addAllIterables
+  } yield kt zip vt

   def newArray(sz: Int) = new Array[(K, V)](sz)
diff --git a/scalacheck/src/test/scala/ParallelArrayCheck.scala b/scalacheck/src/test/scala/ParallelArrayCheck.scala
index 8db00815..6eb86b00 100644
--- a/scalacheck/src/test/scala/ParallelArrayCheck.scala
+++ b/scalacheck/src/test/scala/ParallelArrayCheck.scala
@@ -43,7 +43,7 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
   property("array mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
     val results = for ((f, ind) <- mapFunctions.zipWithIndex)
-      yield ("op index: " + ind) |: t.map(f) == coll.map(f)
+      yield ("op index: " + ind) |: t.map(f).sameElements(coll.map(f))
     results.reduceLeft(_ && _)
   }
diff --git a/scalacheck/src/test/scala/ParallelCtrieCheck.scala b/scalacheck/src/test/scala/ParallelCtrieCheck.scala
index 46b13226..51a4ecef 100644
--- a/scalacheck/src/test/scala/ParallelCtrieCheck.scala
+++ b/scalacheck/src/test/scala/ParallelCtrieCheck.scala
@@ -30,7 +30,7 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
     ct
   }

-  def fromTraversable(t: Traversable[(K, V)]) = {
+  def fromIterable(t: Iterable[(K, V)]) = {
     val pct = new ParTrieMap[K, V]
     pct.tasksupport = tasksupport
     var i = 0
@@ -62,7 +62,7 @@ with PairValues[Int, Int]
     println("could not match data structure type: " + ds.getClass)
   }

-  override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+  override def checkDataStructureInvariants(orig: Iterable[(Int, Int)], ds: AnyRef) = ds match {
     // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
     //  val invs = pm.brokenInvariants
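The driving change in these fixtures is that `Traversable` no longer exists in 2.13, so the test scaffolding traffics in `Iterable` everywhere (`fromTraversable` becomes `fromIterable`, `addAllTraversables` becomes `addAllIterables`). A tiny sketch of the new shape of such a fixture; the values here are made up for illustration:

  object IntOperatorsSketch {
    // was: List[T => Traversable[T]]
    val flatMapFunctions: List[Int => Iterable[Int]] =
      List(n => List(n, n + 1), _ => Iterable.empty)

    // was: addAllTraversables: List[Traversable[Int]]
    val addAllIterables: List[Iterable[Int]] =
      List(Nil, List(1), List(1, 2))
  }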
diff --git a/scalacheck/src/test/scala/ParallelHashMapCheck.scala b/scalacheck/src/test/scala/ParallelHashMapCheck.scala
index 4b5fe01a..a58e9657 100644
--- a/scalacheck/src/test/scala/ParallelHashMapCheck.scala
+++ b/scalacheck/src/test/scala/ParallelHashMapCheck.scala
@@ -30,7 +30,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
     hm
   }

-  def fromTraversable(t: Traversable[(K, V)]) = {
+  def fromIterable(t: Iterable[(K, V)]) = {
     val phm = new ParHashMap[K, V]
     phm.tasksupport = tasksupport
     var i = 0
@@ -62,7 +62,7 @@ with PairValues[Int, Int]
     println("could not match data structure type: " + ds.getClass)
   }

-  override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+  override def checkDataStructureInvariants(orig: Iterable[(Int, Int)], ds: AnyRef) = ds match {
     // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
     //  val invs = pm.brokenInvariants
diff --git a/scalacheck/src/test/scala/ParallelHashSetCheck.scala b/scalacheck/src/test/scala/ParallelHashSetCheck.scala
index 9c1b58f7..ff3882f4 100644
--- a/scalacheck/src/test/scala/ParallelHashSetCheck.scala
+++ b/scalacheck/src/test/scala/ParallelHashSetCheck.scala
@@ -30,7 +30,7 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
     hm
   }

-  def fromTraversable(t: Traversable[T]) = {
+  def fromIterable(t: Iterable[T]) = {
     val phs = new ParHashSet[T]
     phs.tasksupport = tasksupport
     var i = 0
@@ -54,7 +54,7 @@ with IntValues
     println("could not match data structure type: " + ds.getClass)
   }

-  override def checkDataStructureInvariants(orig: Traversable[Int], ds: AnyRef) = ds match {
+  override def checkDataStructureInvariants(orig: Iterable[Int], ds: AnyRef) = ds match {
     // case pm: ParHashSet[t] if 1 == 0 =>
     // // for an example of how not to write code proceed below
     //  val invs = pm.brokenInvariants
diff --git a/scalacheck/src/test/scala/ParallelHashTrieCheck.scala b/scalacheck/src/test/scala/ParallelHashTrieCheck.scala
index c1ad78e5..29a7fce8 100644
--- a/scalacheck/src/test/scala/ParallelHashTrieCheck.scala
+++ b/scalacheck/src/test/scala/ParallelHashTrieCheck.scala
@@ -30,7 +30,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
     hm
   }

-  def fromTraversable(t: Traversable[(K, V)]) = {
+  def fromIterable(t: Iterable[(K, V)]) = {
     var phm = new ParHashMap[K, V]
     phm.tasksupport = tasksupport
     var i = 0
@@ -76,13 +76,13 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
   def tasksupport: TaskSupport

   def ofSize(vals: Seq[Gen[T]], sz: Int) = {
-    var hm = new immutable.HashSet[T]
+    var hm = immutable.OldHashSet.empty[T]
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) hm += sample(gen)
     hm
   }

-  def fromTraversable(t: Traversable[T]) = {
+  def fromIterable(t: Iterable[T]) = {
     var phs = new ParHashSet[T]
     phs.tasksupport = tasksupport
     var i = 0
diff --git a/scalacheck/src/test/scala/ParallelIterableCheck.scala b/scalacheck/src/test/scala/ParallelIterableCheck.scala
index e818779f..06234570 100644
--- a/scalacheck/src/test/scala/ParallelIterableCheck.scala
+++ b/scalacheck/src/test/scala/ParallelIterableCheck.scala
@@ -6,6 +6,7 @@
 import org.scalacheck.Gen._
 import org.scalacheck.Prop._
 import org.scalacheck.Properties

+import scala.language.higherKinds
 import scala.collection._
 import scala.collection.parallel._
@@ -14,7 +15,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
   def values: Seq[Gen[T]]
   def ofSize(vals: Seq[Gen[T]], sz: Int): Iterable[T]
-  def fromTraversable(t: Traversable[T]): CollType
+  def fromIterable(t: Iterable[T]): CollType
   def isCheckingViews: Boolean
   def hasStrictOrder: Boolean
@@ -30,7 +31,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
   )

   // used to check if constructed collection is valid
-  def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = {
+  def checkDataStructureInvariants(orig: Iterable[T], cf: AnyRef) = {
     // can be overridden in subclasses
     true
   }
@@ -49,42 +50,44 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
   def sampleValue: T = sample(values(rnd.nextInt(values.length)))

-  def collectionPairs = for (inst <- instances(values)) yield (inst, fromTraversable(inst))
+  def collectionPairs = for (inst <- instances(values)) yield (inst, fromIterable(inst))

   def collectionPairsWithLengths = for (inst <- instances(values); s <- choose(0, inst.size))
-    yield (inst, fromTraversable(inst), s)
+    yield (inst, fromIterable(inst), s)

   def collectionPairsWith2Indices = for (
       inst <- instances(values);
       f <- choose(0, inst.size);
       s <- choose(0, inst.size))
-    yield (inst, fromTraversable(inst), f, s)
+    yield (inst, fromIterable(inst), f, s)

   def collectionTriplets = for (inst <- instances(values);
       updStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
     val modif = inst.toSeq.patch(updStart, inst.toSeq, howMany)
-    (inst, fromTraversable(inst), modif)
+    (inst, fromIterable(inst), modif)
   }

-  def areEqual(t1: GenTraversable[T], t2: GenTraversable[T]) = if (hasStrictOrder) {
-    t1 == t2 && t2 == t1
+  def areEqual(t1: Iterable[T], t2: ParIterable[T]) = if (hasStrictOrder) {
+    t1.iterator.sameElements(t2) && t2.sameElements(t1)
   } else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
-    case (m1: GenMap[_, _], m2: GenMap[_, _]) => m1 == m2 && m2 == m1
-    case (i1: GenIterable[_], i2: GenIterable[_]) =>
-      val i1s = i1.toSet
-      val i2s = i2.toSet
-      i1s == i2s && i2s == i1s
-    case _ => t1 == t2 && t2 == t1
+    case (m1: Map[_, _], m2: ParMap[_, _]) =>
+      val am1: Map[Any, Any] = m1.asInstanceOf[Map[Any, Any]]
+      val am2: ParMap[Any, Any] = m2.asInstanceOf[ParMap[Any, Any]]
+      am1.forall { case (k, v) => am2.get(k).contains(v) } && am2.forall { case (k, v) => am1.get(k).contains(v) }
+    case _ =>
+      val s1 = t1.toSet
+      val s2 = t2.toSet
+      s1.forall(s2) && s2.forall(s1)
   }

-  def printDebugInfo(coll: ParIterableLike[_, _, _]): Unit = {
+  def printDebugInfo[A, CC[X] <: ParIterable[X], C <: ParIterable[A], S <: Iterable[A] with IterableOps[A, Iterable, S]](coll: ParIterableLike[A, CC, C, S]): Unit = {
     println("Collection debug info: ")
     coll.printDebugBuffer
     println("Task debug info: ")
     println(coll.tasksupport.debugMessages.mkString("\n"))
   }

-  def printComparison(t: Traversable[_], coll: ParIterable[_], tf: Traversable[_], cf: ParIterable[_], ind: Int): Unit = {
+  def printComparison(t: Iterable[_], coll: ParIterable[_], tf: Iterable[_], cf: ParIterable[_], ind: Int): Unit = {
     printDebugInfo(coll)
     println("Operator: " + ind)
     println("sz: " + t.size)
@@ -100,8 +103,8 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     println("size: " + cf.size)
     println(cf)
     println
-    println("tf == cf - " + (tf == cf))
-    println("cf == tf - " + (cf == tf))
+    println("tf sameElements cf - " + (tf.iterator sameElements cf))
+    println("cf sameElements tf - " + (cf.iterator sameElements tf))
   }

   property("reductions must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) =>
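The `areEqual` rewrite above encodes the new rule stated in the changelog: parallel and sequential collections are never `==` to each other on 2.13, so the properties compare contents instead. A minimal demonstration, assuming scala-parallel-collections:

  import scala.collection.parallel.immutable.ParVector

  object CrossEqualitySketch {
    def main(args: Array[String]): Unit = {
      val seq = Vector(1, 2, 3)
      val par = ParVector(1, 2, 3)
      assert(seq != par)                              // no longer equal across the hierarchies
      assert(seq.iterator.sameElements(par.iterator)) // compare elements instead
    }
  }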
@@ -206,7 +209,7 @@
       val tf = t.filter(p)
       val cf = coll.filter(p)
       val invs = checkDataStructureInvariants(tf, cf)
-      if (tf != cf || cf != tf || !invs) {
+      if (!areEqual(tf, cf) || !invs) {
         printDebugInfo(coll)
         println("Operator: " + ind)
         println("sz: " + t.size)
@@ -221,12 +224,11 @@
       println
       println(tf)
       println
-      println("tf == cf - " + (tf == cf))
-      println("cf == tf - " + (cf == tf))
+      println("areEqual(tf, cf) - " + areEqual(tf, cf))
       printDataStructureDebugInfo(cf)
       println("valid: " + invs)
     }
-    ("op index: " + ind) |: tf == cf && cf == tf && invs
+    ("op index: " + ind) |: (areEqual(tf, cf) && invs)
     }).reduceLeft(_ && _)
   }
@@ -234,31 +236,31 @@
     (for ((p, ind) <- filterNotPredicates.zipWithIndex) yield {
       val tf = t.filterNot(p)
       val cf = coll.filterNot(p)
-      if (tf != cf || cf != tf) printComparison(t, coll, tf, cf, ind)
-      ("op index: " + ind) |: tf == cf && cf == tf
+      if (!areEqual(tf, cf)) printComparison(t, coll, tf, cf, ind)
+      ("op index: " + ind) |: areEqual(tf, cf)
     }).reduceLeft(_ && _)
   }

   if (!isCheckingViews) property("partitions must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
     (for ((p, ind) <- partitionPredicates.zipWithIndex) yield {
-      val tpart = t.partition(p)
-      val cpart = coll.partition(p)
-      if (tpart != cpart) {
+      val tpart @ (tpart1, tpart2) = t.partition(p)
+      val cpart @ (cpart1, cpart2) = coll.partition(p)
+      if (!areEqual(tpart1, cpart1) || !areEqual(tpart2, cpart2)) {
         println("from: " + t)
         println("and: " + coll)
         println(cpart)
         println(tpart)
       }
-      ("op index: " + ind) |: tpart == cpart
+      ("op index: " + ind) |: (areEqual(tpart1, cpart1) && areEqual(tpart2, cpart2))
     }).reduceLeft(_ && _)
   }

   if (hasStrictOrder) property("takes must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
-    ("take " + n + " elements") |: t.take(n) == coll.take(n)
+    ("take " + n + " elements") |: t.take(n).iterator.sameElements(coll.take(n))
   }

   if (hasStrictOrder) property("drops must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
-    ("drop " + n + " elements") |: t.drop(n) == coll.drop(n)
+    ("drop " + n + " elements") |: t.drop(n).iterator.sameElements(coll.drop(n))
   }

   if (hasStrictOrder) property("slices must be equal") = forAllNoShrink(collectionPairsWith2Indices)
@@ -267,7 +269,7 @@
     val until = if (from + slicelength > t.size) t.size else from + slicelength
     val tsl = t.slice(from, until)
     val collsl = coll.slice(from, until)
-    if (tsl != collsl) {
+    if (!tsl.iterator.sameElements(collsl)) {
       println("---------------------- " + from + ", " + until)
       println("from: " + t)
       println("and: " + coll)
@@ -282,42 +284,42 @@
       println(collsl.iterator.next)
       println(collsl.iterator.hasNext)
     }
-    ("slice from " + from + " until " + until) |: tsl == collsl
+    ("slice from " + from + " until " + until) |: tsl.iterator.sameElements(collsl)
   }

   if (hasStrictOrder) property("splits must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
-    val tspl = t.splitAt(n)
-    val cspl = coll.splitAt(n)
-    if (tspl != cspl) {
+    val tspl @ (tspl1, tspl2) = t.splitAt(n)
+    val cspl @ (cspl1, cspl2) = coll.splitAt(n)
+    if (!tspl1.iterator.sameElements(cspl1) || !tspl2.iterator.sameElements(cspl2)) {
       println("at: " + n)
       println("from: " + t)
       println("and: " + coll)
       println(tspl)
       println(cspl)
     }
-    ("splitAt " + n) |: tspl == cspl
+    ("splitAt " + n) |: (tspl1.iterator.sameElements(cspl1) && tspl2.iterator.sameElements(cspl2))
   }

   if (hasStrictOrder) property("takeWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
     (for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield {
       val tt = t.takeWhile(pred)
       val ct = coll.takeWhile(pred)
-      if (tt != ct) {
+      if (!tt.iterator.sameElements(ct)) {
         println("from: " + t)
         println("and: " + coll)
         println("taking while...")
         println(tt)
         println(ct)
       }
-      ("operator " + ind) |: tt == ct
+      ("operator " + ind) |: tt.iterator.sameElements(ct)
     }).reduceLeft(_ && _)
   }

   if (hasStrictOrder) property("spans must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
     (for ((pred, ind) <- spanPredicates.zipWithIndex) yield {
-      val tsp = t.span(pred)
-      val csp = coll.span(pred)
-      if (tsp != csp) {
+      val tsp @ (tsp1, tsp2) = t.span(pred)
+      val csp @ (csp1, csp2) = coll.span(pred)
+      if (!tsp1.iterator.sameElements(csp1) || !tsp2.iterator.sameElements(csp2)) {
        println("from: " + t)
        println("and: " + coll)
        println("span with predicate " + ind)
@@ -327,13 +329,13 @@
        println(coll.span(pred))
        println("---------------------------------")
      }
-      ("operator " + ind) |: tsp == csp
+      ("operator " + ind) |: (tsp1.iterator.sameElements(csp1) && tsp2.iterator.sameElements(csp2))
     }).reduceLeft(_ && _)
   }

   if (hasStrictOrder) property("dropWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
     (for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield {
-      ("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred)
+      ("operator " + ind) |: t.dropWhile(pred).iterator.sameElements(coll.dropWhile(pred))
     }).reduceLeft(_ && _)
   }
@@ -365,9 +367,9 @@
       println(cr.iterator.toList)
     }
     ("adding " |: areEqual(tr, cr)) &&
-    (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
+    (for ((trav, ind) <- addAllIterables.zipWithIndex) yield {
       val tadded = t ++ trav
-      val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
+      val cadded = coll ++ trav
       if (!areEqual(tadded, cadded)) {
         println("----------------------")
         println("from: " + t)
@@ -403,30 +405,34 @@
     (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
       val tscan = t.scanLeft(first)(op)
       val cscan = coll.scan(first)(op)
-      if (tscan != cscan || cscan != tscan) {
+      if (!tscan.iterator.sameElements(cscan) || !cscan.sameElements(tscan)) {
         println("from: " + t)
         println("and: " + coll)
         println("scans are: ")
         println(tscan)
         println(cscan)
       }
-      ("operator " + ind) |: tscan == cscan && cscan == tscan
+      ("operator " + ind) |: tscan.iterator.sameElements(cscan) && cscan.sameElements(tscan)
     }).reduceLeft(_ && _)
   }
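Note the `++` property above no longer wraps the sequential argument in a ParArray: the 2.13 parallel collections accept both parallel and plain sequential collections on the right-hand side of `++` (separate overloads, per the changelog). A sketch of both shapes:

  import scala.collection.parallel.immutable.ParVector

  object ConcatOverloadSketch {
    def main(args: Array[String]): Unit = {
      val par = ParVector(1, 2)
      val a = par ++ List(3, 4)      // sequential Iterable: no wrapping needed
      val b = par ++ ParVector(3, 4) // parallel argument: ParIterable overload
      assert(a.iterator.sameElements(b.iterator))
    }
  }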
   property("groupBy must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
     (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
-      val tgroup = t.groupBy(f)
-      val cgroup = coll.groupBy(f)
-      if (tgroup != cgroup || cgroup != tgroup) {
+      val tgroup: scala.collection.Map[T, Iterable[T]] = t.groupBy(f)
+      val cgroup: scala.collection.parallel.ParMap[T, ParIterable[T]] = coll.groupBy(f)
+      val cgroupseq: scala.collection.parallel.ParMap[T, Iterable[T]] = cgroup.map { case (k, xs) => (k, xs.seq) }
+      val areMapsEqual =
+        tgroup.forall { case (k, v) => cgroupseq.get(k).contains(v) } &&
+        cgroupseq.forall { case (k, v) => tgroup.get(k).contains(v) }
+      if (!areMapsEqual) {
         println("from: " + t)
         println("and: " + coll)
         println("groups are: ")
         println(tgroup)
-        println(cgroup)
+        println(cgroupseq)
       }
-      ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
+      ("operator " + ind) |: areMapsEqual
     }).reduceLeft(_ && _)
   }
diff --git a/scalacheck/src/test/scala/ParallelMapCheck1.scala b/scalacheck/src/test/scala/ParallelMapCheck1.scala
index 7d321875..e91e892b 100644
--- a/scalacheck/src/test/scala/ParallelMapCheck1.scala
+++ b/scalacheck/src/test/scala/ParallelMapCheck1.scala
@@ -20,4 +20,102 @@ abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterable
     ("Par contains elements of itself" |: containsSelf.forall(_ == true))
   }

+  override def collectionPairs: Gen[(Map[K, V], CollType)] =
+    super.collectionPairs.map { case (iterable, parmap) =>
+      (iterable.to(Map), parmap)
+    }
+
+  override def collectionTriplets: Gen[(Map[K, V], CollType, scala.Seq[(K, V)])] =
+    super.collectionTriplets.map { case (iterable, parmap, seq) =>
+      (iterable.to(Map), parmap, seq)
+    }
+
+  // The following tests have been copied from `ParIterableCheck`, and adapted to test
+  // overloads of the methods that return Map and ParMap collections
+  // They are disabled for now because this behavior is unspecified and the tests fail.
+//  property("mappings returning maps must be equal") = forAll/*NoShrink*/(collectionPairs) { case (t, coll) =>
+//    val results = for ((f, ind) <- mapFunctions.zipWithIndex.take(5)) yield {
+//      val ms: Map[K, V] = t.map(f)
+//      val mp: ParMap[K, V] = coll.map(f)
+//      val invs = checkDataStructureInvariants(ms, mp)
+//      if (!areEqual(ms, mp) || !invs) {
+//        println(t)
+//        println(coll)
+//        println("mapped to: ")
+//        println(ms)
+//        println(mp)
+//        println("sizes: ")
+//        println(ms.size)
+//        println(mp.size)
+//        println("valid: " + invs)
+//      }
+//      ("op index: " + ind) |: (areEqual(ms, mp) && invs)
+//    }
+//    results.reduceLeft(_ && _)
+//  }
+//
+//  property("collects returning maps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+//    val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield {
+//      val ps: Map[K, V] = t.collect(f)
+//      val pp: ParMap[K, V] = coll.collect(f)
+//      if (!areEqual(ps, pp)) {
+//        println(t)
+//        println(coll)
+//        println("collected to: ")
+//        println(ps)
+//        println(pp)
+//      }
+//      ("op index: " + ind) |: areEqual(ps, pp)
+//    }
+//    results.reduceLeft(_ && _)
+//  }
+//
+//  property("flatMaps returning maps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+//    (for ((f, ind) <- flatMapFunctions.zipWithIndex)
+//    yield ("op index: " + ind) |: {
+//      val tf: Map[K, V] = t.flatMap(f)
+//      val collf: ParMap[K, V] = coll.flatMap(f)
+//      if (!areEqual(tf, collf)) {
+//        println("----------------------")
+//        println(s"t = $t")
+//        println(s"coll = $coll")
+//        println(s"tf = $tf")
+//        println(s"collf = $collf")
+//      }
+//      areEqual(t.flatMap(f), coll.flatMap(f))
+//    }).reduceLeft(_ && _)
+//  }
+//
+//  property("++s returning maps must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
+//    try {
+//      val toadd = colltoadd
+//      val tr: Map[K, V] = t ++ toadd.iterator
+//      val cr: ParMap[K, V] = coll ++ toadd.iterator
+//      if (!areEqual(tr, cr)) {
+//        println("from: " + t)
+//        println("and: " + coll.iterator.toList)
+//        println("adding: " + toadd)
+//        println(tr.toList)
+//        println(cr.iterator.toList)
+//      }
+//      (s"adding " |: areEqual(tr, cr)) &&
+//      (for ((trav, ind) <- addAllIterables.zipWithIndex) yield {
+//        val tadded: Map[K, V] = t ++ trav
+//        val cadded: ParMap[K, V] = coll ++ trav
+//        if (!areEqual(tadded, cadded)) {
+//          println("----------------------")
+//          println("from: " + t)
+//          println("and: " + coll)
+//          println("adding: " + trav)
+//          println(tadded)
+//          println(cadded)
+//        }
+//        ("traversable " + ind) |: areEqual(tadded, cadded)
+//      }).reduceLeft(_ && _)
+//    } catch {
+//      case e: java.lang.Exception =>
+//        throw e
+//    }
+//  }
+
 }
diff --git a/scalacheck/src/test/scala/ParallelRangeCheck.scala b/scalacheck/src/test/scala/ParallelRangeCheck.scala
index 439e0ad4..d0f56d48 100644
--- a/scalacheck/src/test/scala/ParallelRangeCheck.scala
+++ b/scalacheck/src/test/scala/ParallelRangeCheck.scala
@@ -26,7 +26,7 @@ abstract class ParallelRangeCheck(val tasksupport: TaskSupport) extends Parallel
   override def instances(vals: Seq[Gen[Int]]): Gen[Seq[Int]] = sized { start =>
     sized { end =>
       sized { step =>
-        new Range(start, end, if (step != 0) step else 1)
+        Range(start, end, if (step != 0) step else 1)
       }
     }
   }
@@ -43,11 +43,6 @@ abstract class ParallelRangeCheck(val tasksupport: TaskSupport) extends Parallel
     pa
   }

-  override def traversable2Seq(t: Traversable[Int]): Seq[Int] = t match {
-    case r: Range => r
-    case _ => t.toSeq
-  }
-
   def values = Seq(choose(-100, 100))
 }
diff --git a/scalacheck/src/test/scala/ParallelSeqCheck.scala b/scalacheck/src/test/scala/ParallelSeqCheck.scala
index 7e57ce79..f2aa3b32 100644
--- a/scalacheck/src/test/scala/ParallelSeqCheck.scala
+++ b/scalacheck/src/test/scala/ParallelSeqCheck.scala
@@ -9,7 +9,8 @@
 import scala.collection._
 import scala.collection.parallel._

-abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableCheck[T](collName) with SeqOperators[T] {
+abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableCheck[T](collName)
+  with SeqOperators[T] {

   type CollType <: collection.parallel.ParSeq[T]

@@ -27,10 +28,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
   )

-  def fromTraversable(t: Traversable[T]) = fromSeq(traversable2Seq(t))
-  def traversable2Seq(t: Traversable[T]): Seq[T] = {
-    if (t.isInstanceOf[Iterable[_]]) t.asInstanceOf[Iterable[T]].iterator.toList else t.toList
-  }
+  def fromIterable(t: Iterable[T]) = fromSeq(t.toSeq)

   override def collectionPairs: Gen[(Seq[T], CollType)] = for (inst <- instances(values)) yield (inst, fromSeq(inst))

@@ -85,7 +83,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
   property("prefixLengths must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
     (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
-      ("operator " + ind) |: s.prefixLength(pred) == coll.prefixLength(pred)
+      ("operator " + ind) |: s.segmentLength(pred) == coll.prefixLength(pred)
     }).reduceLeft(_ && _)
   }
@@ -118,19 +116,19 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
     {
       val sr = s.reverse
       val cr = coll.reverse
-      if (sr != cr) {
+      if (!sr.sameElements(cr)) {
         println("from: " + s)
         println("and: " + coll)
         println(sr)
         println(cr)
       }
-      sr == cr
+      sr sameElements cr
     }
   }
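On the sequential side, `prefixLength` is deprecated in 2.13 in favour of `segmentLength`, which is why the prefixLengths property above compares `s.segmentLength(pred)` with the parallel collection's `prefixLength`. A sketch of the replacement:

  object SegmentLengthSketch {
    def main(args: Array[String]): Unit = {
      val s = Vector(2, 4, 6, 1, 2)
      // 2.13 spelling of "length of the longest prefix satisfying p":
      assert(s.segmentLength(_ % 2 == 0) == 3)
      // equivalent to the deprecated s.prefixLength(_ % 2 == 0)
    }
  }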
   property("reverseMaps must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
     (for ((f, ind) <- reverseMapFunctions.zipWithIndex) yield {
-      ("operator " + ind) |: s.reverseMap(f) == coll.reverseMap(f)
+      ("operator " + ind) |: s.reverseIterator.map(f).toSeq.sameElements(coll.reverseMap(f))
     }).reduceLeft(_ && _)
   }
@@ -196,7 +194,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
     ("ends with tail" |: (s.length == 0 || s.endsWith(s.tail) == coll.endsWith(coll.tail))) &&
     ("with each other" |: coll.endsWith(s)) &&
     ("modified" |: s.startsWith(collmodif) == coll.endsWith(collmodif)) &&
-    (for (sq <- startEndSeqs) yield {
+    (for (sq <- startEndSeqs if s.nonEmpty /* guard because of https://github.com/scala/bug/issues/11328 */) yield {
       val sew = s.endsWith(sq)
       val cew = coll.endsWith(fromSeq(sq))
       if (sew != cew) {
@@ -206,12 +204,12 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
         println(cew)
       }
       ("seq " + sq) |: sew == cew
-    }).reduceLeft(_ && _)
+    }).foldLeft(Prop.passed)(_ && _)
   }

   property("unions must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, collmodif) =>
-    ("modified" |: s.union(collmodif.seq) == coll.union(collmodif)) &&
-    ("empty" |: s.union(Nil) == coll.union(fromSeq(Nil)))
+    ("modified" |: s.++(collmodif.seq).sameElements(coll.union(collmodif))) &&
+    ("empty" |: s.++(Nil).sameElements(coll.union(fromSeq(Nil))))
   }

   // This is failing with my views patch: array index out of bounds in the array iterator.
@@ -222,10 +220,10 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
   // if (!isCheckingViews)
   property("patches must be equal") = forAll(collectionTripletsWith2Indices) { case (s, coll, pat, from, repl) =>
-    ("with seq" |: s.patch(from, pat, repl) == coll.patch(from, pat, repl)) &&
-    ("with par" |: s.patch(from, pat, repl) == coll.patch(from, fromSeq(pat), repl)) &&
-    ("with empty" |: s.patch(from, Nil, repl) == coll.patch(from, fromSeq(Nil), repl)) &&
-    ("with one" |: (s.length == 0 || s.patch(from, List(s(0)), 1) == coll.patch(from, fromSeq(List(coll(0))), 1)))
+    ("with seq" |: s.patch(from, pat, repl).sameElements(coll.patch(from, pat, repl))) &&
+    ("with par" |: s.patch(from, pat, repl).sameElements(coll.patch(from, fromSeq(pat), repl))) &&
+    ("with empty" |: s.patch(from, Nil, repl).sameElements(coll.patch(from, fromSeq(Nil), repl))) &&
+    ("with one" |: (s.length == 0 || s.patch(from, List(s(0)), 1).sameElements(coll.patch(from, fromSeq(List(coll(0))), 1))))
   }

   if (!isCheckingViews) property("updates must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
@@ -233,36 +231,36 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
     if (s.length > 0) {
       val supd = s.updated(pos, s(0))
       val cupd = coll.updated(pos, coll(0))
-      if (supd != cupd) {
+      if (!supd.sameElements(cupd)) {
         println("from: " + s)
         println("and: " + coll)
         println(supd)
         println(cupd)
       }
-      "from first" |: (supd == cupd)
+      "from first" |: (supd sameElements cupd)
     } else "trivially" |: true
   }

   property("prepends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
-    s.length == 0 || s(0) +: s == coll(0) +: coll
+    s.length == 0 || (s(0) +: s).sameElements(coll(0) +: coll)
   }

   property("appends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
-    s.length == 0 || s :+ s(0) == coll :+ coll(0)
+    s.length == 0 || (s :+ s(0)).sameElements(coll :+ coll(0))
   }
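Because the scala/bug#11328 guard above can filter out every generated sample, the endsWith property now folds from `Prop.passed` instead of calling `reduceLeft`, which throws on an empty list. A sketch of the difference, assuming ScalaCheck 1.14:

  import org.scalacheck.Prop

  object FoldPropsSketch {
    def combine(props: List[Prop]): Prop =
      // reduceLeft(_ && _) would throw UnsupportedOperationException on Nil;
      // folding from the neutral Prop.passed tolerates an empty list.
      props.foldLeft(Prop.passed)(_ && _)
  }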
   property("padTos must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
     val someValue = sampleValue
     val sdoub = s.padTo(len * 2, someValue)
     val cdoub = coll.padTo(len * 2, someValue)
-    if (sdoub != cdoub) {
+    if (!sdoub.sameElements(cdoub)) {
       println("from: " + s)
       println("and: " + coll)
       println(sdoub)
       println(cdoub)
     }
-    ("smaller" |: s.padTo(len / 2, someValue) == coll.padTo(len / 2, someValue)) &&
-    ("bigger" |: sdoub == cdoub)
+    ("smaller" |: s.padTo(len / 2, someValue).sameElements(coll.padTo(len / 2, someValue))) &&
+    ("bigger" |: sdoub.sameElements(cdoub))
   }

   property("corresponds must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, modified) =>
diff --git a/scalacheck/src/test/scala/ParallelSetCheck.scala b/scalacheck/src/test/scala/ParallelSetCheck.scala
index deee81de..e17fa9e6 100644
--- a/scalacheck/src/test/scala/ParallelSetCheck.scala
+++ b/scalacheck/src/test/scala/ParallelSetCheck.scala
@@ -10,6 +10,7 @@
 import scala.collection._
 import scala.collection.parallel._

 abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) {
+  type CollType <: ParSet[T]

   property("gets iterated keys") = forAllNoShrink(collectionPairs) {

From 359e591941f57d1bb3843c18cda72f9fc3553a6f Mon Sep 17 00:00:00 2001
From: Seth Tisue
Date: Tue, 29 Jan 2019 16:18:28 -0800
Subject: [PATCH 2/4] don't try to use 2.12 ScalaCheck

---
 build.sbt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.sbt b/build.sbt
index 7bc4df4d..365d8fd2 100644
--- a/build.sbt
+++ b/build.sbt
@@ -74,7 +74,7 @@ lazy val junit = project.in(file("junit"))
 lazy val scalacheck = project.in(file("scalacheck"))
   .settings(commonSettings)
   .settings(
-    libraryDependencies += "org.scalacheck" % "scalacheck_2.12" % "1.14.0",
+    libraryDependencies += "org.scalacheck" %% "scalacheck" % "1.14.0",
     fork in Test := true,
     testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-workers", "1", "-minSize", "0", "-maxSize", "4000", "-minSuccessfulTests", "5"),
     disablePublishing

From 4af1d05fb03d1654b947e7328513d59e5ebde902 Mon Sep 17 00:00:00 2001
From: Seth Tisue
Date: Tue, 29 Jan 2019 18:50:05 -0800
Subject: [PATCH 3/4] use newer Scala 2.13 snapshot that has needed collections
 changes

---
 build.sbt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.sbt b/build.sbt
index 365d8fd2..c82cbc98 100644
--- a/build.sbt
+++ b/build.sbt
@@ -5,7 +5,7 @@ version in ThisBuild := "0.1.3-SNAPSHOT"
 resolvers in ThisBuild += "scala-integration" at "https://scala-ci.typesafe.com/artifactory/scala-integration/"

 scalaVersionsByJvm in ThisBuild := {
-  val v213 = "2.13.0-pre-021a9a4"
+  val v213 = "2.13.0-pre-b9306a2"
   Map(
     8 -> List(v213 -> true),
     11 -> List(v213 -> false))

From 17647af000bd764faf9e4e422374e7993f911620 Mon Sep 17 00:00:00 2001
From: Seth Tisue
Date: Fri, 1 Feb 2019 13:55:40 -0800
Subject: [PATCH 4/4] adapt to scala/scala#7696

---
 build.sbt                                                       | 2 +-
 core/src/main/scala/scala/collection/immutable/OldHashSet.scala | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/build.sbt b/build.sbt
index c82cbc98..b29d728b 100644
--- a/build.sbt
+++ b/build.sbt
@@ -5,7 +5,7 @@ version in ThisBuild := "0.1.3-SNAPSHOT"
 resolvers in ThisBuild += "scala-integration" at "https://scala-ci.typesafe.com/artifactory/scala-integration/"

 scalaVersionsByJvm in ThisBuild := {
-  val v213 = "2.13.0-pre-b9306a2"
+  val v213 = "2.13.0-pre-e40c95e"
   Map(
     8 -> List(v213 -> true),
     11 -> List(v213 -> false))
diff --git a/core/src/main/scala/scala/collection/immutable/OldHashSet.scala b/core/src/main/scala/scala/collection/immutable/OldHashSet.scala
index f6ba2f24..3fdb52c0 100644
--- a/core/src/main/scala/scala/collection/immutable/OldHashSet.scala
+++ b/core/src/main/scala/scala/collection/immutable/OldHashSet.scala
@@ -49,7 +49,7 @@ sealed abstract class OldHashSet[A]
       super.subsetOf(that)
     }

-  override def concat(that: collection.IterableOnce[A]): OldHashSet[A] = that match {
+  override def concat(that: collection.IterableOnce[A])(implicit dummy: DummyImplicit): OldHashSet[A] = that match {
     case that: OldHashSet[A] =>
       val buffer = new Array[OldHashSet[A]](bufferSize(this.size + that.size))
       nullToEmpty(union0(that, 0, buffer, 0))
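The `DummyImplicit` parameter added in PATCH 4/4 is the standard trick for keeping two overloads whose parameter types would otherwise erase to the same JVM signature: the extra implicit parameter list makes the signatures distinct while callers remain unchanged. A generic sketch of the pattern, unrelated to OldHashSet itself:

  object DummyImplicitSketch {
    // After erasure both methods would take an Iterable; the implicit
    // parameter list gives the second one a distinct signature.
    def describe(xs: Iterable[Int]): String = "ints"
    def describe(xs: Iterable[String])(implicit d: DummyImplicit): String = "strings"

    def main(args: Array[String]): Unit = {
      assert(describe(List(1, 2)) == "ints")
      assert(describe(List("a", "b")) == "strings")
    }
  }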