diff --git a/.drone.yml b/.drone.yml index afe3fc6bfcf3..cd4702e0bdb9 100644 --- a/.drone.yml +++ b/.drone.yml @@ -27,7 +27,7 @@ steps: - name: test pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: [ clone ] commands: - cp -R . /tmp/1/ && cd /tmp/1/ @@ -36,7 +36,7 @@ steps: - name: test_bootstrapped pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: [ clone ] commands: - cp -R . /tmp/2/ && cd /tmp/2/ @@ -45,7 +45,7 @@ steps: - name: community_build pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: [ clone ] commands: - cp -R . /tmp/3/ && cd /tmp/3/ @@ -54,7 +54,7 @@ steps: - name: test_sbt pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: [ clone ] commands: - cp -R . /tmp/4/ && cd /tmp/4/ @@ -66,7 +66,7 @@ steps: - name: test_java11 pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: [ clone ] commands: - export PATH="/usr/lib/jvm/java-11-openjdk-amd64/bin:$PATH" @@ -80,7 +80,7 @@ steps: - name: documentation pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: - test - test_bootstrapped @@ -99,7 +99,7 @@ steps: - name: publish_nightly pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: - test - test_bootstrapped @@ -126,7 +126,7 @@ steps: - name: publish_release pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: - test - test_bootstrapped @@ -169,7 +169,7 @@ steps: - name: publish_sbt_release pull: default - image: lampepfl/dotty:2019-04-22 + image: lampepfl/dotty:2019-08-22 depends_on: - test - test_bootstrapped diff --git a/.gitmodules b/.gitmodules index 38e8b02ebd69..de045a725963 100644 --- a/.gitmodules +++ b/.gitmodules @@ -39,7 +39,7 @@ url = https://github.com/dotty-staging/sourcecode [submodule "community-build/community-projects/scala-xml"] path = community-build/community-projects/scala-xml - url = https://github.com/scala/scala-xml + url = https://github.com/dotty-staging/scala-xml [submodule "community-build/community-projects/shapeless"] path = community-build/community-projects/shapeless url = https://github.com/milessabin/shapeless diff --git a/bench/src/main/scala/Benchmarks.scala b/bench/src/main/scala/Benchmarks.scala index 60206de54f9a..779579cd1c6b 100644 --- a/bench/src/main/scala/Benchmarks.scala +++ b/bench/src/main/scala/Benchmarks.scala @@ -13,7 +13,7 @@ import org.openjdk.jmh.results.format._ import java.util.concurrent.TimeUnit import java.io.{File, FileOutputStream, BufferedWriter, FileWriter} -import scala.collection.JavaConversions._ +import scala.collection.JavaConverters._ import scala.io.Source object Bench { @@ -86,7 +86,7 @@ class CompilerOptions { @Setup def prepare: Unit = { - opts = Bench.readCompileOptions.to[Array] + opts = Bench.readCompileOptions.toArray } } diff --git a/community-build/community-projects/ScalaPB b/community-build/community-projects/ScalaPB index 79ecc8db710e..94935f6d93ba 160000 --- a/community-build/community-projects/ScalaPB +++ b/community-build/community-projects/ScalaPB @@ -1 +1 @@ -Subproject commit 79ecc8db710e53f07ea12d1dfef80993bbd62e96 +Subproject commit 94935f6d93baeff720e9b2839b985cbcb8d6fded diff --git a/community-build/community-projects/algebra b/community-build/community-projects/algebra index 5dda5f9a18cc..44a2acf5b758 160000 --- 
a/community-build/community-projects/algebra +++ b/community-build/community-projects/algebra @@ -1 +1 @@ -Subproject commit 5dda5f9a18cc2d1ef7863ab5a3d8a9b6d4fd9f71 +Subproject commit 44a2acf5b758c0425f48b30b7faa3c677e85a989 diff --git a/community-build/community-projects/betterfiles b/community-build/community-projects/betterfiles index 49b55d6b06e4..b3e30073b857 160000 --- a/community-build/community-projects/betterfiles +++ b/community-build/community-projects/betterfiles @@ -1 +1 @@ -Subproject commit 49b55d6b06e44f0656877105202236a480e89b25 +Subproject commit b3e30073b857dd900fd649a064c490dcee80ef94 diff --git a/community-build/community-projects/effpi b/community-build/community-projects/effpi index 072c66da8894..d1fea9d70ff8 160000 --- a/community-build/community-projects/effpi +++ b/community-build/community-projects/effpi @@ -1 +1 @@ -Subproject commit 072c66da8894f07cb29045f6d3c0f3284140370f +Subproject commit d1fea9d70ff8fccceafb0c688c9650c97185c590 diff --git a/community-build/community-projects/fastparse b/community-build/community-projects/fastparse index 79431b056713..5bb27834d584 160000 --- a/community-build/community-projects/fastparse +++ b/community-build/community-projects/fastparse @@ -1 +1 @@ -Subproject commit 79431b0567131084e82ed1f5769c3063e2917bf6 +Subproject commit 5bb27834d58422282bc12a1b5a03a69dc081338e diff --git a/community-build/community-projects/minitest b/community-build/community-projects/minitest index 780845b89f79..9d5fbb80dcb0 160000 --- a/community-build/community-projects/minitest +++ b/community-build/community-projects/minitest @@ -1 +1 @@ -Subproject commit 780845b89f79bd30081b97d8ad1c63bbac2d67f2 +Subproject commit 9d5fbb80dcb095baac88deb4960d616870745cf9 diff --git a/community-build/community-projects/scala-xml b/community-build/community-projects/scala-xml index 19f53ad12d73..04b481ffcc29 160000 --- a/community-build/community-projects/scala-xml +++ b/community-build/community-projects/scala-xml @@ -1 +1 @@ -Subproject commit 19f53ad12d7311ba85d8a239e40efed47f4786ab +Subproject commit 04b481ffcc29e7ea52f10f9091e9b918d58f9f7f diff --git a/community-build/community-projects/scalacheck b/community-build/community-projects/scalacheck index a7c5010ee2cc..f2f5e672c03b 160000 --- a/community-build/community-projects/scalacheck +++ b/community-build/community-projects/scalacheck @@ -1 +1 @@ -Subproject commit a7c5010ee2ccaac17e89b35d8acb080429f2e2a7 +Subproject commit f2f5e672c03b46e0ef96a86356561052045b86a7 diff --git a/community-build/community-projects/scalap b/community-build/community-projects/scalap index c7ebf924aeb9..dac8a812cfa6 160000 --- a/community-build/community-projects/scalap +++ b/community-build/community-projects/scalap @@ -1 +1 @@ -Subproject commit c7ebf924aeb963eec33d7ec72efcf3cdfb54a7f3 +Subproject commit dac8a812cfa68a5e96cbd6b7a34481e075bfd4e4 diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index d85fb115e845..a50e7605bad7 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit d85fb115e845a9790375e540c6beb499d3512340 +Subproject commit a50e7605bad77a44973e9c020905e64db8bc5bd0 diff --git a/community-build/community-projects/scopt b/community-build/community-projects/scopt index 9155bdcf1342..4699b53dfa73 160000 --- a/community-build/community-projects/scopt +++ b/community-build/community-projects/scopt @@ -1 +1 @@ -Subproject commit 9155bdcf13424ed385ba1d2dfc8f6f54ba02b7f0 +Subproject 
commit 4699b53dfa730f78e025c4626329d8172c330e5f diff --git a/community-build/community-projects/semanticdb b/community-build/community-projects/semanticdb index 136ea646f10a..60fe89fb522b 160000 --- a/community-build/community-projects/semanticdb +++ b/community-build/community-projects/semanticdb @@ -1 +1 @@ -Subproject commit 136ea646f10a4c830bccc3745790f265c87fd29a +Subproject commit 60fe89fb522b3c2132155bdf2bf27b55a9eec63b diff --git a/community-build/community-projects/sourcecode b/community-build/community-projects/sourcecode index 30bab2fe8261..b23462bbaf37 160000 --- a/community-build/community-projects/sourcecode +++ b/community-build/community-projects/sourcecode @@ -1 +1 @@ -Subproject commit 30bab2fe8261cab8c68de1b8ab540a83750de733 +Subproject commit b23462bbaf3781e27fd54970a76b265cb2ae7e71 diff --git a/community-build/community-projects/squants b/community-build/community-projects/squants index c178ff07fe72..c30f46abc841 160000 --- a/community-build/community-projects/squants +++ b/community-build/community-projects/squants @@ -1 +1 @@ -Subproject commit c178ff07fe7287b89ec5324dade26500b83ebf67 +Subproject commit c30f46abc841b9b26f96850a21cfc009545cd22b diff --git a/community-build/community-projects/xml-interpolator b/community-build/community-projects/xml-interpolator index 0c020ee5a447..ce42d5bf7d00 160000 --- a/community-build/community-projects/xml-interpolator +++ b/community-build/community-projects/xml-interpolator @@ -1 +1 @@ -Subproject commit 0c020ee5a447db293f48a9ec2bf5ff23bd0d0726 +Subproject commit ce42d5bf7d00239aedf3bbf9d7ad8319216d9391 diff --git a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala index b8b58c46d855..f6ef1ac1fb23 100644 --- a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala +++ b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala @@ -9,7 +9,7 @@ import org.junit.experimental.categories.Category @Category(Array(classOf[TestCategory])) class CommunityBuildTest { - lazy val communitybuildDir: Path = Paths.get(sys.props("user.dir") + "/community-build/") + lazy val communitybuildDir: Path = Paths.get(sys.props("user.dir")) lazy val compilerVersion: String = { val file = communitybuildDir.resolve("dotty-bootstrapped.version") @@ -153,15 +153,15 @@ class CommunityBuildTest { @Test def fastparse = test( project = "fastparse", - testCommand = "fastparseJVM/compile", - updateCommand = "fastparseJVM/update" + testCommand = "dotty-community-build/compile;dotty-community-build/test:compile", + updateCommand = "dotty-community-build/update" ) // TODO: revert to sourcecodeJVM/test @Test def sourcecode = test( project = "sourcecode", - testCommand = "sourcecodeJVM/compile", - updateCommand = "sourcecodeJVM/update" + testCommand = "sourcecode/compile;sourcecode/test:compile", + updateCommand = "sourcecode/update" ) @Test def stdLib213 = test( @@ -194,8 +194,16 @@ class CommunityBuildTest { // We set `useEffpiPlugin := false` because we don't want to run their // compiler plugin since it relies on external binaries (from the model // checker mcrl2), however we do compile the compiler plugin. 
- testCommand = ";set ThisBuild / useEffpiPlugin := false; effpi/test:compile; plugin/test:compile; benchmarks/test:compile; examples/test:compile; pluginBenchmarks/test:compile", - updateCommand = ";set ThisBuild / useEffpiPlugin := false; effpi/test:update; plugin/test:update; benchmarks/test:update; examples/test:update; pluginBenchmarks/test:update" + + // We have to drop the plugin and some akka tests for now, the plugin depends on github.com/bmc/scalasti which + // has not been updated since 2018, so no 2.13 compat. Some akka tests are dropped due to MutableBehaviour being + // dropped in the 2.13 compatible release + + // testCommand = ";set ThisBuild / useEffpiPlugin := false; effpi/test:compile; plugin/test:compile; benchmarks/test:compile; examples/test:compile; pluginBenchmarks/test:compile", + // updateCommand = ";set ThisBuild / useEffpiPlugin := false; effpi/test:update; plugin/test:update; benchmarks/test:update; examples/test:update; pluginBenchmarks/test:update" + + testCommand = ";set ThisBuild / useEffpiPlugin := false; effpi/test:compile; benchmarks/test:compile; examples/test:compile; pluginBenchmarks/test:compile", + updateCommand = ";set ThisBuild / useEffpiPlugin := false; effpi/test:update; benchmarks/test:update; examples/test:update; pluginBenchmarks/test:update" ) // TODO @oderky? It got broken by #5458 diff --git a/tests/run/t3361.check b/compiler/src-bootstrapped/.keep similarity index 100% rename from tests/run/t3361.check rename to compiler/src-bootstrapped/.keep diff --git a/tests/run/t3970.check b/compiler/src-non-bootstrapped/.keep similarity index 100% rename from tests/run/t3970.check rename to compiler/src-non-bootstrapped/.keep diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index 03d022c8fb5a..77cbe9d5c06b 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -387,7 +387,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { } debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") - for (m0 <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flag_METHOD)) { + for (m0 <- moduleClass.info.sortedMembersBasedOnFlags(required = Flag_METHOD, excluded = ExcludedForwarderFlags)) { val m = if (m0.isBridge) m0.nextOverriddenSymbol else m0 if (m == NoSymbol) log(s"$m0 is a bridge method that overrides nothing, something went wrong in a previous phase.") diff --git a/compiler/src/dotty/tools/backend/jvm/BackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/BackendInterface.scala index d428dd7e1562..f0c26583d8a1 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendInterface.scala @@ -602,7 +602,11 @@ abstract class BackendInterface extends BackendInterfaceDefinitions { def params: List[Symbol] def resultType: Type def memberInfo(s: Symbol): Type - def membersBasedOnFlags(excludedFlags: Flags, requiredFlags: Flags): List[Symbol] + + /** The members of this type that have all of `required` flags but none of `excluded` flags set. + * The members are sorted by name and signature to guarantee a stable ordering. 
+ */ + def sortedMembersBasedOnFlags(required: Flags, excluded: Flags): List[Symbol] def members: List[Symbol] def decls: List[Symbol] def underlying: Type diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala index 44dc73cafbc8..48fec71b6411 100644 --- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala @@ -131,14 +131,6 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: BackendInterface]](val bTypes: B lazy val srFloatRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.FloatRef]) lazy val srDoubleRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.DoubleRef]) - // scala.FunctionX and scala.runtim.AbstractFunctionX - lazy val FunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(FunctionClass(i)))(collection.breakOut) - lazy val AbstractFunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(AbstractFunctionClass(i)))(collection.breakOut) - lazy val AbstractFunctionArityMap : Map[ClassBType, Int] = AbstractFunctionReference.zipWithIndex.toMap - - lazy val PartialFunctionReference : ClassBType = classBTypeFromSymbol(PartialFunctionClass) - lazy val AbstractPartialFunctionReference : ClassBType = classBTypeFromSymbol(AbstractPartialFunctionClass) - lazy val BoxesRunTime: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) /** @@ -263,13 +255,6 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: BackendInterface]](va def srFloatRef : ClassBType = _coreBTypes.srFloatRef def srDoubleRef : ClassBType = _coreBTypes.srDoubleRef - def FunctionReference : Vector[ClassBType] = _coreBTypes.FunctionReference - def AbstractFunctionReference : Vector[ClassBType] = _coreBTypes.AbstractFunctionReference - def AbstractFunctionArityMap : Map[ClassBType, Int] = _coreBTypes.AbstractFunctionArityMap - - def PartialFunctionReference : ClassBType = _coreBTypes.PartialFunctionReference - def AbstractPartialFunctionReference : ClassBType = _coreBTypes.AbstractPartialFunctionReference - def BoxesRunTime: ClassBType = _coreBTypes.BoxesRunTime def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index dea62949ed79..8ace451fa300 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -878,8 +878,18 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma def =:=(other: Type): Boolean = tp =:= other - def membersBasedOnFlags(excludedFlags: Flags, requiredFlags: Flags): List[Symbol] = - tp.membersBasedOnFlags(termFlagSet(requiredFlags), termFlagSet(excludedFlags)).map(_.symbol).toList + def sortedMembersBasedOnFlags(required: Flags, excluded: Flags): List[Symbol] = { + val requiredFlagSet = termFlagSet(required) + val excludedFlagSet = termFlagSet(excluded) + // The output of `memberNames` is a Set, sort it to guarantee a stable ordering. 
+ val names = tp.memberNames(takeAllFilter).toSeq.sorted + val buffer = mutable.ListBuffer[Symbol]() + names.foreach { name => + buffer ++= tp.memberBasedOnFlags(name, requiredFlagSet, excludedFlagSet) + .alternatives.sortBy(_.signature)(Signature.lexicographicOrdering).map(_.symbol) + } + buffer.toList + } def resultType: Type = tp.resultType diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 0b778a5861c4..69768e35b8e8 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -394,11 +394,11 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter } for ((label, i) <- initialLabels.iterator.zipWithIndex) { mv.visitLabel(label) - emitLambdaDeserializeIndy(groups(i)) + emitLambdaDeserializeIndy(groups(i).toIndexedSeq) mv.visitInsn(ARETURN) } mv.visitLabel(terminalLabel) - emitLambdaDeserializeIndy(groups(numGroups - 1)) + emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) mv.visitInsn(ARETURN) } diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index bc4626d115dc..bd48a5361dfe 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -194,7 +194,7 @@ class JSCodeGen()(implicit ctx: Context) { ctx.settings.outputDir.value val pathParts = sym.fullName.toString.split("[./]") - val dir = (outputDirectory /: pathParts.init)(_.subdirectoryNamed(_)) + val dir = pathParts.init.foldLeft(outputDirectory)(_.subdirectoryNamed(_)) var filename = pathParts.last if (sym.is(ModuleClass)) diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala index 892c683181d8..ea43bc5cc3d6 100644 --- a/compiler/src/dotty/tools/dotc/Bench.scala +++ b/compiler/src/dotty/tools/dotc/Bench.scala @@ -15,7 +15,7 @@ object Bench extends Driver { @sharable private[this] var numRuns = 1 private def ntimes(n: Int)(op: => Reporter): Reporter = - (emptyReporter /: (0 until n)) ((_, _) => op) + (0 until n).foldLeft(emptyReporter)((_, _) => op) override def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Reporter = ntimes(numRuns) { diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index f3ab08a7c0cc..85db025812be 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -74,10 +74,10 @@ class Driver { val (classPaths, classNames) = fileNames0.flatMap { name => val path = Paths.get(name) if (name.endsWith(".jar")) { - new dotty.tools.io.Jar(File(name)).iterator.collect { + new dotty.tools.io.Jar(File(name)).toList.collect { case e if e.getName.endsWith(".tasty") => (name, e.getName.stripSuffix(".tasty").replace("/", ".")) - }.toList + } } else if (!name.endsWith(".tasty")) ("", name) :: Nil diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 7444dc2a9c61..09bae4f1dc5b 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -56,7 +56,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint ctx.initialize()(start) // re-initialize the base context with start def addImport(ctx: Context, refFn: () => TermRef) = ctx.fresh.setImportInfo(ImportInfo.rootImport(refFn)(ctx)) - (start.setRun(this) /: defn.RootImportFns)(addImport) + 
defn.RootImportFns.foldLeft(start.setRun(this))(addImport) } private[this] var compiling = false diff --git a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled index ea8a8709b00f..e05de4f5af4c 100644 --- a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled +++ b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled @@ -64,10 +64,10 @@ object CheckTrees { check(arg.tpe <:< formal) } val MethodType(paramNames, paramTypes) = fn.tpe.widen // checked already at construction - (args, paramNames, paramTypes).zipped foreach checkArg + args.lazyZip(paramNames).lazyZip(paramTypes) foreach checkArg case TypeApply(fn, args) => val pt @ PolyType(_) = fn.tpe.widen // checked already at construction - (args, pt.instantiateBounds(args map (_.tpe))).zipped foreach checkTypeArg + args.lazyZip(pt.instantiateBounds(args map (_.tpe))) foreach checkTypeArg case Literal(const: Constant) => case New(tpt) => check(tpt.isValueType) @@ -184,7 +184,7 @@ object CheckTrees { check(tpt.isValueType) val tparams = tpt.tpe.typeParams check(sameLength(tparams, args)) - (args, tparams map (_.info.bounds)).zipped foreach checkTypeArg + args.lazyZip(tparams map (_.info.bounds)) foreach checkTypeArg case TypeBoundsTree(lo, hi) => check(lo.isValueType); check(hi.isValueType) check(lo.tpe <:< hi.tpe) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 91627864ac64..d2140b845901 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -491,12 +491,12 @@ object desugar { // Annotations are dropped from the constructor parameters but should be // preserved in all derived parameters. val derivedTparams = { - val impliedTparamsIt = impliedTparams.toIterator + val impliedTparamsIt = impliedTparams.iterator constrTparams.map(tparam => derivedTypeParam(tparam) .withAnnotations(impliedTparamsIt.next().mods.annotations)) } val derivedVparamss = { - val constrVparamsIt = constrVparamss.toIterator.flatten + val constrVparamsIt = constrVparamss.iterator.flatten constrVparamss.nestedMap(vparam => derivedTermParam(vparam) .withAnnotations(constrVparamsIt.next().mods.annotations)) } @@ -560,7 +560,7 @@ object desugar { case _ => constrVparamss } - val nu = (makeNew(classTypeRef) /: vparamss) { (nu, vparams) => + val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => val app = Apply(nu, vparams.map(refOfDef)) vparams match { case vparam :: _ if vparam.mods.is(Given) => app.setGivenApply() @@ -610,26 +610,8 @@ object desugar { } } - // TODO When the Scala library is updated to 2.13.x add the override keyword to this generated method. 
- // (because Product.scala was updated) - def productElemNameMeth = { - val methodParam = makeSyntheticParameter(tpt = scalaDot(tpnme.Int)) - val paramRef = Ident(methodParam.name) - - val indexAsString = Apply(Select(javaDotLangDot(nme.String), nme.valueOf), paramRef) - val throwOutOfBound = Throw(New(javaDotLangDot(tpnme.IOOBException), List(List(indexAsString)))) - val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, throwOutOfBound) - - val patternMatchCases = derivedVparamss.head.zipWithIndex.map { case (param, idx) => - CaseDef(Literal(Constant(idx)), EmptyTree, Literal(Constant(param.name.decode.toString))) - } :+ defaultCase - val body = Match(paramRef, patternMatchCases) - DefDef(nme.productElementName, Nil, List(List(methodParam)), javaDotLangDot(tpnme.String), body) - .withFlags(if (defn.isNewCollections) Override | Synthetic else Synthetic) - } - if (isCaseClass) - productElemNameMeth :: copyMeths ::: ordinalMeths ::: productElemMeths + copyMeths ::: ordinalMeths ::: productElemMeths else Nil } @@ -700,10 +682,9 @@ object desugar { isEnumCase) anyRef else // todo: also use anyRef if constructor has a dependent method type (or rule that out)! - (constrVparamss :\ classTypeRef) ( - (vparams, restpe) => Function(vparams map (_.tpt), restpe)) + constrVparamss.foldRight(classTypeRef)((vparams, restpe) => Function(vparams map (_.tpt), restpe)) def widenedCreatorExpr = - (creatorExpr /: widenDefs)((rhs, meth) => Apply(Ident(meth.name), rhs :: Nil)) + widenDefs.foldLeft(creatorExpr)((rhs, meth) => Apply(Ident(meth.name), rhs :: Nil)) val applyMeths = if (mods.is(Abstract)) Nil else { @@ -790,12 +771,12 @@ object desugar { val cdef1 = addEnumFlags { val tparamAccessors = { - val impliedTparamsIt = impliedTparams.toIterator + val impliedTparamsIt = impliedTparams.iterator derivedTparams.map(_.withMods(impliedTparamsIt.next().mods)) } val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags val vparamAccessors = { - val originalVparamsIt = originalVparamss.toIterator.flatten + val originalVparamsIt = originalVparamss.iterator.flatten derivedVparamss match { case first :: rest => first.map(_.withMods(originalVparamsIt.next().mods | caseAccessor)) ++ @@ -1334,7 +1315,7 @@ object desugar { val ttree = ctx.typerPhase match { case phase: FrontEnd if phase.stillToBeEntered(parts.last) => val prefix = - ((Ident(nme.ROOTPKG): Tree) /: parts.init)((qual, name) => + parts.init.foldLeft(Ident(nme.ROOTPKG): Tree)((qual, name) => Select(qual, name.toTermName)) Select(prefix, parts.last.toTypeName) case _ => @@ -1526,7 +1507,7 @@ object desugar { val rhss = valeqs map { case GenAlias(_, rhs) => rhs } val (defpat0, id0) = makeIdPat(gen.pat) val (defpats, ids) = (pats map makeIdPat).unzip - val pdefs = (valeqs, defpats, rhss).zipped.map(makePatDef(_, Modifiers(), _, _)) + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map(makePatDef(_, Modifiers(), _, _)) val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) val allpats = gen.pat :: pats val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) diff --git a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala index fd0a7275b8a3..787d206dbf49 100644 --- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala @@ -177,7 +177,7 @@ object DesugarEnums { parentTypes.head match { case parent: RefTree if parent.name == enumClass.name => // need a widen 
method to compute correct type parameters for enum base class - val widenParamType = (appliedEnumRef /: parentTypes.tail)(makeAndType) + val widenParamType = parentTypes.tail.foldLeft(appliedEnumRef)(makeAndType) val widenParam = makeSyntheticParameter(tpt = widenParamType) val widenDef = DefDef( name = s"${cdef.name}$$to$$${enumClass.name}".toTermName, diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index fd683a5c1582..27c8f1b786f9 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -342,7 +342,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] * trait or class with this body can have as flags. */ def bodyKind(body: List[Tree])(implicit ctx: Context): FlagSet = - (NoInitsInterface /: body)((fs, stat) => fs & defKind(stat)) + body.foldLeft(NoInitsInterface)((fs, stat) => fs & defKind(stat)) // todo: fill with other methods from TreeInfo that only apply to untpd.Tree's } @@ -416,7 +416,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => Impure } - private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = (l0 /: ls)(_ `min` _) + private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = ls.foldLeft(l0)(_ `min` _) def isPurePath(tree: Tree)(implicit ctx: Context): Boolean = tree.tpe match { case tpe: ConstantType => exprPurity(tree) >= Pure @@ -827,8 +827,8 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case t1: Ident => t1.symbol.hashCode case t1 @ Select(q1, _) => t1.symbol.hashCode * 41 + q1.hash case Literal(c1) => c1.hashCode - case Apply(f1, as1) => (f1.hash /: as1)((h, arg) => h * 41 + arg.hash) - case TypeApply(f1, ts1) => (f1.hash /: ts1)((h, arg) => h * 41 + arg.tpe.hash) + case Apply(f1, as1) => as1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.hash) + case TypeApply(f1, ts1) => ts1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.tpe.hash) case _ => t1.hashCode } } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 43643465f762..7ef2d8a30f6a 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -185,11 +185,13 @@ class TreeTypeMap( def withMappedSyms(syms: List[Symbol], mapped: List[Symbol]): TreeTypeMap = { val symsChanged = syms ne mapped val substMap = withSubstitution(syms, mapped) - val fullMap = (substMap /: mapped.filter(_.isClass)) { (tmap, cls) => + val fullMap = mapped.filter(_.isClass).foldLeft(substMap) { (tmap, cls) => val origDcls = cls.info.decls.toList val mappedDcls = ctx.mapSymbols(origDcls, tmap) val tmap1 = tmap.withMappedSyms(origDcls, mappedDcls) - if (symsChanged) (origDcls, mappedDcls).zipped.foreach(cls.asClass.replace) + if (symsChanged) { + origDcls.lazyZip(mappedDcls).foreach(cls.asClass.replace) + } tmap1 } if (symsChanged || (fullMap eq substMap)) fullMap diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 1ec6b264a04b..2b689c7ef9ab 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -361,13 +361,16 @@ object Trees { val nameStart = if (point != span.start) point else { + // Use an immutable ArraySeq to work around https://github.com/scala/bug/issues/11708 + val content = collection.immutable.ArraySeq.unsafeWrapArray(source.content()) + // Point might be too far away from 
start to be recorded. In this case we fall back to scanning // forwards from the start offset for the name. // Note: This might be inaccurate since scanning might hit accidentally the same // name (e.g. in a comment) before finding the real definition. // To make this behavior more robust we'd have to change the trees for definitions to contain // a fully positioned Ident in place of a name. - val idx = source.content().indexOfSlice(realName, point) + val idx = content.indexOfSlice(realName, point) if (idx >= 0) idx else point // use `point` anyway. This is important if no source exists so scanning fails } @@ -851,7 +854,7 @@ object Trees { override def isEmpty: Boolean = trees.isEmpty override def toList: List[Tree[T]] = flatten(trees) override def toString: String = if (isEmpty) "EmptyTree" else "Thicket(" + trees.mkString(", ") + ")" - override def span: Span = (NoSpan /: trees) ((span, t) => span union t.span) + override def span: Span = trees.foldLeft(NoSpan) ((span, t) => span union t.span) override def withSpan(span: Span): this.type = mapElems(_.withSpan(span)).asInstanceOf[this.type] @@ -1365,7 +1368,7 @@ object Trees { // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. def apply(x: X, tree: Tree)(implicit ctx: Context): X - def apply(x: X, trees: Traversable[Tree])(implicit ctx: Context): X = (x /: trees)(apply) + def apply(x: X, trees: Traversable[Tree])(implicit ctx: Context): X = trees.foldLeft(x)(apply) def foldOver(x: X, tree: Tree)(implicit ctx: Context): X = if (tree.source != ctx.source && tree.source.exists) foldOver(x, tree)(ctx.withSource(tree.source)) @@ -1446,7 +1449,7 @@ object Trees { this(this(x, tpt), tree.rhs) case tree @ DefDef(name, tparams, vparamss, tpt, _) => implicit val ctx = localCtx - this(this((this(x, tparams) /: vparamss)(apply), tpt), tree.rhs) + this(this(vparamss.foldLeft(this(x, tparams))(apply), tpt), tree.rhs) case TypeDef(name, rhs) => implicit val ctx = localCtx this(x, rhs) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 5338b9e356dd..79afa05783a5 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -251,7 +251,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { makeSym(origInfo) } - val params = (tp.paramNames, tp.paramInfos).zipped.map(valueParam) + val params = tp.paramNames.lazyZip(tp.paramInfos).map(valueParam) val (paramss, rtp) = valueParamss(tp.instantiate(params map (_.termRef))) (params :: paramss, rtp) case tp => (Nil, tp.widenExpr) @@ -295,7 +295,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam)) yield TypeDef(tparam) val findLocalDummy = FindLocalDummyAccumulator(cls) - val localDummy = ((NoSymbol: Symbol) /: body)(findLocalDummy.apply) + val localDummy = body.foldLeft(NoSymbol: Symbol)(findLocalDummy.apply) .orElse(ctx.newLocalDummy(cls)) val impl = untpd.Template(constr, parents, Nil, selfType, newTypeParams ++ body) .withType(localDummy.termRef) @@ -328,7 +328,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { if (fwdMeth.allOverriddenSymbols.exists(!_.is(Deferred))) fwdMeth.setFlag(Override) polyDefDef(fwdMeth, tprefs => prefss => ref(fn).appliedToTypes(tprefs).appliedToArgss(prefss)) } - val forwarders = (fns, methNames).zipped.map(forwarder) + val forwarders = fns.lazyZip(methNames).map(forwarder) val cdef = ClassDef(cls, DefDef(constr), forwarders) Block(cdef 
:: Nil, New(cls.typeRef, Nil)) } @@ -873,7 +873,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * `tree (argss(0)) ... (argss(argss.length -1))` */ def appliedToArgss(argss: List[List[Tree]])(implicit ctx: Context): Tree = - ((tree: Tree) /: argss)(Apply(_, _)) + argss.foldLeft(tree: Tree)(Apply(_, _)) /** The current tree applied to (): `tree()` */ def appliedToNone(implicit ctx: Context): Apply = appliedToArgs(Nil) diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 18f1920d4122..6dcb63fa3eb8 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -354,7 +354,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { * PrepareInlineable. */ def New(tpt: Tree, argss: List[List[Tree]])(implicit ctx: Context): Tree = - ensureApplied((makeNew(tpt) /: argss)(Apply(_, _))) + ensureApplied(argss.foldLeft(makeNew(tpt))(Apply(_, _))) /** A new expression with constrictor and possibly type arguments. See * `New(tpt, argss)` for details. diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 24cd2e4d632b..03e212fd7dc8 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -87,7 +87,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { * creates an entry containing both of them. If there would be more than one class or source * entries for the same class it always would use the first entry of each type found on a classpath. */ - private def mergeClassesAndSources(entries: Seq[ClassRepresentation]*): Seq[ClassRepresentation] = { + private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]*): Seq[ClassRepresentation] = { // based on the implementation from MergedClassPath var count = 0 val indices = collection.mutable.HashMap[String, Int]() diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala index 6895897989d0..1c95589d3fb7 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala @@ -6,8 +6,8 @@ package dotty.tools.dotc.classpath import dotty.tools.io.AbstractFile import dotty.tools.io.ClassRepresentation -case class ClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepresentation]) { - def toTuple: (Seq[PackageEntry], Seq[ClassRepresentation]) = (packages, classesAndSources) +case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { + def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) } trait ClassFileEntry extends ClassRepresentation { diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 73702947a991..be5727c43bfe 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -51,7 +51,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath { case Some(directory) => listChildren(directory, Some(isPackage)) } val prefix = PackageNameUtils.packagePrefix(inPackage) - 
nestedDirs.map(f => PackageEntryImpl(prefix + getName(f))) + nestedDirs.toIndexedSeq.map(f => PackageEntryImpl(prefix + getName(f))) } protected def files(inPackage: String): Seq[FileEntryType] = { @@ -60,7 +60,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath { case None => emptyFiles case Some(directory) => listChildren(directory, Some(isMatchingFile)) } - files.map(f => createFileEntry(toAbstractFile(f))) + files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq } private[dotty] def list(inPackage: String): ClassPathEntries = { @@ -231,7 +231,7 @@ case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFi private def findSourceFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) - val sourceFile = Stream("scala", "java") + val sourceFile = LazyList("scala", "java") .map(ext => new JFile(dir, relativePath + "." + ext)) .collectFirst { case file if file.exists() => file } diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 7fd04fa36f72..92f462ec0c30 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -13,7 +13,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi // From AbstractFileClassLoader private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { var file: AbstractFile = base - val dirParts = pathParts.init.toIterator + val dirParts = pathParts.init.iterator while (dirParts.hasNext) { val dirPart = dirParts.next file = file.lookupName(dirPart, directory = true) @@ -25,7 +25,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi protected def emptyFiles: Array[AbstractFile] = Array.empty protected def getSubDir(packageDirName: String): Option[AbstractFile] = - Option(lookupPath(dir)(packageDirName.split(java.io.File.separator), directory = true)) + Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match { case Some(f) => dir.iterator.filter(f).toArray case _ => dir.toArray @@ -42,7 +42,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def findClassFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) + ".class" - Option(lookupPath(dir)(relativePath.split(java.io.File.separator), directory = false)) + Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) } private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage) diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 2e3250bf174a..7dd9a3cd2393 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -94,7 +94,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { val packages = collection.mutable.HashMap[String, PackageFileInfo]() def getSubpackages(dir: AbstractFile): List[AbstractFile] = - (for (file <- dir if file.isPackage) yield 
file)(collection.breakOut) + (for (file <- dir if file.isPackage) yield file).toList @tailrec def traverse(packagePrefix: String, @@ -129,7 +129,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { override private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = cachedPackages.get(inPackage) match { case None => Seq.empty case Some(PackageFileInfo(pkg, _)) => - (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut) + (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file)).toSeq } override private[dotty] def hasPackage(pkg: String) = cachedPackages.contains(pkg) diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index 42226e9a3b8f..e42414da9578 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -5,7 +5,7 @@ package dotty.tools.dotc.classpath import java.io.File import java.net.URL -import scala.collection.Seq + import dotty.tools.io.{ AbstractFile, FileZipArchive } import FileUtils.AbstractFileOps import dotty.tools.io.{ClassPath, ClassRepresentation} diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index c54bd5b08b6b..70c98b88b535 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -112,8 +112,6 @@ class ScalaSettings extends Settings.SettingGroup { val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. This is either \"always\", \"never\", or a classpath.", "always") - // TODO: Remove once we drop support for 2.12 standard library - val YnewCollections: Setting[Boolean] = BooleanSetting("-Ynew-collections", "Inform the compiler that we are using the 2.13 collection library (even if the 2.12 library is on the classpath).") val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") val YnoInline: Setting[Boolean] = BooleanSetting("-Yno-inline", "Suppress inlining.") diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 8ba6c8786b6f..84407b55d3ee 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -37,7 +37,7 @@ object Settings { def update(idx: Int, x: Any): SettingsState = if (_wasRead) - new SettingsState(values).update(idx, x) + new SettingsState(values.toSeq).update(idx, x) else { values(idx) = x this @@ -94,7 +94,7 @@ object Settings { def legalChoices: String = if (choices.isEmpty) "" else choices match { - case r: Range => r.head + ".." 
+ r.last + case r: Range => s"${r.head}..${r.last}" case xs: List[_] => xs.mkString(", ") } @@ -194,7 +194,7 @@ object Settings { class SettingGroup { private[this] val _allSettings = new ArrayBuffer[Setting[_]] - def allSettings: Seq[Setting[_]] = _allSettings + def allSettings: Seq[Setting[_]] = _allSettings.toSeq def defaultState: SettingsState = new SettingsState(allSettings map (_.default)) @@ -205,10 +205,10 @@ object Settings { userSetSettings(state).mkString("(", " ", ")") private def checkDependencies(state: ArgsSummary): ArgsSummary = - (state /: userSetSettings(state.sstate))(checkDependenciesOfSetting) + userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[_]) = - (state /: setting.depends) { (s, dep) => + setting.depends.foldLeft(state) { (s, dep) => val (depSetting, reqValue) = dep if (depSetting.valueIn(state.sstate) == reqValue) s else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 0823fbbd2746..300009312e36 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -171,10 +171,10 @@ class CheckRealizable(implicit ctx: Context) { val baseProblems = tp.baseClasses.map(_.baseTypeOf(tp)).flatMap(baseTypeProblems) - ((((Realizable: Realizability) - /: memberProblems)(_ andAlso _) - /: refinementProblems)(_ andAlso _) - /: baseProblems)(_ andAlso _) + baseProblems.foldLeft( + refinementProblems.foldLeft( + memberProblems.foldLeft( + Realizable: Realizability)(_ andAlso _))(_ andAlso _))(_ andAlso _) } /** `Realizable` if all of `tp`'s non-strict fields have realizable types, @@ -199,7 +199,7 @@ class CheckRealizable(implicit ctx: Context) { // Reason: An embedded field could well be nullable, which means it // should not be part of a path and need not be checked; but we cannot recognize // this situation until we have a typesystem that tracks nullability. - ((Realizable: Realizability) /: tp.fields)(checkField) + tp.fields.foldLeft(Realizable: Realizability)(checkField) else Realizable } diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 25bf35df6829..1fe82638e01d 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -72,10 +72,10 @@ trait ConstraintHandling[AbstractContext] { def nonParamBounds(param: TypeParamRef)(implicit actx: AbstractContext): TypeBounds = constraint.nonParamBounds(param) def fullLowerBound(param: TypeParamRef)(implicit actx: AbstractContext): Type = - (nonParamBounds(param).lo /: constraint.minLower(param))(_ | _) + constraint.minLower(param).foldLeft(nonParamBounds(param).lo)(_ | _) def fullUpperBound(param: TypeParamRef)(implicit actx: AbstractContext): Type = - (nonParamBounds(param).hi /: constraint.minUpper(param))(_ & _) + constraint.minUpper(param).foldLeft(nonParamBounds(param).hi)(_ & _) /** Full bounds of `param`, including other lower/upper params. 
* diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 1e64f416b3c9..1cfb94b10c83 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -708,7 +708,7 @@ object Contexts { /** A map that associates label and size of all uniques sets */ def uniquesSizes: Map[String, (Int, Int, Int)] = - uniqueSets.mapValues(s => (s.size, s.accesses, s.misses)) + uniqueSets.transform((_, s) => (s.size, s.accesses, s.misses)) /** Number of findMember calls on stack */ private[core] var findMemberCount: Int = 0 diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index f562311d92c5..fda2534a2479 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -98,7 +98,7 @@ object Decorators { loop(xs, 0) } - /** Like `(xs, ys).zipped.map(f)`, but returns list `xs` itself + /** Like `xs.lazyZip(ys).map(f)`, but returns list `xs` itself * - instead of a copy - if function `f` maps all elements of * `xs` to themselves. Also, it is required that `ys` is at least * as long as `xs`. diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 8351fb67ddfe..d0a5fadbdee0 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -347,23 +347,17 @@ class Definitions { @tu lazy val ScalaPredefModule: Symbol = ctx.requiredModule("scala.Predef") @tu lazy val Predef_conforms : Symbol = ScalaPredefModule.requiredMethod(nme.conforms_) @tu lazy val Predef_classOf : Symbol = ScalaPredefModule.requiredMethod(nme.classOf) + @tu lazy val Predef_identity : Symbol = ScalaPredefModule.requiredMethod(nme.identity) @tu lazy val Predef_undefined: Symbol = ScalaPredefModule.requiredMethod(nme.???) 
- def SubTypeClass(implicit ctx: Context): ClassSymbol = - if (isNewCollections) - ctx.requiredClass("scala.<:<") - else - ScalaPredefModule.requiredClass("<:<") + def SubTypeClass(implicit ctx: Context): ClassSymbol = ctx.requiredClass("scala.<:<") - def DummyImplicitClass(implicit ctx: Context): ClassSymbol = - if (isNewCollections) - ctx.requiredClass("scala.DummyImplicit") - else - ScalaPredefModule.requiredClass("DummyImplicit") + def DummyImplicitClass(implicit ctx: Context): ClassSymbol = ctx.requiredClass("scala.DummyImplicit") @tu lazy val ScalaRuntimeModule: Symbol = ctx.requiredModule("scala.runtime.ScalaRunTime") def runtimeMethodRef(name: PreName): TermRef = ScalaRuntimeModule.requiredMethodRef(name) def ScalaRuntime_drop: Symbol = runtimeMethodRef(nme.drop).symbol + @tu lazy val ScalaRuntime__hashCode: Symbol = ScalaRuntimeModule.requiredMethod(nme._hashCode_) @tu lazy val BoxesRunTimeModule: Symbol = ctx.requiredModule("scala.runtime.BoxesRunTime") @tu lazy val ScalaStaticsModule: Symbol = ctx.requiredModule("scala.runtime.Statics") @@ -385,14 +379,11 @@ class Definitions { def newGenericArrayMethod(implicit ctx: Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") def newArrayMethod(implicit ctx: Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") - // TODO: Remove once we drop support for 2.12 standard library - @tu lazy val isNewCollections: Boolean = ctx.settings.YnewCollections.value - - def getWrapVarargsArrayModule: Symbol = if (isNewCollections) ScalaRuntimeModule else ScalaPredefModule + def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule // The set of all wrap{X, Ref}Array methods, where X is a value type val WrapArrayMethods: PerRun[collection.Set[Symbol]] = new PerRun({ implicit ctx => - val methodNames = ScalaValueTypes.map(ast.tpd.wrapArrayMethodName) + nme.wrapRefArray + val methodNames = ScalaValueTypes.map(ast.tpd.wrapArrayMethodName) `union` Set(nme.wrapRefArray) methodNames.map(getWrapVarargsArrayModule.requiredMethod(_)) }) @@ -406,9 +397,8 @@ class Definitions { List(AnyClass.typeRef), EmptyScope) @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef - @tu lazy val SeqType: TypeRef = - if (isNewCollections) ctx.requiredClassRef("scala.collection.immutable.Seq") - else ctx.requiredClassRef("scala.collection.Seq") + @tu lazy val CollectionSeqType: TypeRef = ctx.requiredClassRef("scala.collection.Seq") + @tu lazy val SeqType: TypeRef = ctx.requiredClassRef("scala.collection.immutable.Seq") def SeqClass given Context: ClassSymbol = SeqType.symbol.asClass @tu lazy val Seq_apply : Symbol = SeqClass.requiredMethod(nme.apply) @tu lazy val Seq_head : Symbol = SeqClass.requiredMethod(nme.head) @@ -540,11 +530,7 @@ class Definitions { @tu lazy val ThrowableType: TypeRef = ctx.requiredClassRef("java.lang.Throwable") def ThrowableClass given Context: ClassSymbol = ThrowableType.symbol.asClass - @tu lazy val SerializableType: TypeRef = - if (isNewCollections) - JavaSerializableClass.typeRef - else - ctx.requiredClassRef("scala.Serializable") + @tu lazy val SerializableType: TypeRef = JavaSerializableClass.typeRef def SerializableClass given Context: ClassSymbol = SerializableType.symbol.asClass @tu lazy val JavaEnumClass: ClassSymbol = { @@ -581,11 +567,14 @@ class Definitions { @tu lazy val StringAdd_+ : Symbol = StringAddClass.requiredMethod(nme.raw.PLUS) @tu lazy val StringContextClass: ClassSymbol = ctx.requiredClass("scala.StringContext") - @tu lazy val StringContextS : Symbol = 
StringContextClass.requiredMethod(nme.s) - @tu lazy val StringContextRaw: Symbol = StringContextClass.requiredMethod(nme.raw_) - @tu lazy val StringContext_f : Symbol = StringContextClass.requiredMethod(nme.f) + @tu lazy val StringContext_s : Symbol = StringContextClass.requiredMethod(nme.s) + @tu lazy val StringContext_raw: Symbol = StringContextClass.requiredMethod(nme.raw_) + @tu lazy val StringContext_f : Symbol = StringContextClass.requiredMethod(nme.f) + @tu lazy val StringContext_parts: Symbol = StringContextClass.requiredMethod(nme.parts) @tu lazy val StringContextModule: Symbol = StringContextClass.companionModule @tu lazy val StringContextModule_apply: Symbol = StringContextModule.requiredMethod(nme.apply) + @tu lazy val StringContextModule_standardInterpolator: Symbol = StringContextModule.requiredMethod(nme.standardInterpolator) + @tu lazy val StringContextModule_processEscapes: Symbol = StringContextModule.requiredMethod(nme.processEscapes) @tu lazy val InternalStringContextMacroModule: Symbol = ctx.requiredModule("dotty.internal.StringContextMacro") @tu lazy val InternalStringContextMacroModule_f: Symbol = InternalStringContextMacroModule.requiredMethod(nme.f) @@ -607,10 +596,11 @@ class Definitions { @tu lazy val EnumValuesClass: ClassSymbol = ctx.requiredClass("scala.runtime.EnumValues") @tu lazy val ProductClass: ClassSymbol = ctx.requiredClass("scala.Product") - @tu lazy val Product_canEqual : Symbol = ProductClass.requiredMethod(nme.canEqual_) - @tu lazy val Product_productArity : Symbol = ProductClass.requiredMethod(nme.productArity) - @tu lazy val Product_productElement: Symbol = ProductClass.requiredMethod(nme.productElement) - @tu lazy val Product_productPrefix : Symbol = ProductClass.requiredMethod(nme.productPrefix) + @tu lazy val Product_canEqual : Symbol = ProductClass.requiredMethod(nme.canEqual_) + @tu lazy val Product_productArity : Symbol = ProductClass.requiredMethod(nme.productArity) + @tu lazy val Product_productElement : Symbol = ProductClass.requiredMethod(nme.productElement) + @tu lazy val Product_productElementName: Symbol = ProductClass.requiredMethod(nme.productElementName) + @tu lazy val Product_productPrefix : Symbol = ProductClass.requiredMethod(nme.productPrefix) @tu lazy val IteratorClass: ClassSymbol = ctx.requiredClass("scala.collection.Iterator") def IteratorModule(implicit ctx: Context): Symbol = IteratorClass.companionModule @@ -1204,7 +1194,7 @@ class Definitions { ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType) @tu private lazy val ScalaNumericValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet - @tu private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes + UnitType + BooleanType + @tu private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes `union` Set(UnitType, BooleanType) val ScalaNumericValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(implicit ctx => ScalaNumericValueTypes.map(_.symbol)) val ScalaValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(implicit ctx => ScalaValueTypes.map(_.symbol)) diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 4b0c3e415167..f8f1d1f84a34 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -585,7 +585,7 @@ object Denotations { * otherwise generate new synthetic names. 
*/ private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = - (for ((name1, name2, idx) <- (tp1.paramNames, tp2.paramNames, tp1.paramNames.indices).zipped) + (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList /** Normally, `tp1 & tp2`. diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index b4c3be1f9c24..c2be7fa11c72 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -173,7 +173,7 @@ object Flags { * lie in the given range */ private def flagRange(start: Int, end: Int) = - FlagSet((KINDFLAGS.toLong /: (start until end)) ((bits, idx) => + FlagSet((start until end).foldLeft(KINDFLAGS.toLong) ((bits, idx) => if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits)) /** The union of all flags in given flag set */ diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index e31821e2b5e0..962773aea0b0 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -115,7 +115,7 @@ final class ProperGadtConstraint private( pt => defn.AnyType ) - val tvars = (params, poly1.paramRefs).zipped.map { (sym, paramRef) => + val tvars = params.lazyZip(poly1.paramRefs).map { (sym, paramRef) => val tv = new TypeVar(paramRef, creatorState = null) mapping = mapping.updated(sym, tv) reverseMapping = reverseMapping.updated(tv.origin, sym) @@ -251,12 +251,12 @@ final class ProperGadtConstraint private( } override def fullLowerBound(param: TypeParamRef)(implicit ctx: Context): Type = - (nonParamBounds(param).lo /: constraint.minLower(param)) { + constraint.minLower(param).foldLeft(nonParamBounds(param).lo) { (t, u) => t | externalize(u) } override def fullUpperBound(param: TypeParamRef)(implicit ctx: Context): Type = - (nonParamBounds(param).hi /: constraint.minUpper(param)) { + constraint.minUpper(param).foldLeft(nonParamBounds(param).hi) { (t, u) => t & externalize(u) } diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index ec0a6be801b3..af173b1a9bd5 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -349,7 +349,7 @@ object NameKinds { val OuterSelectName: NumberedNameKind = new NumberedNameKind(OUTERSELECT, "OuterSelect") { def mkString(underlying: TermName, info: ThisInfo) = { assert(underlying.isEmpty) - info.num + "_" + s"${info.num}_" } } diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index a7490274b0c2..9d2742e4e124 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -271,7 +271,7 @@ object NameOps { } def unmangle(kinds: List[NameKind]): N = { - val unmangled = (name /: kinds)(_.unmangle(_)) + val unmangled = kinds.foldLeft(name)(_.unmangle(_)) if (unmangled eq name) name else unmangled.unmangle(kinds) } } diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index 481a5821dd69..ff7173657089 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -164,7 +164,7 @@ object Names { override def isTypeName: 
Boolean = false override def isTermName: Boolean = true override def toTermName: TermName = this - override def asTypeName: Nothing = throw new ClassCastException(this + " is not a type name") + override def asTypeName: Nothing = throw new ClassCastException(s"$this is not a type name") override def asTermName: TermName = this @sharable // because it is only modified in the synchronized block of toTypeName. @@ -436,7 +436,7 @@ object Names { override def isTermName: Boolean = false override def toTypeName: TypeName = this override def asTypeName: TypeName = this - override def asTermName: Nothing = throw new ClassCastException(this + " is not a term name") + override def asTermName: Nothing = throw new ClassCastException(s"$this is not a term name") override def asSimpleName: SimpleName = toTermName.asSimpleName override def toSimpleName: SimpleName = toTermName.toSimpleName diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 76d5ac88d38c..a47b13e61113 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -301,8 +301,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val lo = normalizedType(bounds.lo, loBuf, isUpper = false) val hi = normalizedType(bounds.hi, hiBuf, isUpper = true) current = updateEntry(current, param, bounds.derivedTypeBounds(lo, hi)) - current = (current /: loBuf)(order(_, _, param)) - current = (current /: hiBuf)(order(_, param, _)) + current = loBuf.foldLeft(current)(order(_, _, param)) + current = hiBuf.foldLeft(current)(order(_, param, _)) loBuf.clear() hiBuf.clear() i += 1 @@ -323,8 +323,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(contains(param2), i"$param2") val newUpper = param2 :: exclusiveUpper(param2, param1) val newLower = param1 :: exclusiveLower(param1, param2) - val current1 = (current /: newLower)(upperLens.map(this, _, _, newUpper ::: _)) - val current2 = (current1 /: newUpper)(lowerLens.map(this, _, _, newLower ::: _)) + val current1 = newLower.foldLeft(current)(upperLens.map(this, _, _, newUpper ::: _)) + val current2 = newUpper.foldLeft(current1)(lowerLens.map(this, _, _, newLower ::: _)) current2 } @@ -508,7 +508,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } def mergeParams(ps1: List[TypeParamRef], ps2: List[TypeParamRef]) = - (ps1 /: ps2)((ps1, p2) => if (ps1.contains(p2)) ps1 else p2 :: ps1) + ps2.foldLeft(ps1)((ps1, p2) => if (ps1.contains(p2)) ps1 else p2 :: ps1) // Must be symmetric def mergeEntries(e1: Type, e2: Type): Type = @@ -679,7 +679,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val assocs = for (param <- domainParams) yield - param.binder.paramNames(param.paramNum) + ": " + entryText(entry(param)) + s"${param.binder.paramNames(param.paramNum)}: ${entryText(entry(param))}" assocs.mkString("\n") } constrainedText + "\n" + boundsText diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 5341fec243ec..88cebc7d35cb 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -212,8 +212,8 @@ object Phases { this.squashedPhases = this.phases } - config.println(s"Phases = ${phases.deep}") - config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.deep}") + config.println(s"Phases = ${phases.toList}") + config.println(s"nextDenotTransformerId = 
${nextDenotTransformerId.toList}") } private[this] var myTyperPhase: Phase = _ diff --git a/compiler/src/dotty/tools/dotc/core/Signature.scala b/compiler/src/dotty/tools/dotc/core/Signature.scala index 977bcef3f252..2b6de325ffcd 100644 --- a/compiler/src/dotty/tools/dotc/core/Signature.scala +++ b/compiler/src/dotty/tools/dotc/core/Signature.scala @@ -1,27 +1,42 @@ package dotty.tools.dotc package core +import scala.annotation.tailrec + import Names._, Types._, Contexts._, StdNames._, Decorators._ import TypeErasure.sigName - -import scala.annotation.tailrec +import Signature._ /** The signature of a denotation. - * Overloaded denotations with the same name are distinguished by - * their signatures. A signature of a method (of type PolyType,MethodType, or ExprType) is - * composed of a list of signature names, one for each parameter type, plus a signature for - * the result type. Methods are uncurried before taking their signatures. - * The signature name of a type is the fully qualified name of the type symbol of the type's erasure. + * + * Same-named denotations with different signatures are considered to be + * overloads, see `SingleDenotation#matches` for more details. + * + * A _method signature_ (a value of type `Signature`, excluding `NotAMethod` + * and `OverloadedSignature`) is composed of a list of _parameter signatures_, + * plus a _type signature_ for the final result type. + * + * A _parameter signature_ (a value of type `ParamSig`) is either an integer, + * representing the number of type parameters in a type parameter section, or + * the _type signature_ of a term parameter. + * + * A _type signature_ is the fully qualified name of the type symbol of the + * type's erasure. * * For instance a definition * - * def f(x: Int)(y: List[String]): String + * def f[T, S](x: Int)(y: List[T]): S * * would have signature * * Signature( - * List("scala.Int".toTypeName, "scala.collection.immutable.List".toTypeName), - * "scala.String".toTypeName) + * List(2, "scala.Int".toTypeName, "scala.collection.immutable.List".toTypeName), + * "java.lang.Object".toTypeName) + * + * Note that `paramsSig` has one entry for *a whole type parameter section* but + * one entry *for each term parameter* (currently, methods in Dotty can only + * have one type parameter section but this encoding leaves the door open for + * supporting multiple sections). * * The signatures of non-method types are always `NotAMethod`. * @@ -31,19 +46,18 @@ import scala.annotation.tailrec * - tpnme.WILDCARD Arises from a Wildcard or error type * - tpnme.Uninstantiated Arises from an uninstantiated type variable */ -case class Signature(paramsSig: List[TypeName], resSig: TypeName) { - import Signature._ +case class Signature(paramsSig: List[ParamSig], resSig: TypeName) { /** Two names are consistent if they are the same or one of them is tpnme.Uninstantiated */ - private def consistent(name1: TypeName, name2: TypeName) = + private def consistent(name1: ParamSig, name2: ParamSig) = name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated /** Does this signature coincide with that signature on their parameter parts? - * This is the case if all parameter names are _consistent_, i.e. they are either + * This is the case if all parameter signatures are _consistent_, i.e. they are either * equal or on of them is tpnme.Uninstantiated. 
*/ final def consistentParams(that: Signature)(implicit ctx: Context): Boolean = { - @tailrec def loop(names1: List[TypeName], names2: List[TypeName]): Boolean = + @tailrec def loop(names1: List[ParamSig], names2: List[ParamSig]): Boolean = if (names1.isEmpty) names2.isEmpty else !names2.isEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail) if (ctx.erasedTypes && (this == NotAMethod) != (that == NotAMethod)) @@ -56,22 +70,24 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { /** `that` signature, but keeping all corresponding parts of `this` signature. */ final def updateWith(that: Signature): Signature = { - def update(name1: TypeName, name2: TypeName): TypeName = + def update[T <: ParamSig](name1: T, name2: T): T = if (consistent(name1, name2)) name1 else name2 if (this == that) this else if (!this.paramsSig.hasSameLengthAs(that.paramsSig)) that else { val mapped = Signature( - this.paramsSig.zipWithConserve(that.paramsSig)(update), + // DOTTY: we shouldn't have to explicitly pass a type argument to `update`, + // see https://github.com/lampepfl/dotty/issues/4867 + this.paramsSig.zipWithConserve(that.paramsSig)(update[ParamSig]), update(this.resSig, that.resSig)) if (mapped == this) this else mapped } } /** The degree to which this signature matches `that`. - * If parameter names are consistent and result types names match (i.e. they are the same + * If parameter signatures are consistent and result types names match (i.e. they are the same * or one is a wildcard), the result is `FullMatch`. - * If only the parameter names are consistent, the result is `ParamMatch` before erasure and + * If only the parameter signatures are consistent, the result is `ParamMatch` before erasure and * `NoMatch` otherwise. * If the parameters are inconsistent, the result is always `NoMatch`. */ @@ -94,8 +110,16 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { * * Like Signature#apply, the result is only cacheable if `isUnderDefined == false`. */ - def prepend(params: List[Type], isJava: Boolean)(implicit ctx: Context): Signature = - Signature(params.map(p => sigName(p, isJava)) ++ paramsSig, resSig) + def prependTermParams(params: List[Type], isJava: Boolean)(implicit ctx: Context): Signature = + Signature(params.map(p => sigName(p, isJava)) ::: paramsSig, resSig) + + /** Construct a signature by prepending the length of a type parameter section + * to the parameter part of this signature. + * + * Like Signature#apply, the result is only cacheable if `isUnderDefined == false`. + */ + def prependTypeParams(typeParamSigsSectionLength: Int)(implicit ctx: Context): Signature = + Signature(typeParamSigsSectionLength :: paramsSig, resSig) /** A signature is under-defined if its paramsSig part contains at least one * `tpnme.Uninstantiated`. Under-defined signatures arise when taking a signature @@ -106,6 +130,10 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { } object Signature { + /** A parameter signature, see the documentation of `Signature` for more information. */ + type ParamSig = TypeName | Int + // Erasure means that our Ints will be boxed, but Integer#valueOf caches + // small values, so the performance hit should be minimal. 
enum MatchDegree { case NoMatch, ParamMatch, FullMatch @@ -131,4 +159,32 @@ object Signature { assert(!resultType.isInstanceOf[ExprType]) apply(Nil, sigName(resultType, isJava)) } + + val lexicographicOrdering: Ordering[Signature] = new Ordering[Signature] { + val paramSigOrdering: Ordering[Signature.ParamSig] = new Ordering[Signature.ParamSig] { + def compare(x: ParamSig, y: ParamSig): Int = x match { // `(x, y) match` leads to extra allocations + case x: TypeName => + y match { + case y: TypeName => + // `Ordering[TypeName]` doesn't work due to `Ordering` still being invariant + the[Ordering[Name]].compare(x, y) + case y: Int => + 1 + } + case x: Int => + y match { + case y: Name => + -1 + case y: Int => + x - y + } + } + } + def compare(x: Signature, y: Signature): Int = { + import scala.math.Ordering.Implicits.seqOrdering + val paramsOrdering = seqOrdering(paramSigOrdering).compare(x.paramsSig, y.paramsSig) + if (paramsOrdering != 0) paramsOrdering + else the[Ordering[Name]].compare(x.resSig, y.resSig) + } + } } diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index e331c3a84dc1..a25996e7d9aa 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -116,9 +116,9 @@ object StdNames { val ANON_FUN: N = str.ANON_FUN val BITMAP_PREFIX: N = "bitmap$" // @darkdimius: $bitmap? Also, the next 4 names are unused. val BITMAP_NORMAL: N = BITMAP_PREFIX // initialization bitmap for public/protected lazy vals - val BITMAP_TRANSIENT: N = BITMAP_PREFIX + "trans$" // initialization bitmap for transient lazy vals - val BITMAP_CHECKINIT: N = BITMAP_PREFIX + "init$" // initialization bitmap for checkinit values - val BITMAP_CHECKINIT_TRANSIENT: N = BITMAP_PREFIX + "inittrans$" // initialization bitmap for transient checkinit values + val BITMAP_TRANSIENT: N = s"${BITMAP_PREFIX}trans$$" // initialization bitmap for transient lazy vals + val BITMAP_CHECKINIT: N = s"${BITMAP_PREFIX}init$$" // initialization bitmap for checkinit values + val BITMAP_CHECKINIT_TRANSIENT: N = s"${BITMAP_PREFIX}inittrans$$" // initialization bitmap for transient checkinit values val DEFAULT_GETTER: N = str.DEFAULT_GETTER val DEFAULT_GETTER_INIT: N = "$lessinit$greater" val DO_WHILE_PREFIX: N = "doWhile$" @@ -141,7 +141,7 @@ object StdNames { val INITIALIZER_PREFIX: N = "initial$" val BOUNDTYPE_ANNOT: N = "$boundType$" val QUOTE: N = "'" - val TYPE_QUOTE: N = "type_'" + val TYPE_QUOTE: N = "type_'" val TRAIT_SETTER_SEPARATOR: N = str.TRAIT_SETTER_SEPARATOR // value types (and AnyRef) are all used as terms as well @@ -368,6 +368,7 @@ object StdNames { val TypeRef: N = "TypeRef" val UNIT : N = "UNIT" val add_ : N = "add" + val acc: N = "acc" val annotation: N = "annotation" val anyHash: N = "anyHash" val anyValClass: N = "anyValClass" @@ -448,6 +449,7 @@ object StdNames { val getOrElse: N = "getOrElse" val hasNext: N = "hasNext" val hashCode_ : N = "hashCode" + val _hashCode_ : N = "_hashCode" val hash_ : N = "hash" val head: N = "head" val higherKinds: N = "higherKinds" @@ -504,7 +506,9 @@ object StdNames { val ordinalDollar: N = "$ordinal" val ordinalDollar_ : N = "_$ordinal" val origin: N = "origin" + val parts: N = "parts" val prefix : N = "prefix" + val processEscapes: N = "processEscapes" val productArity: N = "productArity" val productElement: N = "productElement" val productElementName: N = "productElementName" @@ -536,6 +540,7 @@ object StdNames { val setType: N = "setType" val setTypeSignature: 
N = "setTypeSignature" val splice: N = "$splice" + val standardInterpolator: N = "standardInterpolator" val staticClass : N = "staticClass" val staticModule : N = "staticModule" val staticPackage : N = "staticPackage" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 373fa83b1c96..d1d53494b1ff 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -360,7 +360,7 @@ object SymDenotations { /** Add all given annotations to this symbol */ final def addAnnotations(annots: TraversableOnce[Annotation])(implicit ctx: Context): Unit = - annots.foreach(addAnnotation) + annots.iterator.foreach(addAnnotation) @tailrec private def dropOtherAnnotations(anns: List[Annotation], cls: Symbol)(implicit ctx: Context): List[Annotation] = anns match { diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index ca6b9ccdc9c9..04459cd1674d 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -331,7 +331,7 @@ abstract class SymbolLoader extends LazyType { self => else "error while loading " + root.name + ",\n" + msg) } try { - val start = currentTime + val start = System.currentTimeMillis if (Config.tracingEnabled && ctx.settings.YdebugTrace.value) trace(s">>>> loading ${root.debugString}", _ => s"<<<< loaded ${root.debugString}") { doComplete(root) diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 3aaf8cb80838..0d7ca769eb23 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -288,7 +288,7 @@ trait Symbols { this: Context => } val tparams = tparamBuf.toList val bounds = boundsFn(trefBuf.toList) - for ((name, tparam, bound) <- (names, tparams, bounds).zipped) + for ((name, tparam, bound) <- names.lazyZip(tparams).lazyZip(bounds)) tparam.denot = SymDenotation(tparam, owner, name, flags | owner.typeParamCreationFlags, bound) tparams } @@ -323,7 +323,7 @@ trait Symbols { this: Context => newNakedSymbol[original.ThisName](original.coord) } val ttmap1 = ttmap.withSubstitution(originals, copies) - (originals, copies).zipped foreach { (original, copy) => + originals.lazyZip(copies) foreach { (original, copy) => val odenot = original.denot val oinfo = original.info match { case ClassInfo(pre, _, parents, decls, selfInfo) => diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 8e01d30039cd..6c1521936e75 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -330,7 +330,7 @@ class TypeApplications(val self: Type) extends AnyVal { !tparams.corresponds(hkParams)(_.paramVariance == _.paramVariance) && tparams.corresponds(hkParams)(varianceConforms) => HKTypeLambda( - (tparams, hkParams).zipped.map((tparam, hkparam) => + tparams.lazyZip(hkParams).map((tparam, hkparam) => tparam.paramName.withVariance(hkparam.paramVariance)))( tl => arg.paramInfos.map(_.subst(arg, tl).bounds), tl => arg.resultType.subst(arg, tl) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index d5eb260caf02..c8d7a0de975b 100644 --- 
a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -822,7 +822,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w */ def isMatchingApply(tp1: Type): Boolean = tp1 match { case AppliedType(tycon1, args1) => - tycon1.dealiasKeepRefiningAnnots match { + def loop(tycon1: Type, args1: List[Type]): Boolean = tycon1.dealiasKeepRefiningAnnots match { case tycon1: TypeParamRef => (tycon1 == tycon2 || canConstrain(tycon1) && isSubType(tycon1, tycon2)) && @@ -865,12 +865,13 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w false } case tycon1: TypeVar => - isMatchingApply(tycon1.underlying) + loop(tycon1.underlying, args1) case tycon1: AnnotatedType if !tycon1.isRefining => - isMatchingApply(tycon1.underlying) + loop(tycon1.underlying, args1) case _ => false } + loop(tycon1, args1) case _ => false } @@ -1724,7 +1725,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w } /** The greatest lower bound of a list types */ - final def glb(tps: List[Type]): Type = ((AnyType: Type) /: tps)(glb) + final def glb(tps: List[Type]): Type = tps.foldLeft(AnyType: Type)(glb) def widenInUnions(implicit ctx: Context): Boolean = ctx.scala2Mode || ctx.erasedTypes @@ -1767,7 +1768,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w /** The least upper bound of a list of types */ final def lub(tps: List[Type]): Type = - ((NothingType: Type) /: tps)(lub(_,_, canConstrain = false)) + tps.foldLeft(NothingType: Type)(lub(_,_, canConstrain = false)) /** Try to produce joint arguments for a lub `A[T_1, ..., T_n] | A[T_1', ..., T_n']` using * the following strategies: @@ -1962,10 +1963,10 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w original(applied(tp1), tp2) else if (tparams1.hasSameLengthAs(tparams2)) HKTypeLambda( - paramNames = (HKTypeLambda.syntheticParamNames(tparams1.length), tparams1, tparams2) - .zipped.map((pname, tparam1, tparam2) => + paramNames = HKTypeLambda.syntheticParamNames(tparams1.length).lazyZip(tparams1).lazyZip(tparams2) + .map((pname, tparam1, tparam2) => pname.withVariance((tparam1.paramVariance + tparam2.paramVariance) / 2)))( - paramInfosExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) => + paramInfosExp = tl => tparams1.lazyZip(tparams2).map((tparam1, tparam2) => tl.integrate(tparams1, tparam1.paramInfoAsSeenFrom(tp1)).bounds & tl.integrate(tparams2, tparam2.paramInfoAsSeenFrom(tp2)).bounds), resultTypeExp = tl => @@ -2210,7 +2211,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w } } - (args1, args2, tycon1.typeParams).zipped.exists { + args1.lazyZip(args2).lazyZip(tycon1.typeParams).exists { (arg1, arg2, tparam) => val v = tparam.paramVariance if (v > 0) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 9b5d943402f0..fc324ffdabb5 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -435,7 +435,6 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean val sym = tp.symbol if (!sym.isClass) this(tp.translucentSuperType) else if (semiEraseVCs && isDerivedValueClass(sym)) eraseDerivedValueClassRef(tp) - else if (sym == defn.ArrayClass) apply(tp.appliedTo(TypeBounds.empty)) // i966 shows that we can hit a raw Array type. 
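
Many of the mechanical rewrites in these compiler hunks come from the 2.13 collections library: the symbolic fold aliases `/:` and `:\` give way to `foldLeft`/`foldRight`, and `(xs, ys).zipped` becomes `xs.lazyZip(ys)`. A small standalone sketch of the new spellings (values made up for illustration):

    val xs = List(1, 2, 3)
    val ys = List("a", "b", "c")

    // (0 /: xs)(_ + _) is now written as:
    val sum = xs.foldLeft(0)(_ + _)                    // 6

    // (xs :\ List.empty[Int])(_ :: _) is now written as:
    val copied = xs.foldRight(List.empty[Int])(_ :: _) // List(1, 2, 3)

    // (xs, ys).zipped.map(...) is now written as:
    val zipped = xs.lazyZip(ys).map((n, s) => s * n)   // List("a", "bb", "ccc")
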
else if (defn.isSyntheticFunctionClass(sym)) defn.erasedFunctionType(sym) else eraseNormalClassRef(tp) case tp: AppliedType => diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 6db008b58d72..ef2a19c6848b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -673,5 +673,5 @@ object TypeOps { // TODO: Move other typeops here. It's a bit weird that they are a part of `ctx` def nestedPairs(ts: List[Type])(implicit ctx: Context): Type = - (ts :\ (defn.UnitType: Type))(defn.PairClass.typeRef.appliedTo(_, _)) + ts.foldRight(defn.UnitType: Type)(defn.PairClass.typeRef.appliedTo(_, _)) } diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index bb2ec044fc80..d32352932d1d 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -77,7 +77,7 @@ class TyperState(private val previous: TyperState /* | Null */) { new TyperState(this).setReporter(new StoreReporter(reporter)).setCommittable(isCommittable) /** The uninstantiated variables */ - def uninstVars: Seq[TypeVar] = constraint.uninstVars + def uninstVars: collection.Seq[TypeVar] = constraint.uninstVars /** The set of uninstantiated type variables which have this state as their owning state */ private[this] var myOwnedVars: TypeVars = SimpleIdentitySet.empty diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 322a46f6dff4..f88696789c90 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -25,7 +25,7 @@ import printing.Texts._ import printing.Printer import Hashable._ import Uniques._ -import collection.{mutable, Seq} +import collection.mutable import config.Config import annotation.tailrec import language.implicitConversions @@ -757,9 +757,9 @@ object Types { } def memberDenots(keepOnly: NameFilter, f: (Name, mutable.Buffer[SingleDenotation]) => Unit)(implicit ctx: Context): Seq[SingleDenotation] = { - val buf = mutable.ArrayBuffer[SingleDenotation]() + val buf = mutable.ListBuffer[SingleDenotation]() for (name <- memberNames(keepOnly)) f(name, buf) - buf + buf.toList } /** The set of abstract term members of this type. 
*/ @@ -3154,7 +3154,7 @@ object Types { else { val result = if (paramInfos.isEmpty) NoDeps - else (NoDeps /: paramInfos.tail)(depStatus(_, _)) + else paramInfos.tail.foldLeft(NoDeps)(depStatus(_, _)) if ((result & Provisional) == 0) myParamDependencyStatus = result (result & StatusMask).toByte } @@ -3216,7 +3216,7 @@ object Types { def computeSignature(implicit ctx: Context): Signature = { val params = if (isErasedMethod) Nil else paramInfos - resultSignature.prepend(params, isJavaMethod) + resultSignature.prependTermParams(params, isJavaMethod) } protected def prefixString: String = companion.prefixString @@ -3402,7 +3402,8 @@ object Types { assert(resType.isInstanceOf[TermType], this) assert(paramNames.nonEmpty) - def computeSignature(implicit ctx: Context): Signature = resultSignature + def computeSignature(implicit ctx: Context): Signature = + resultSignature.prependTypeParams(paramNames.length) override def isContextualMethod = resType.isContextualMethod override def isImplicitMethod = resType.isImplicitMethod @@ -4235,7 +4236,7 @@ object Types { object AnnotatedType { def make(underlying: Type, annots: List[Annotation]): Type = - (underlying /: annots)(AnnotatedType(_, _)) + annots.foldLeft(underlying)(AnnotatedType(_, _)) } // Special type objects and classes ----------------------------------------------------- diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 1d87723cd258..483d06be6dd1 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -132,8 +132,20 @@ class ClassfileParser( /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled. * Updates the read pointer of 'in'. 
*/ def parseParents: List[Type] = { - val superType = if (isAnnotation) { in.nextChar; defn.AnnotationClass.typeRef } - else pool.getSuperClass(in.nextChar).typeRef + val superType = + if (isAnnotation) { + in.nextChar + defn.AnnotationClass.typeRef + } + else if (classRoot.symbol == defn.ComparableClass || + classRoot.symbol == defn.JavaCloneableClass || + classRoot.symbol == defn.JavaSerializableClass) { + // Treat these interfaces as universal traits + in.nextChar + defn.AnyType + } + else + pool.getSuperClass(in.nextChar).typeRef val ifaceCount = in.nextChar var ifaces = for (i <- (0 until ifaceCount).toList) yield pool.getSuperClass(in.nextChar).typeRef // Dotty deviation: was @@ -409,7 +421,7 @@ class ClassfileParser( if (sig(index) != ':') // guard against empty class bound ts += objToAny(sig2type(tparams, skiptvs)) } - TypeBounds.upper(((NoType: Type) /: ts)(_ & _) orElse defn.AnyType) + TypeBounds.upper(ts.foldLeft(NoType: Type)(_ & _) orElse defn.AnyType) } var tparams = classTParams @@ -866,7 +878,7 @@ class ClassfileParser( def originalName: SimpleName = pool.getName(name) override def toString: String = - originalName + " in " + outerName + "(" + externalName + ")" + s"$originalName in $outerName($externalName)" } object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] { @@ -1152,7 +1164,7 @@ class ClassfileParser( val start = starts(index) if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start) val len = in.getChar(start + 1) - bytesBuffer ++= in.buf.view(start + 3, start + 3 + len) + bytesBuffer ++= in.buf.view.slice(start + 3, start + 3 + len) } value = getSubArray(bytesBuffer.toArray) values(indices.head) = value diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala index edab7045095a..30ed9ac5aeae 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala @@ -4,7 +4,7 @@ package core package tasty import collection.mutable -import Names.{Name, chrs, SimpleName, DerivedName} +import Names.{Name, chrs, SimpleName, DerivedName, TypeName} import NameKinds._ import Decorators._ import TastyBuffer._ @@ -23,7 +23,13 @@ class NameBuffer extends TastyBuffer(10000) { case None => name1 match { case SignedName(original, Signature(params, result)) => - nameIndex(original); nameIndex(result); params.foreach(nameIndex) + nameIndex(original) + nameIndex(result) + params.foreach { + case param: TypeName => + nameIndex(param) + case _ => + } case AnyQualifiedName(prefix, name) => nameIndex(prefix); nameIndex(name) case AnyUniqueName(original, separator, num) => @@ -50,6 +56,16 @@ class NameBuffer extends TastyBuffer(10000) { def writeNameRef(ref: NameRef): Unit = writeNat(ref.index) def writeNameRef(name: Name): Unit = writeNameRef(nameRefs(name.toTermName)) + def writeParamSig(paramSig: Signature.ParamSig): Unit ={ + val encodedValue = paramSig match { + case paramSig: TypeName => + nameRefs(paramSig.toTermName).index + case paramSig: Int => + -paramSig + } + writeInt(encodedValue) + } + def pickleNameContents(name: Name): Unit = { val tag = name.toTermName.info.kind.tag writeByte(tag) @@ -70,10 +86,10 @@ class NameBuffer extends TastyBuffer(10000) { } case AnyNumberedName(original, num) => withLength { writeNameRef(original); writeNat(num) } - case SignedName(original, Signature(params, result)) => + case SignedName(original, Signature(paramsSig, result)) => withLength( - { writeNameRef(original); 
writeNameRef(result); params.foreach(writeNameRef) }, - if ((params.length + 2) * maxIndexWidth <= maxNumInByte) 1 else 2) + { writeNameRef(original); writeNameRef(result); paramsSig.foreach(writeParamSig) }, + if ((paramsSig.length + 2) * maxIndexWidth <= maxNumInByte) 1 else 2) case DerivedName(original, _) => withLength { writeNameRef(original) } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index 607c5d6c46ec..b19a96bfd7c0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -42,7 +42,10 @@ Macro-format: INLINEACCESSOR Length underlying_NameRef -- inline$A OBJECTCLASS Length underlying_NameRef -- A$ (name of the module class for module A) - SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef* -- name + signature + SIGNED Length original_NameRef resultSig_NameRef ParamSig* -- name + signature + + ParamSig = Int // If negative, the absolute value represents the length of a type parameter section + // If positive, this is a NameRef for the fully qualified name of a term parameter. NameRef = Nat // ordinal number of name in name table, starting from 1. @@ -248,7 +251,7 @@ Standard Section: "Comments" Comment* object TastyFormat { final val header: Array[Int] = Array(0x5C, 0xA1, 0xAB, 0x1F) - val MajorVersion: Int = 16 + val MajorVersion: Int = 17 val MinorVersion: Int = 0 /** Tags used to serialize names */ diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index 5be43b1c4ba7..3d329e185cd4 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -33,17 +33,17 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) { sb.append("Trees:\n") unpickle(new TreeSectionUnpickler) match { case Some(s) => sb.append(s) - case _ => Unit + case _ => } sb.append("\n\n") unpickle(new PositionSectionUnpickler) match { case Some(s) => sb.append(s) - case _ => Unit + case _ => } sb.append("\n\n") unpickle(new CommentSectionUnpickler) match { case Some(s) => sb.append(s) - case _ => Unit + case _ => } sb.result } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala index 459a8fcfc534..d484514b3521 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala @@ -15,7 +15,7 @@ object TastyString { /** Encode TASTY bytes into a List of String */ def pickle(bytes: Array[Byte]): PickledQuote = { val str = new String(Base64.getEncoder().encode(bytes), UTF_8) - str.sliding(maxStringSize, maxStringSize).toList + str.toSeq.sliding(maxStringSize, maxStringSize).map(_.unwrap).toList } /** Decode the List of Strings into TASTY bytes */ diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index a849b467f7e7..1d7d0f825686 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -36,6 +36,14 @@ class TastyUnpickler(reader: TastyReader) { private def readName(): TermName = nameAtRef(readNameRef()) private def readString(): String = readName().toString + private def readParamSig(): Signature.ParamSig = { + val ref = readInt() + if (ref < 
0) + ref.abs + else + nameAtRef(NameRef(ref)).toTypeName + } + private def readNameContents(): TermName = { val tag = readByte() val length = readNat() @@ -58,8 +66,10 @@ class TastyUnpickler(reader: TastyReader) { case SIGNED => val original = readName() val result = readName().toTypeName - val params = until(end)(readName().toTypeName) - var sig = Signature(params, result) + // DOTTY: we shouldn't have to give an explicit type to paramsSig, + // see https://github.com/lampepfl/dotty/issues/4867 + val paramsSig: List[Signature.ParamSig] = until(end)(readParamSig()) + val sig = Signature(paramsSig, result) SignedName(original, sig) case _ => simpleNameKindOfTag(tag)(readName()) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 1f62b948b0a1..cbac6f01bf38 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -271,7 +271,7 @@ class TreePickler(pickler: TastyPickler) { writeByte(tag) withLength { pickleType(tpe.resultType, richTypes = true) - (tpe.paramNames, tpe.paramInfos).zipped.foreach { (name, tpe) => + tpe.paramNames.lazyZip(tpe.paramInfos).foreach { (name, tpe) => pickleName(name); pickleType(tpe) } } diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala index f50e53d6b882..634faeeee0d4 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala @@ -117,7 +117,7 @@ object PickleFormat { * len is remaining length after `len`. */ val MajorVersion: Int = 5 - val MinorVersion: Int = 0 + val MinorVersion: Int = 2 final val TERMname = 1 final val TYPEname = 2 diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index b032b6f45593..3e9c2635cad8 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -348,16 +348,27 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas def atEnd = readIndex == end def readExtSymbol(): Symbol = { - val name = readNameRef().decode + val nameRef = readNameRef() + var name = nameRef.decode + + // If the symbol tag is EXTMODCLASSref, then we know that the method names + // mangling do not make sense, but in general we don't know what kind of + // symbol we're reading at this point, so we don't know which unmanglings + // are safe to apply. Empirically, we at least need to unmangle default + // getter names, since they're used to encode the default parameters of + // annotations, but more might be needed. + if (tag != EXTMODCLASSref) + name = name.unmangle(Scala2MethodNameKinds) + val owner = if (atEnd) loadingMirror.RootClass else readSymbolRef() def adjust(denot: Denotation) = { val denot1 = denot.disambiguate(p) val sym = denot1.symbol if (denot.exists && !denot1.exists) { // !!!DEBUG - val alts = denot.alternatives map (d => d + ":" + d.info + "/" + d.signature) + val alts = denot.alternatives map (d => s"$d:${d.info}/${d.signature}") System.err.println(s"!!! 
disambiguation failure: $alts") - val members = denot.alternatives.head.symbol.owner.info.decls.toList map (d => d + ":" + d.info + "/" + d.signature) + val members = denot.alternatives.head.symbol.owner.info.decls.toList map (d => s"$d:${d.info}/${d.signature}") System.err.println(s"!!! all members: $members") } if (tag == EXTref) sym else sym.moduleClass @@ -521,9 +532,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else if (isModuleClassRoot) completeRoot( moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule, infoRef), privateWithin) - else if (name == tpnme.REFINE_CLASS) - // create a type alias instead - ctx.newSymbol(owner, name, flags, localMemberUnpickler, privateWithin, coord = start) else { def completer(cls: Symbol) = { val unpickler = new ClassUnpickler(infoRef) withDecls symScope(cls) @@ -569,7 +577,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val tp = at(inforef, () => readType()(ctx)) denot match { - case denot: ClassDenotation => + case denot: ClassDenotation if !isRefinementClass(denot.symbol) => val selfInfo = if (atEnd) NoType else readTypeRef() setClassInfo(denot, tp, fromScala2 = true, selfInfo) case denot => @@ -762,19 +770,17 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) case REFINEDtpe => - val clazz = readSymbolRef() + val clazz = readSymbolRef().asClass val decls = symScope(clazz) symScopes(clazz) = EmptyScope // prevent further additions val parents = until(end, () => readTypeRef()) val parent = parents.reduceLeft(AndType(_, _)) if (decls.isEmpty) parent else { - def subst(info: Type, rt: RecType) = - if (clazz.isClass) info.substThis(clazz.asClass, rt.recThis) - else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case. 
+ def subst(info: Type, rt: RecType) = info.substThis(clazz.asClass, rt.recThis) def addRefinement(tp: Type, sym: Symbol) = RefinedType(tp, sym.name, sym.info) - val refined = (parent /: decls.toList)(addRefinement) - RecType.closeOver(rt => subst(refined, rt)) + val refined = decls.toList.foldLeft(parent)(addRefinement) + RecType.closeOver(rt => refined.substThis(clazz, rt.recThis)) } case CLASSINFOtpe => val clazz = readSymbolRef() diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 5e5cb394ebad..d8648fd84f39 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -656,7 +656,7 @@ object JavaParsers { syntaxError(start, "illegal import", skipIt = false) List() } else { - val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _)) + val qual = names.tail.init.foldLeft(Ident(names.head): Tree)(Select(_, _)) val lastname = names.last val ident = Ident(lastname).withSpan(Span(lastnameOffset)) // val selector = lastname match { diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 7dd5ccf429e0..4b1bc583c746 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1455,7 +1455,7 @@ object Parsers { t } case AT if location != Location.InPattern => - (t /: annotations())(Annotated) + annotations().foldLeft(t)(Annotated) case _ => val tpt = typeDependingOn(location) if (isWildcard(t) && location != Location.InPattern) { diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala index 74493215f12a..ac350b6b1de4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala @@ -96,7 +96,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Cont attrs: Tree, scope: Tree, empty: Boolean, - children: Seq[Tree]): Tree = + children: collection.Seq[Tree]): Tree = { def starArgs = if (children.isEmpty) Nil @@ -129,7 +129,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Cont protected def ProcInstr(target: Tree, txt: Tree): Tree = New(_scala_xml_ProcInstr, LL(target, txt)) /** @todo: attributes */ - def makeXMLpat(span: Span, n: String, args: Seq[Tree]): Tree = { + def makeXMLpat(span: Span, n: String, args: collection.Seq[Tree]): Tree = { val (prepat, labpat) = splitPrefix(n) match { case (Some(pre), rest) => (const(pre), const(rest)) case _ => (wild, const(n)) @@ -141,7 +141,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Cont case _: Literal => makeTextPat(t) case _ => t } - protected def convertToTextPat(buf: Seq[Tree]): List[Tree] = + protected def convertToTextPat(buf: collection.Seq[Tree]): List[Tree] = (buf map convertToTextPat).toList def parseAttribute(span: Span, s: String): Tree = { @@ -159,7 +159,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Cont } /** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. 
*/ - def makeXMLseq(span: Span, args: Seq[Tree]): Block = { + def makeXMLseq(span: Span, args: collection.Seq[Tree]): Block = { val buffer = ValDef(_buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil)) val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t))) @@ -173,13 +173,13 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Cont } /** Various node constructions. */ - def group(span: Span, args: Seq[Tree]): Tree = + def group(span: Span, args: collection.Seq[Tree]): Tree = atSpan(span)( New(_scala_xml_Group, LL(makeXMLseq(span, args))) ) def unparsed(span: Span, str: String): Tree = atSpan(span)( New(_scala_xml_Unparsed, LL(const(str))) ) - def element(span: Span, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree]): Tree = { + def element(span: Span, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: collection.Seq[Tree]): Tree = { def handleNamespaceBinding(pre: String, z: String): Tree = { def mkAssign(t: Tree): Tree = Assign( Ident(_tmpscope), diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 9be652e4eea3..9c0ef8132229 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -40,7 +40,7 @@ object Formatting { private def treatArg(arg: Any, suffix: String)(implicit ctx: Context): (Any, String) = arg match { case arg: Seq[_] if suffix.nonEmpty && suffix.head == '%' => val (rawsep, rest) = suffix.tail.span(_ != '%') - val sep = StringContext.treatEscapes(rawsep) + val sep = StringContext.processEscapes(rawsep) if (rest.nonEmpty) (arg.map(showArg).mkString(sep), rest.tail) else (arg, suffix) case _ => @@ -57,7 +57,7 @@ object Formatting { case head :: tail => (head.stripMargin, tail map stripTrailingPart) case Nil => ("", Nil) } - val (args1, suffixes1) = (args, suffixes).zipped.map(treatArg(_, _)).unzip + val (args1, suffixes1) = args.lazyZip(suffixes).map(treatArg(_, _)).unzip new StringContext(prefix :: suffixes1.toList: _*).s(args1: _*) } } diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 1a874d0df76f..ef444da7182e 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -232,7 +232,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def paramsText(tp: LambdaType): Text = { def paramText(name: Name, tp: Type) = toText(name) ~ toTextRHS(tp) - Text((tp.paramNames, tp.paramInfos).zipped.map(paramText), ", ") + Text(tp.paramNames.lazyZip(tp.paramInfos).map(paramText), ", ") } protected def ParamRefNameString(name: Name): String = name.toString diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 39b50fe6c693..8ce095f96de8 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -594,7 +594,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case ContextBounds(bounds, cxBounds) => - (toText(bounds) /: cxBounds) {(t, cxb) => + cxBounds.foldLeft(toText(bounds)) {(t, cxb) => t ~ cxBoundToText(cxb) } case PatDef(mods, pats, tpt, rhs) => @@ -649,10 +649,29 @@ class RefinedPrinter(_ctx: 
Context) extends PlainPrinter(_ctx) { val tp = tp1.tryNormalize if (tp != NoType) tp else tp1 } + val tp3 = + if (homogenizedView && tree.isInstanceOf[If | Match | Annotated | Block | CaseDef]) { + // Types of non-leaf trees are not pickled but reconstructed when + // unpickled using the TypeAssigner. Sometimes, this requires choosing + // arbitrarily between two =:= types (e.g., when typing an `if`, where + // one branch is typed with a type alias and the other with a + // dealiased version of the same type) and we cannot guarantee that + // the same choice was made by the original Typer (e.g., because the + // original choice involved type variables). So we need to get rid of + // any alias in these types to make -Ytest-pickler work (the list of + // types in the isInstanceOf check above is conservative and might + // need to be expanded). + val dealiasMap = new TypeMap { + def apply(tp: Type) = mapOver(tp.dealias) + } + dealiasMap(tp2) + } + else tp2 + if (!suppressTypes) - txt = ("<" ~ txt ~ ":" ~ toText(tp2) ~ ">").close + txt = ("<" ~ txt ~ ":" ~ toText(tp3) ~ ">").close else if (tree.isType && !homogenizedView) - txt = toText(tp2) + txt = toText(tp3) } if (!suppressPositions) { if (printPos) { @@ -728,7 +747,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val (leading, paramss) = if (isExtension && vparamss.nonEmpty) (paramsText(vparamss.head) ~ " " ~ txt, vparamss.tail) else (txt, vparamss) - (leading /: paramss)((txt, params) => + paramss.foldLeft(leading)((txt, params) => txt ~ (Str(" given ") provided params.nonEmpty && params.head.mods.is(Given)) ~ paramsText(params)) diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala index 72edf40ab112..f97cbfdabae5 100644 --- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala @@ -28,9 +28,11 @@ class ReplPrinter(_ctx: Context) extends DecompilerPrinter(_ctx) { else if (debugPrint) super.toText(sym) else keyString(sym) ~~ nameString(sym.name.stripModuleClassSuffix) + inline private val qSc = '"'; + override def toText(const: Constant): Text = if (debugPrint) super.toText(const) - else if (const.tag == Constants.StringTag) Str('"' + const.value.toString + '"') + else if (const.tag == Constants.StringTag) Str(s"${qSc}${const.value}$qSc") else Str(const.value.toString) override def dclText(sym: Symbol): Text = if (debugPrint) super.dclText(sym) else { diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index e3921a17429c..ea64ae7ead1f 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -49,7 +49,7 @@ object Texts { case Vertical(_) => throw new IllegalArgumentException("Unexpected Vertical.appendToLastLine") } case Fluid(relems) => - (this /: relems.reverse)(_ appendToLastLine _) + relems.reverse.foldLeft(this)(_ appendToLastLine _) case Vertical(_) => throw new IllegalArgumentException("Unexpected Text.appendToLastLine(Vertical(...))") } @@ -62,7 +62,7 @@ object Texts { else if (that.isVertical) appendIndented(that)(width) else if (this.isVertical) Fluid(that.layout(width) :: this.relems) else if (that.remaining(width - lengthWithoutAnsi(lastLine)) >= 0) appendToLastLine(that) - else if (that.isSplittable) (this /: that.relems.reverse)(_.append(width)(_)) + else if (that.isSplittable) that.relems.reverse.foldLeft(this)(_.append(width)(_)) else 
appendIndented(that)(width) } @@ -73,7 +73,7 @@ case Str(s, _) => this case Fluid(relems) => - ((Str(""): Text) /: relems.reverse)(_.append(width)(_)) + relems.reverse.foldLeft(Str(""): Text)(_.append(width)(_)) case Vertical(relems) => Vertical(relems map (_ layout width)) } @@ -125,7 +125,7 @@ def maxLine: Int = this match { case Str(_, lines) => lines.end - case _ => (-1 /: relems)((acc, relem) => acc max relem.maxLine) + case _ => relems.foldLeft(-1)((acc, relem) => acc max relem.maxLine) } def mkString(width: Int, withLineNumbers: Boolean): String = { diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index 9cb9b6057ebc..bf0d24feffab 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -145,7 +145,7 @@ trait MessageRendering { /** The whole message rendered from `msg` */ def messageAndPos(msg: Message, pos: SourcePosition, diagnosticLevel: String)(implicit ctx: Context): String = { - val sb = mutable.StringBuilder.newBuilder + val sb = mutable.StringBuilder() val posString = posStr(pos, diagnosticLevel, msg) if (posString.nonEmpty) sb.append(posString).append(EOL) if (pos.exists && pos.source.file.exists) { diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index a1897f14b028..41713e53c42d 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -309,12 +309,12 @@ abstract class Reporter extends interfaces.ReporterResult { /** Returns a string meaning "n elements". */ protected def countString(n: Int, elements: String): String = n match { - case 0 => "no " + elements + "s" - case 1 => "one " + elements - case 2 => "two " + elements + "s" - case 3 => "three " + elements + "s" - case 4 => "four " + elements + "s" - case _ => n + " " + elements + "s" + case 0 => s"no ${elements}s" + case 1 => s"one $elements" + case 2 => s"two ${elements}s" + case 3 => s"three ${elements}s" + case 4 => s"four ${elements}s" + case _ => s"$n ${elements}s" } /** Should this diagnostic not be reported at all? */ diff --git a/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala b/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala index 1215b294cfec..d4ab84129995 100644 --- a/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala @@ -2152,19 +2152,25 @@ object messages { val kind: String = "Duplicate Symbol" val msg: String = { def nameAnd = if (decl.name != previousDecl.name) " name and" else "" - val details = if (decl.isRealMethod && previousDecl.isRealMethod) { - // compare the signatures when both symbols represent methods - decl.signature.matchDegree(previousDecl.signature) match { - case Signature.MatchDegree.NoMatch => - // DOTTY problem: Need to qualify MatchDegree enum vals since otherwise exhaustivity fails. - // To fix this, we need to export vals under singleton types. - "" // shouldn't be reachable - case Signature.MatchDegree.ParamMatch => - "have matching parameter types." - case Signature.MatchDegree.FullMatch => - i"have the same$nameAnd type after erasure."
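
`countString` pluralizes the element name for every count except exactly one, which is why the interpolated version keeps the singular form in the `case 1` branch. A simplified standalone sketch of that rule (the compiler version also spells out two, three and four):

    def countString(n: Int, elements: String): String = n match {
      case 0 => s"no ${elements}s"
      case 1 => s"one $elements"   // singular: "one error", never "one errors"
      case k => s"$k ${elements}s"
    }

    assert(countString(0, "error") == "no errors")
    assert(countString(1, "error") == "one error")
    assert(countString(5, "error") == "5 errors")
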
+ def details(implicit ctx: Context): String = + if (decl.isRealMethod && previousDecl.isRealMethod) { + // compare the signatures when both symbols represent methods + decl.signature.matchDegree(previousDecl.signature) match { + case Signature.MatchDegree.NoMatch => + // If the signatures don't match at all at the current phase, then + // they might match after erasure. + val elimErasedCtx = ctx.withPhaseNoEarlier(ctx.elimErasedValueTypePhase.next) + if (elimErasedCtx != ctx) + details(elimErasedCtx) + else + "" // shouldn't be reachable + case Signature.MatchDegree.ParamMatch => + "have matching parameter types." + case Signature.MatchDegree.FullMatch => + i"have the same$nameAnd type after erasure." + } } - } else "" + else "" def symLocation(sym: Symbol) = { val lineDesc = if (sym.span.exists && sym.span != sym.owner.span) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index cd3ff9ec6249..e39161435344 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -355,7 +355,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder qual.info.member(DefaultGetterName(sym.name, start + i)).exists) } else pnames.indices.map(Function.const(false)) - val params = (pnames, ptypes, defaults).zipped.map((pname, ptype, isDefault) => + val params = pnames.lazyZip(ptypes).lazyZip(defaults).map((pname, ptype, isDefault) => api.MethodParameter.of(pname.toString, apiType(ptype), isDefault, api.ParameterModifier.Plain)) api.ParameterList.of(params.toArray, mt.isImplicitMethod) :: paramLists(restpe, params.length) @@ -365,7 +365,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val tparams = sym.info match { case pt: TypeLambda => - (pt.paramNames, pt.paramInfos).zipped.map((pname, pbounds) => + pt.paramNames.lazyZip(pt.paramInfos).map((pname, pbounds) => apiTypeParameter(pname.toString, 0, pbounds.lo, pbounds.hi)) case _ => Nil diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala index 110024435977..89a652c0172a 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala @@ -33,11 +33,11 @@ object ShowAPI { def showDefinition(d: Definition)(implicit nesting: Int): String = d match { case v: Val => showMonoDef(v, "val") + ": " + showType(v.tpe) case v: Var => showMonoDef(v, "var") + ": " + showType(v.tpe) - case d: Def => showPolyDef(d, "def") + showValueParams(d.valueParameters) + ": " + showType(d.returnType) + case d: Def => showPolyDef(d, "def") + showValueParams(d.valueParameters.toIndexedSeq) + ": " + showType(d.returnType) case ta: TypeAlias => showPolyDef(ta, "type") + " = " + showType(ta.tpe) case td: TypeDeclaration => showPolyDef(td, "type") + showBounds(td.lowerBound, td.upperBound) case cl: ClassLike => showMonoDef(d, showDefinitionType(cl.definitionType)) + - showTypeParameters(cl.typeParameters) + " extends " + showTemplate(cl) + showTypeParameters(cl.typeParameters.toIndexedSeq) + " extends " + showTemplate(cl) case cl: ClassLikeDef => showPolyDef(cl, showDefinitionType(cl.definitionType)) } @@ -47,8 +47,8 @@ object ShowAPI { val showSelf = if (cl.selfType.isInstanceOf[EmptyType]) "" else " self: " + showNestedType(cl.selfType) + " =>" cl.structure.parents.map(showNestedType).mkString("", " with ", " {") + showSelf + - lines(truncateDecls(cl.structure.inherited).map(d => "^inherited^ 
" + showNestedDefinition(d))) + - lines(truncateDecls(cl.structure.declared).map(showNestedDefinition)) + + lines(truncateDecls(cl.structure.inherited).toIndexedSeq.map(d => "^inherited^ " + showNestedDefinition(d))) + + lines(truncateDecls(cl.structure.declared).toIndexedSeq.map(showNestedDefinition)) + "}" } @@ -59,7 +59,7 @@ object ShowAPI { case st: EmptyType => "" case p: Parameterized => showType(p.baseType) + p.typeArguments.map(showType).mkString("[", ", ", "]") case c: Constant => showType(c.baseType) + "(" + c.value + ")" - case a: Annotated => showAnnotations(a.annotations) + " " + showType(a.baseType) + case a: Annotated => showAnnotations(a.annotations.toIndexedSeq) + " " + showType(a.baseType) case s: Structure => s.parents.map(showType).mkString(" with ") + ( if (nesting <= 0) "{ }" @@ -72,7 +72,7 @@ object ShowAPI { ) case p: Polymorphic => showType(p.baseType) + ( if (nesting <= 0) " [ ]" - else showNestedTypeParameters(p.parameters) + else showNestedTypeParameters(p.parameters.toIndexedSeq) ) } @@ -85,17 +85,17 @@ object ShowAPI { private def space(s: String) = if (s.isEmpty) s else s + " " private def showMonoDef(d: Definition, label: String)(implicit nesting: Int): String = - space(showAnnotations(d.annotations)) + space(showAccess(d.access)) + space(showModifiers(d.modifiers)) + space(label) + d.name + space(showAnnotations(d.annotations.toIndexedSeq)) + space(showAccess(d.access)) + space(showModifiers(d.modifiers)) + space(label) + d.name private def showPolyDef(d: ParameterizedDefinition, label: String)(implicit nesting: Int): String = - showMonoDef(d, label) + showTypeParameters(d.typeParameters) + showMonoDef(d, label) + showTypeParameters(d.typeParameters.toIndexedSeq) private def showTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int): String = if (tps.isEmpty) "" else tps.map(showTypeParameter).mkString("[", ", ", "]") private def showTypeParameter(tp: TypeParameter)(implicit nesting: Int): String = - showAnnotations(tp.annotations) + " " + showVariance(tp.variance) + tp.id + showTypeParameters(tp.typeParameters) + " " + showBounds(tp.lowerBound, tp.upperBound) + showAnnotations(tp.annotations.toIndexedSeq) + " " + showVariance(tp.variance) + tp.id + showTypeParameters(tp.typeParameters.toIndexedSeq) + " " + showBounds(tp.lowerBound, tp.upperBound) private def showAnnotations(as: Seq[Annotation])(implicit nesting: Int) = as.map(showAnnotation).mkString(" ") private def showAnnotation(a: Annotation)(implicit nesting: Int) = diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala b/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala index 8eb0bf628cce..b80771415d81 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala @@ -1398,8 +1398,13 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend type Signature = core.Signature - def Signature_paramSigs(self: Signature): List[String] = - self.paramsSig.map(_.toString) + def Signature_paramSigs(self: Signature): List[String | Int] = + self.paramsSig.map { + case paramSig: core.Names.TypeName => + paramSig.toString + case paramSig: Int => + paramSig + } def Signature_resultSig(self: Signature): String = self.resSig.toString diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 6209acc906f6..9c26de87d227 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -32,7 +32,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase = private class RefInfo(implicit ctx: Context) { /** The classes for which a Ref type exists. */ val refClassKeys: collection.Set[Symbol] = - defn.ScalaNumericValueClasses() + defn.BooleanClass + defn.ObjectClass + defn.ScalaNumericValueClasses() `union` Set(defn.BooleanClass, defn.ObjectClass) val refClass: Map[Symbol, Symbol] = refClassKeys.map(rc => rc -> ctx.requiredClass(s"scala.runtime.${rc.name}Ref")).toMap diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 76f03286a4fb..f44802520741 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -706,7 +706,7 @@ object Erasure { def sameSymbol(tp1: Type, tp2: Type) = tp1.typeSymbol == tp2.typeSymbol val paramAdaptationNeeded = - (implParamTypes, samParamTypes).zipped.exists((implType, samType) => + implParamTypes.lazyZip(samParamTypes).exists((implType, samType) => !sameSymbol(implType, samType) && !autoAdaptedParam(implType)) val resultAdaptationNeeded = !sameSymbol(implResultType, samResultType) && !autoAdaptedResult(implResultType) @@ -723,7 +723,7 @@ object Erasure { implicit val ctx = bridgeCtx val List(bridgeParams) = bridgeParamss - val rhs = Apply(meth, (bridgeParams, implParamTypes).zipped.map(adapt(_, _))) + val rhs = Apply(meth, bridgeParams.lazyZip(implParamTypes).map(adapt(_, _))) adapt(rhs, bridgeType.resultType) }, targetType = implClosure.tpt.tpe) } else implClosure diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index a45b2e32ce6b..e62ca5079ff3 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -15,6 +15,7 @@ import SymDenotations._, Symbols._, StdNames._, Denotations._ import TypeErasure.{ valueErasure, ErasedValueType } import NameKinds.{ExtMethName, UniqueExtMethName} import Decorators._ +import TypeUtils._ /** * Perform Step 1 in the inline classes SIP: Creates extension methods for all @@ -131,8 +132,7 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete else NoSymbol private def createExtensionMethod(imeth: Symbol, staticClass: Symbol)(implicit ctx: Context): TermSymbol = { - val extensionName = extensionNames(imeth).head.toTermName - val extensionMeth = ctx.newSymbol(staticClass, extensionName, + val extensionMeth = ctx.newSymbol(staticClass, extensionName(imeth), (imeth.flags | Final) &~ (Override | Protected | AbsOverride), fullyParameterizedType(imeth.info, imeth.owner.asClass), privateWithin = imeth.privateWithin, coord = imeth.coord) @@ -180,40 +180,9 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete object ExtensionMethods { val name: String = "extmethods" - /** Generate stream of possible names for the extension version of given instance method `imeth`. - * If the method is not overloaded, this stream consists of just "imeth$extension". - * If the method is overloaded, the stream has as first element "imeth$extenionX", where X is the - * index of imeth in the sequence of overloaded alternatives with the same name. 
This choice will - * always be picked as the name of the generated extension method. - * After this first choice, all other possible indices in the range of 0 until the number - * of overloaded alternatives are returned. The secondary choices are used to find a matching method - * in `extensionMethod` if the first name has the wrong type. We thereby gain a level of insensitivity - * of how overloaded types are ordered between phases and picklings. - */ - private def extensionNames(imeth: Symbol)(implicit ctx: Context): Stream[Name] = { - val decl = imeth.owner.info.decl(imeth.name) - - /** No longer needed for Dotty, as we are more disciplined with scopes now. - // Bridge generation is done at phase `erasure`, but new scopes are only generated - // for the phase after that. So bridges are visible in earlier phases. - // - // `info.member(imeth.name)` filters these out, but we need to use `decl` - // to restrict ourselves to members defined in the current class, so we - // must do the filtering here. - val declTypeNoBridge = decl.filter(sym => !sym.isBridge).tpe - */ - decl match { - case decl: MultiDenotation => - val alts = decl.alternatives - val index = alts indexOf imeth.denot - assert(index >= 0, alts + " does not contain " + imeth) - def altName(index: Int) = UniqueExtMethName(imeth.name.asTermName, index) - altName(index) #:: ((0 until alts.length).toStream filter (index != _) map altName) - case decl => - assert(decl.exists, imeth.name + " not found in " + imeth.owner + "'s decls: " + imeth.owner.info.decls) - Stream(ExtMethName(imeth.name.asTermName)) - } - } + /** Name of the extension method that corresponds to given instance method `meth`. */ + def extensionName(imeth: Symbol)(implicit ctx: Context): TermName = + ExtMethName(imeth.name.asTermName) /** Return the extension method that corresponds to given instance method `meth`. */ def extensionMethod(imeth: Symbol)(implicit ctx: Context): TermSymbol = @@ -221,22 +190,22 @@ object ExtensionMethods { // FIXME use toStatic instead? val companion = imeth.owner.companionModule val companionInfo = companion.info - val candidates = extensionNames(imeth) map (companionInfo.decl(_).symbol) filter (_.exists) - val matching = candidates filter (c => FullParameterization.memberSignature(c.info) == imeth.signature) + val candidates = companionInfo.decl(extensionName(imeth)).alternatives + val matching = + // See the documentation of `memberSignature` to understand why `.stripPoly.ensureMethodic` is needed here. 
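Editor's aside, not part of the patch: with the simplified scheme in this hunk, every overload of a value-class method ends up in the companion under the single name `<method>$extension`, so `extensionMethod` must pick the right candidate by comparing signatures rather than by trying numbered names. A hand-written, hedged analogue (the `Meter`/`add$extension` names are invented for illustration and are not compiler output):

object ExtensionLookupSketch {
  // What the extmethods phase conceptually leaves behind: all overloads of
  // `add` share one name and differ only in their parameter signatures.
  final class Meter(val underlying: Double) extends AnyVal
  object Meter {
    def `add$extension`(self: Double, other: Double): Double = self + other
    def `add$extension`(self: Double, times: Int): Double = self * times
  }
  def main(args: Array[String]): Unit = {
    // Ordinary overload resolution here plays the role of the signature
    // filtering done in `extensionMethod`.
    println(Meter.`add$extension`(1.5, 2.5)) // 4.0
    println(Meter.`add$extension`(1.5, 3))   // 4.5
  }
}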
+ candidates filter (c => FullParameterization.memberSignature(c.info) == imeth.info.stripPoly.ensureMethodic.signature) assert(matching.nonEmpty, i"""no extension method found for: | - | $imeth:${imeth.info.show} with signature ${imeth.signature} in ${companion.moduleClass} + | $imeth:${imeth.info.show} with signature ${imeth.info.signature} in ${companion.moduleClass} | | Candidates: | - | ${candidates.map(c => c.name + ":" + c.info.show).mkString("\n")} + | ${candidates.map(c => s"${c.name}:${c.info.show}").mkString("\n")} | | Candidates (signatures normalized): | - | ${candidates.map(c => c.name + ":" + c.info.signature + ":" + FullParameterization.memberSignature(c.info)).mkString("\n")} - | - | Eligible Names: ${extensionNames(imeth).mkString(",")}""") - matching.head.asTerm + | ${candidates.map(c => s"${c.name}:${c.info.signature}:${FullParameterization.memberSignature(c.info)}").mkString("\n")}""") + matching.head.symbol.asTerm } } diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index 971aa33f3163..2fc953cfdad2 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -73,7 +73,7 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => case (stat: TypeDef) :: stats1 if stat.symbol.isClass => if (stat.symbol.is(Flags.Module)) { def pushOnTop(xs: List[Tree], ys: List[Tree]): List[Tree] = - (ys /: xs)((ys, x) => x :: ys) + xs.foldLeft(ys)((ys, x) => x :: ys) moduleClassDefs += (stat.name -> stat) singleClassDefs -= stat.name.stripModuleClassSuffix val stats1r = reorder(stats1, Nil) @@ -134,7 +134,7 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => private def toTypeTree(tree: Tree)(implicit ctx: Context) = { val binders = collectBinders.apply(Nil, tree) val result: Tree = TypeTree(tree.tpe).withSpan(tree.span) - (result /: binders)(Annotated(_, _)) + binders.foldLeft(result)(Annotated(_, _)) } override def transformOther(tree: Tree)(implicit ctx: Context): Tree = tree match { diff --git a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala index c6c0e1d8b92f..7a5ca8a5ee5f 100644 --- a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala +++ b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala @@ -235,7 +235,7 @@ trait FullParameterization { val meth = acc.tpe.asInstanceOf[MethodType] val paramTypes = meth.instantiateParamInfos(vparams.map(_.tpe)) acc.appliedToArgs( - (vparams, paramTypes).zipped.map((vparam, paramType) => { + vparams.lazyZip(paramTypes).map((vparam, paramType) => { assert(vparam.tpe <:< paramType.widen) // type should still conform to widened type ref(vparam.symbol).ensureConforms(paramType) })) @@ -247,7 +247,16 @@ trait FullParameterization { object FullParameterization { /** Assuming `info` is a result of a `fullyParameterizedType` call, the signature of the - * original method type `X` such that `info = fullyParameterizedType(X, ...)`. + * original method type `X` after stripping its leading type parameters section, + * such that: + * info.stripPoly.ensureMethodic = fullyParameterizedType(X, ...).stripPoly.ensureMethodic + * + * NOTE: Keeping the polymorphic part of the signature would be more precise, + * but we cannot distinguish which type parameters of `info` are also type + * parameters of`X`. 
This could be fixed by using a specific NameKind for the + * extra type parameters, but that wouldn't help for extension methods + * unpickled from Scala 2 (because Scala 2 extmeths phase happens before + * pickling, which is maybe something we should change for 2.14). */ def memberSignature(info: Type)(implicit ctx: Context): Signature = info match { case info: PolyType => diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala index ecc805746b2a..1dbb0b51dd97 100644 --- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala +++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala @@ -294,7 +294,7 @@ object LambdaLift { else sym.name.freshened private def generateProxies()(implicit ctx: Context): Unit = - for ((owner, freeValues) <- free.toIterator) { + for ((owner, freeValues) <- free.iterator) { val newFlags = Synthetic | (if (owner.isClass) ParamAccessor | Private else Param) ctx.debuglog(i"free var proxy: ${owner.showLocated}, ${freeValues.toList}%, %") proxyMap(owner) = { @@ -437,7 +437,7 @@ object LambdaLift { val classProxies = fvs.map(proxyOf(sym.owner, _)) val constrProxies = fvs.map(proxyOf(sym, _)) ctx.debuglog(i"copy params ${constrProxies.map(_.showLocated)}%, % to ${classProxies.map(_.showLocated)}%, %}") - seq((classProxies, constrProxies).zipped.map(proxyInit), rhs) + seq(classProxies.lazyZip(constrProxies).map(proxyInit), rhs) } tree match { diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index e0f432e1ebc5..71bf24a133c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -377,7 +377,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { var flag: Tree = EmptyTree var ord = 0 - def offsetName(id: Int) = (StdNames.nme.LAZY_FIELD_OFFSET + (if (x.symbol.owner.is(Module)) "_m_" else "") + id.toString).toTermName + def offsetName(id: Int) = s"${StdNames.nme.LAZY_FIELD_OFFSET}${if (x.symbol.owner.is(Module)) "_m_" else ""}$id".toTermName // compute or create appropriate offsetSymbol, bitmap and bits used by current ValDef appendOffsetDefs.get(claz) match { @@ -394,7 +394,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } else { // need to create a new flag offsetSymbol = ctx.newSymbol(claz, offsetById, Synthetic, defn.LongType).enteredAfter(this) offsetSymbol.addAnnotation(Annotation(defn.ScalaStaticAnnot)) - val flagName = (StdNames.nme.BITMAP_PREFIX + id.toString).toTermName + val flagName = s"${StdNames.nme.BITMAP_PREFIX}$id".toTermName val flagSymbol = ctx.newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this) flag = ValDef(flagSymbol, Literal(Constant(0L))) val offsetTree = ValDef(offsetSymbol, getOffset.appliedTo(thizClass, Literal(Constant(flagName.toString)))) @@ -404,7 +404,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { case None => offsetSymbol = ctx.newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) offsetSymbol.addAnnotation(Annotation(defn.ScalaStaticAnnot)) - val flagName = (StdNames.nme.BITMAP_PREFIX + "0").toTermName + val flagName = s"${StdNames.nme.BITMAP_PREFIX}0".toTermName val flagSymbol = ctx.newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this) flag = ValDef(flagSymbol, Literal(Constant(0L))) val offsetTree = ValDef(offsetSymbol, getOffset.appliedTo(thizClass, 
Literal(Constant(flagName.toString)))) diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index bc25654757fb..65263544c3ab 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -110,7 +110,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => else if (sym eq defn.NullClass) nullLiteral else if (sym eq defn.BoxedUnitClass) ref(defn.BoxedUnit_UNIT) else { - assert(false, sym + " has no erased bottom tree") + assert(false, s"$sym has no erased bottom tree") EmptyTree } } diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala index af40223d1421..9e20dd13abd1 100644 --- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala +++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala @@ -191,8 +191,8 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => for (p <- impl.parents; constr = stripBlock(p).symbol if constr.isConstructor) yield constr.owner -> transformConstructor(p) ).toMap - val superCalls = superCallsAndArgs.mapValues(_._1) - val initArgs = superCallsAndArgs.mapValues(_._2) + val superCalls = superCallsAndArgs.transform((_, v) => v._1) + val initArgs = superCallsAndArgs.transform((_, v) => v._2) def superCallOpt(baseCls: Symbol): List[Tree] = superCalls.get(baseCls) match { case Some(call) => diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcherOld.scala.disabled b/compiler/src/dotty/tools/dotc/transform/PatternMatcherOld.scala.disabled index 1fb155eb4f0d..a1493f463bee 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcherOld.scala.disabled +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcherOld.scala.disabled @@ -161,7 +161,7 @@ class PatternMatcherOld extends MiniPhase with DenotTransformer { } def emitValDefs: List[ValDef] = { - (lhs, rhs).zipped.map((symbol, tree) => ValDef(symbol.asTerm, tree.ensureConforms(symbol.info))) + lhs.lazyZip(rhs).map((symbol, tree) => ValDef(symbol.asTerm, tree.ensureConforms(symbol.info))) } } object NoRebindings extends Rebindings(Nil, Nil) @@ -570,7 +570,7 @@ class PatternMatcherOld extends MiniPhase with DenotTransformer { def emitVars = storedBinders.nonEmpty - lazy val storedSubsted = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) } + lazy val storedSubsted = subPatBinders.lazyZip(subPatRefs).partition{ case (sym, _) => storedBinders(sym) } def stored = storedSubsted._1 @@ -607,7 +607,7 @@ class PatternMatcherOld extends MiniPhase with DenotTransformer { // only store binders actually used val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip - Block((subPatBindersStored.toList, subPatRefsStored.toList).zipped.map((bind, ref) => { + Block(subPatBindersStored.toList.lazyZip(subPatRefsStored.toList).map((bind, ref) => { // required in case original pattern had a more precise type // eg case s@"foo" => would be otherwise translated to s with type String instead of String("foo") def refTpeWiden = ref.tpe.widen @@ -1387,7 +1387,7 @@ class PatternMatcherOld extends MiniPhase with DenotTransformer { // (it will later result in a type test when `tp` is not a subtype of `b.info`) // TODO: can we simplify this, together with the Bound case? 
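Editor's aside, not part of the patch: the Mixin.scala hunk above replaces `mapValues` with `transform` because in Scala 2.13 `Map.mapValues` is deprecated and yields a lazy `MapView` rather than a strict `Map`. A minimal runnable sketch of the difference (object and value names are invented):

object MapValuesVsTransform {
  def main(args: Array[String]): Unit = {
    val callsAndArgs = Map("A" -> (1, "x"), "B" -> (2, "y"))
    // 2.13: mapValues lives on views (the direct Map call is deprecated)
    // and is recomputed on every access.
    val lazyView = callsAndArgs.view.mapValues(_._1)
    // transform keeps a strict immutable Map, preserving the 2.12 behaviour.
    val strict = callsAndArgs.transform((_, v) => v._1)
    println(lazyView.toMap) // Map(A -> 1, B -> 2)
    println(strict)         // Map(A -> 1, B -> 2)
  }
}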
def subPatBinders = subBoundTrees map (_.binder) - lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree + lazy val subBoundTrees = args.lazyZip(subPatTypes) map newBoundTree // never store these in local variables (for PreserveSubPatBinders) lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet @@ -1699,7 +1699,7 @@ class PatternMatcherOld extends MiniPhase with DenotTransformer { private def typedAsSequence(pat: Pattern) = TypedPat(pat, extractor.sequenceType) private def productPats = patterns.fixed take prodArity private def elementPats = patterns.fixed drop prodArity - private def products = (productPats, productTypes).zipped map TypedPat + private def products = productPats.lazyZip(productTypes) map TypedPat private def elements = elementPats map typedAsElement private def stars = patterns.starPatterns map typedAsSequence diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 8ce3420db72b..1fffeaaad92f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -394,7 +394,7 @@ object Splicer { case _ => // Take the flatten name of the class and the full package name val pack = tpe.classSymbol.topLevelClass.owner - val packageName = if (pack == defn.EmptyPackageClass) "" else pack.fullName + "." + val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString } diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 75f86dcfabe1..4b9f4d891d94 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -62,7 +62,8 @@ class SyntheticMembers(thisPhase: DenotTransformer) { if (myValueSymbols.isEmpty) { myValueSymbols = List(defn.Any_hashCode, defn.Any_equals) myCaseSymbols = myValueSymbols ++ List(defn.Any_toString, defn.Product_canEqual, - defn.Product_productArity, defn.Product_productPrefix, defn.Product_productElement) + defn.Product_productArity, defn.Product_productPrefix, defn.Product_productElement, + defn.Product_productElementName) myCaseModuleSymbols = myCaseSymbols.filter(_ ne defn.Any_equals) myEnumCaseSymbols = List(defn.Enum_ordinal) } @@ -119,13 +120,14 @@ class SyntheticMembers(thisPhase: DenotTransformer) { def syntheticRHS(vrefss: List[List[Tree]])(implicit ctx: Context): Tree = synthetic.name match { case nme.hashCode_ if isDerivedValueClass(clazz) => valueHashCodeBody - case nme.hashCode_ => caseHashCodeBody + case nme.hashCode_ => chooseHashcode case nme.toString_ => if (clazz.is(ModuleClass)) ownName else forwardToRuntime(vrefss.head) case nme.equals_ => equalsBody(vrefss.head.head) case nme.canEqual_ => canEqualBody(vrefss.head.head) case nme.productArity => Literal(Constant(accessors.length)) case nme.productPrefix => ownName case nme.productElement => productElementBody(accessors.length, vrefss.head.head) + case nme.productElementName => productElementNameBody(accessors.length, vrefss.head.head) case nme.ordinal => Select(This(clazz), nme.ordinalDollar) } ctx.log(s"adding $synthetic to $clazz at ${ctx.phase}") @@ -149,6 +151,40 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * ``` */ def productElementBody(arity: Int, index: Tree)(implicit ctx: Context): Tree = { 
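For context, a hedged user-level sketch (assumes a 2.13-or-later standard library; the `User` case class is invented) of the `productElementName` member that the SyntheticMembers change here starts synthesizing for case classes:

object ProductElementNameDemo {
  case class User(name: String, age: Int)
  def main(args: Array[String]): Unit = {
    val u = User("Ada", 36)
    // productElementName was added to Product in 2.13; the synthesized
    // implementation maps each index to the corresponding accessor name.
    val fields = (0 until u.productArity)
      .map(i => s"${u.productElementName(i)} = ${u.productElement(i)}")
    println(fields.mkString(", ")) // name = Ada, age = 36
  }
}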
+ // case N => _${N + 1} + val cases = 0.until(arity).map { i => + CaseDef(Literal(Constant(i)), EmptyTree, Select(This(clazz), nme.selectorName(i))) + } + + Match(index, (cases :+ generateIOBECase(index)).toList) + } + + /** The class + * + * ``` + * case class C(x: T, y: T) + * ``` + * + * gets the `productElementName` method: + * + * ``` + * def productElementName(index: Int): String = index match { + * case 0 => "x" + * case 1 => "y" + * case _ => throw new IndexOutOfBoundsException(index.toString) + * } + * ``` + */ + def productElementNameBody(arity: Int, index: Tree)(implicit ctx: Context): Tree = { + // case N => // name for case arg N + val cases = 0.until(arity).map { i => + CaseDef(Literal(Constant(i)), EmptyTree, Literal(Constant(accessors(i).name.toString))) + } + + Match(index, (cases :+ generateIOBECase(index)).toList) + } + + def generateIOBECase(index: Tree): CaseDef = { val ioob = defn.IndexOutOfBoundsException.typeRef // Second constructor of ioob that takes a String argument def filterStringConstructor(s: Symbol): Boolean = s.info match { @@ -160,14 +196,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val error = Throw(New(ioob, constructor, List(stringIndex))) // case _ => throw new IndexOutOfBoundsException(i.toString) - val defaultCase = CaseDef(Underscore(defn.IntType), EmptyTree, error) - - // case N => _${N + 1} - val cases = 0.until(arity).map { i => - CaseDef(Literal(Constant(i)), EmptyTree, Select(This(clazz), nme.selectorName(i))) - } - - Match(index, (cases :+ defaultCase).toList) + CaseDef(Underscore(defn.IntType), EmptyTree, error) } /** The class @@ -232,15 +261,50 @@ class SyntheticMembers(thisPhase: DenotTransformer) { /** The class * * ``` - * package p - * case class C(x: T, y: T) + * case object C + * ``` + * + * gets the `hashCode` method: + * + * ``` + * def hashCode: Int = "C".hashCode // constant folded + * ``` + * + * The class + * + * ``` + * case class C(x: T, y: U) + * ``` + * + * if none of `T` or `U` are primitive types, gets the `hashCode` method: + * + * ``` + * def hashCode: Int = ScalaRunTime._hashCode(this) + * ``` + * + * else if either `T` or `U` are primitive, gets the `hashCode` method implemented by [[caseHashCodeBody]] + */ + def chooseHashcode(implicit ctx: Context) = { + if (clazz.is(ModuleClass)) + Literal(Constant(clazz.name.stripModuleClassSuffix.toString.hashCode)) + else if (accessors.exists(_.info.finalResultType.classSymbol.isPrimitiveValueClass)) + caseHashCodeBody + else + ref(defn.ScalaRuntime__hashCode).appliedTo(This(clazz)) + } + + /** The class + * + * ``` + * case class C(x: Int, y: T) * ``` * * gets the `hashCode` method: * * ``` * def hashCode: Int = { - * var acc: Int = "p.C".hashCode // constant folded + * var acc: Int = 0xcafebabe + * acc = Statics.mix(acc, this.productPrefix.hashCode()); * acc = Statics.mix(acc, x); * acc = Statics.mix(acc, Statics.this.anyHash(y)); * Statics.finalizeHash(acc, 2) @@ -248,19 +312,14 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * ``` */ def caseHashCodeBody(implicit ctx: Context): Tree = { - val seed = clazz.fullName.toString.hashCode - if (accessors.nonEmpty) { - val acc = ctx.newSymbol(ctx.owner, "acc".toTermName, Mutable | Synthetic, defn.IntType, coord = ctx.owner.span) - val accDef = ValDef(acc, Literal(Constant(seed))) - val mixes = for (accessor <- accessors) yield - Assign(ref(acc), ref(defn.staticsMethod("mix")).appliedTo(ref(acc), hashImpl(accessor))) - val finish = ref(defn.staticsMethod("finalizeHash")).appliedTo(ref(acc), 
Literal(Constant(accessors.size))) - Block(accDef :: mixes, finish) - } else { - // Pre-compute the hash code - val hash = scala.runtime.Statics.finalizeHash(seed, 0) - Literal(Constant(hash)) - } + val acc = ctx.newSymbol(ctx.owner, nme.acc, Mutable | Synthetic, defn.IntType, coord = ctx.owner.span) + val accDef = ValDef(acc, Literal(Constant(0xcafebabe))) + val mixPrefix = Assign(ref(acc), + ref(defn.staticsMethod("mix")).appliedTo(ref(acc), This(clazz).select(defn.Product_productPrefix).select(defn.Any_hashCode).appliedToNone)) + val mixes = for (accessor <- accessors) yield + Assign(ref(acc), ref(defn.staticsMethod("mix")).appliedTo(ref(acc), hashImpl(accessor))) + val finish = ref(defn.staticsMethod("finalizeHash")).appliedTo(ref(acc), Literal(Constant(accessors.size))) + Block(accDef :: mixPrefix :: mixes, finish) } /** The `hashCode` implementation for given symbol `sym`. */ diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 48761a2142e6..964713673497 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -147,7 +147,7 @@ class TailRec extends MiniPhase { val varsForRewrittenParamSyms = transformer.varsForRewrittenParamSyms val initialVarDefs = { - val initialParamVarDefs = (rewrittenParamSyms, varsForRewrittenParamSyms).zipped.map { + val initialParamVarDefs = rewrittenParamSyms.lazyZip(varsForRewrittenParamSyms).map { (param, local) => ValDef(local.asTerm, ref(param)) } varForRewrittenThis match { diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 2333221b50b2..315880f106a6 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -91,7 +91,7 @@ class TreeChecker extends Phase with SymTransformer { if (ctx.settings.YtestPickler.value && ctx.phase.prev.isInstanceOf[Pickler]) ctx.echo("Skipping Ycheck after pickling with -Ytest-pickler, the returned tree contains stale symbols") else if (ctx.phase.prev.isCheckable) - check(ctx.base.allPhases, ctx) + check(ctx.base.allPhases.toIndexedSeq, ctx) } private def previousPhases(phases: List[Phase])(implicit ctx: Context): List[Phase] = phases match { diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala index f5bd2d64b319..962cb0e2e648 100644 --- a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala @@ -2,10 +2,12 @@ package dotty.tools.dotc.transform.localopt import dotty.tools.dotc.ast.Trees._ import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types.MethodType import dotty.tools.dotc.transform.MegaPhase.MiniPhase /** @@ -21,6 +23,16 @@ class StringInterpolatorOpt extends MiniPhase { override def phaseName: String = "stringInterpolatorOpt" + override def checkPostCondition(tree: tpd.Tree)(implicit ctx: Context): Unit = { + tree match { + case tree: RefTree => + val sym = tree.symbol + assert(sym != defn.StringContext_raw && sym != defn.StringContext_s, + i"$tree in 
${ctx.owner.showLocated} should have been rewritten by phase $phaseName") + case _ => + } + } + /** Matches a list of constant literals */ private object Literals { def unapply(tree: SeqLiteral)(implicit ctx: Context): Option[List[Literal]] = { @@ -60,7 +72,7 @@ class StringInterpolatorOpt extends MiniPhase { def unapply(tree: Apply)(implicit ctx: Context): Option[(List[Literal], List[Tree])] = { tree match { case SOrRawInterpolator(strs, elems) => - if (tree.symbol == defn.StringContextRaw) Some(strs, elems) + if (tree.symbol == defn.StringContext_raw) Some(strs, elems) else { // tree.symbol == defn.StringContextS try { val escapedStrs = strs.map { str => @@ -80,28 +92,46 @@ class StringInterpolatorOpt extends MiniPhase { override def transformApply(tree: Apply)(implicit ctx: Context): Tree = { val sym = tree.symbol val isInterpolatedMethod = // Test names first to avoid loading scala.StringContext if not used - (sym.name == nme.raw_ && sym.eq(defn.StringContextRaw)) || - (sym.name == nme.s && sym.eq(defn.StringContextS)) - if (isInterpolatedMethod) transformInterpolator(tree) - else tree - } + (sym.name == nme.raw_ && sym.eq(defn.StringContext_raw)) || + (sym.name == nme.s && sym.eq(defn.StringContext_s)) + if (isInterpolatedMethod) + tree match { + case StringContextIntrinsic(strs: List[Literal], elems: List[Tree]) => + val stri = strs.iterator + val elemi = elems.iterator + var result: Tree = stri.next + def concat(tree: Tree): Unit = { + result = result.select(defn.String_+).appliedTo(tree) + } + while (elemi.hasNext) { + concat(elemi.next) + val str = stri.next + if (!str.const.stringValue.isEmpty) concat(str) + } + result + // Starting with Scala 2.13, s and raw are macros in the standard + // library, so we need to expand them manually. + // sc.s(args) --> standardInterpolator(processEscapes, args, sc.parts) + // sc.raw(args) --> standardInterpolator(x => x, args, sc.parts) + case Apply(intp, args :: Nil) => + val pre = intp match { + case Select(pre, _) => pre + case intp: Ident => tpd.desugarIdentPrefix(intp) + } + val isRaw = sym eq defn.StringContext_raw + val stringToString = defn.StringContextModule_processEscapes.info.asInstanceOf[MethodType] - private def transformInterpolator(tree: Tree)(implicit ctx: Context): Tree = { - tree match { - case StringContextIntrinsic(strs: List[Literal], elems: List[Tree]) => - val stri = strs.iterator - val elemi = elems.iterator - var result: Tree = stri.next - def concat(tree: Tree): Unit = { - result = result.select(defn.String_+).appliedTo(tree) - } - while (elemi.hasNext) { - concat(elemi.next) - val str = stri.next - if (!str.const.stringValue.isEmpty) concat(str) - } - result - case _ => tree - } + val process = tpd.Lambda(stringToString, args => + if (isRaw) args.head else ref(defn.StringContextModule_processEscapes).appliedToArgs(args)) + + evalOnce(pre) { sc => + val parts = sc.select(defn.StringContext_parts) + + ref(defn.StringContextModule_standardInterpolator) + .appliedToArgs(List(process, args, parts)) + } + } + else + tree } } diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index df0fc59ae925..f0cc631f1dd6 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -288,7 +288,7 @@ class SpaceEngine(implicit ctx: Context) extends SpaceLogic { import tpd._ import SpaceEngine._ - private val scalaSeqFactoryClass = 
ctx.requiredClass("scala.collection.generic.SeqFactory") + private val scalaSeqFactoryClass = ctx.requiredClass("scala.collection.SeqFactory") private val scalaListType = ctx.requiredClassRef("scala.collection.immutable.List") private val scalaNilType = ctx.requiredModuleRef("scala.collection.immutable.Nil") private val scalaConsType = ctx.requiredClassRef("scala.collection.immutable.::") diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 8f7929328eff..15deb9ac698c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -78,8 +78,8 @@ object Applications { * { * def lengthCompare(len: Int): Int // or, def length: Int * def apply(i: Int): T = a(i) - * def drop(n: Int): scala.Seq[T] - * def toSeq: scala.Seq[T] + * def drop(n: Int): scala.collection.Seq[T] + * def toSeq: scala.collection.Seq[T] * } * ``` * returns `T`, otherwise NoType. @@ -88,8 +88,8 @@ object Applications { def lengthTp = ExprType(defn.IntType) def lengthCompareTp = MethodType(List(defn.IntType), defn.IntType) def applyTp(elemTp: Type) = MethodType(List(defn.IntType), elemTp) - def dropTp(elemTp: Type) = MethodType(List(defn.IntType), defn.SeqType.appliedTo(elemTp)) - def toSeqTp(elemTp: Type) = ExprType(defn.SeqType.appliedTo(elemTp)) + def dropTp(elemTp: Type) = MethodType(List(defn.IntType), defn.CollectionSeqType.appliedTo(elemTp)) + def toSeqTp(elemTp: Type) = ExprType(defn.CollectionSeqType.appliedTo(elemTp)) // the result type of `def apply(i: Int): T` val elemTp = getTp.member(nme.apply).suchThat(_.info <:< applyTp(WildcardType)).info.resultType @@ -1152,7 +1152,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic => argTypes = argTypes.take(args.length) ++ List.fill(argTypes.length - args.length)(WildcardType) } - val unapplyPatterns = (bunchedArgs, argTypes).zipped map (typed(_, _)) + val unapplyPatterns = bunchedArgs.lazyZip(argTypes) map (typed(_, _)) val result = assignType(cpy.UnApply(tree)(unapplyFn, unapplyImplicits(unapplyApp), unapplyPatterns), ownType) unapp.println(s"unapply patterns = $unapplyPatterns") if ((ownType eq selType) || ownType.isError) result diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 0b0e06c6a652..217f76d03bad 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -48,7 +48,7 @@ object Checking { * See TypeOps.boundsViolations for an explanation of the parameters. 
*/ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type = NoType)(implicit ctx: Context): Unit = { - (args, boundss).zipped.foreach { (arg, bound) => + args.lazyZip(boundss).foreach { (arg, bound) => if (!bound.isLambdaSub && !arg.tpe.hasSimpleKind) { // see MissingTypeParameterFor ctx.error(ex"missing type parameter(s) for $arg", arg.sourcePos) diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index 2806fdcf1d7a..1753cd1b824f 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -80,7 +80,7 @@ abstract class Lifter { def liftArgs(defs: mutable.ListBuffer[Tree], methRef: Type, args: List[Tree])(implicit ctx: Context): List[Tree] = methRef.widen match { case mt: MethodType => - (args, mt.paramNames, mt.paramInfos).zipped.map { (arg, name, tp) => + args.lazyZip(mt.paramNames).lazyZip(mt.paramInfos).map { (arg, name, tp) => val lifter = if (tp.isInstanceOf[ExprType]) exprLifter else this lifter.liftArg(defs, arg, if (name.firstPart contains '$') EmptyTermName else name) } @@ -210,7 +210,7 @@ object EtaExpansion extends LiftImpure { var paramFlag = Synthetic | Param if (mt.isContextualMethod) paramFlag |= Given else if (mt.isImplicitMethod) paramFlag |= Implicit - val params = (mt.paramNames, paramTypes).zipped.map((name, tpe) => + val params = mt.paramNames.lazyZip(paramTypes).map((name, tpe) => ValDef(name, tpe, EmptyTree).withFlags(paramFlag).withSpan(tree.span.startPos)) var ids: List[Tree] = mt.paramNames map (name => Ident(name).withSpan(tree.span.startPos)) if (mt.paramInfos.nonEmpty && mt.paramInfos.last.isRepeatedParam) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 702c9866000a..2e2d3b5b55cc 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -145,10 +145,8 @@ object Implicits { val isFunctionInS2 = ctx.scala2Mode && tpw.derivesFrom(defn.FunctionClass(1)) && ref.symbol != defn.Predef_conforms val isImplicitConversion = tpw.derivesFrom(defn.ConversionClass) - val isConforms = // An implementation of <:< counts as a view, except that $conforms is always omitted - tpw.derivesFrom(defn.SubTypeClass) && - (defn.isNewCollections || // In 2.13, the type of `$conforms` changed from `A <:< A` to `A => A` - ref.symbol != defn.Predef_conforms) + // An implementation of <:< counts as a view + val isConforms = tpw.derivesFrom(defn.SubTypeClass) val hasExtensions = resType match { case SelectionProto(name, _, _, _) => tpw.memberBasedOnFlags(name, required = ExtensionMethod).exists @@ -522,7 +520,7 @@ trait ImplicitRunInfo { self: Run => def apply(tp: Type) = tp.widenDealias match { case tp: TypeRef => - ((defn.AnyType: Type) /: anchors(tp))(AndType.make(_, _)) + anchors(tp).foldLeft(defn.AnyType: Type)(AndType.make(_, _)) case tp: TypeVar => apply(tp.underlying) case tp: AppliedType if !tp.tycon.typeSymbol.isClass => @@ -531,7 +529,7 @@ trait ImplicitRunInfo { self: Run => case WildcardType(TypeBounds(lo, hi)) => AndType.make(lo, hi) case _ => arg } - (apply(tp.tycon) /: tp.args)((tc, arg) => AndType.make(tc, applyArg(arg))) + tp.args.foldLeft(apply(tp.tycon))((tc, arg) => AndType.make(tc, applyArg(arg))) case tp: TypeLambda => apply(tp.resType) case _ => @@ -1020,7 +1018,7 @@ trait Implicits { self: Typer => resType <:< target val 
tparams = poly.paramRefs val variances = caseClass.typeParams.map(_.paramVariance) - val instanceTypes = (tparams, variances).zipped.map((tparam, variance) => + val instanceTypes = tparams.lazyZip(variances).map((tparam, variance) => ctx.typeComparer.instanceType(tparam, fromBelow = variance < 0)) resType.substParams(poly, instanceTypes) } diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 32fa6971c1bb..f85621fe09a8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -285,14 +285,14 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { */ private def computeParamBindings(tp: Type, targs: List[Tree], argss: List[List[Tree]]): Unit = tp match { case tp: PolyType => - (tp.paramNames, targs).zipped.foreach { (name, arg) => + tp.paramNames.lazyZip(targs).foreach { (name, arg) => paramSpan(name) = arg.span paramBinding(name) = arg.tpe.stripTypeVar } computeParamBindings(tp.resultType, Nil, argss) case tp: MethodType => assert(argss.nonEmpty, i"missing bindings: $tp in $call") - (tp.paramNames, tp.paramInfos, argss.head).zipped.foreach { (name, paramtp, arg) => + tp.paramNames.lazyZip(tp.paramInfos).lazyZip(argss.head).foreach { (name, paramtp, arg) => paramSpan(name) = arg.span paramBinding(name) = arg.tpe.dealias match { case _: SingletonType if isIdempotentPath(arg) => arg.tpe @@ -538,7 +538,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { trace(i"inlining $call", inlining, show = true) { // The normalized bindings collected in `bindingsBuf` - bindingsBuf.transform { binding => + bindingsBuf.mapInPlace { binding => // Set trees to symbols allow macros to see the definition tree. // This is used by `underlyingArgument`. 
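Editor's aside, not part of the patch: the `.zipped` → `lazyZip` rewrites in this and many other hunks are the standard 2.13 migration, since `Tuple2`/`Tuple3` `.zipped` is deprecated there. A minimal runnable sketch (names invented):

object LazyZipSketch {
  def main(args: Array[String]): Unit = {
    val names = List("x", "y")
    val types = List("Int", "String")
    val args0 = List("1", "\"a\"")
    // 2.12 style (deprecated): (names, types, args0).zipped.map(...)
    // 2.13 style: chain lazyZip; the mapping function still takes the
    // elements directly, exactly as in the rewritten compiler code.
    val bindings = names.lazyZip(types).lazyZip(args0)
      .map((n, t, a) => s"val $n: $t = $a")
    bindings.foreach(println)
  }
}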
reducer.normalizeBinding(binding)(inlineCtx).setDefTree @@ -734,7 +734,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { ddef.tpe.widen match { case mt: MethodType if ddef.vparamss.head.length == args.length => val bindingsBuf = new mutable.ListBuffer[ValOrDefDef] - val argSyms = (mt.paramNames, mt.paramInfos, args).zipped.map { (name, paramtp, arg) => + val argSyms = mt.paramNames.lazyZip(mt.paramInfos).lazyZip(args).map { (name, paramtp, arg) => arg.tpe.dealias match { case ref @ TermRef(NoPrefix, _) => ref.symbol case _ => paramBindingDef(name, paramtp, arg, bindingsBuf)(ctx.withSource(cl.source)).symbol @@ -1011,7 +1011,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = { assert(tree.hasType, tree) val qual1 = typed(tree.qualifier, selectionProto(tree.name, pt, this)) - val res = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) + val res = constToLiteral(untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt)) ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.sourcePos) res } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 36fc755a67bc..c526a4324a10 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -133,7 +133,7 @@ trait NamerContextOps { this: Context => /** The method type corresponding to given parameters and result type */ def methodType(typeParams: List[Symbol], valueParamss: List[List[Symbol]], resultType: Type, isJava: Boolean = false)(implicit ctx: Context): Type = { val monotpe = - (valueParamss :\ resultType) { (params, resultType) => + valueParamss.foldRight(resultType) { (params, resultType) => val (isContextual, isImplicit, isErased) = if (params.isEmpty) (false, false, false) else (params.head.is(Given), params.head.is(Implicit), params.head.is(Erased)) @@ -720,7 +720,7 @@ class Namer { typer: Typer => stats.foreach(expand) mergeCompanionDefs() - val ctxWithStats = (ctx /: stats) ((ctx, stat) => indexExpanded(stat)(ctx)) + val ctxWithStats = stats.foldLeft(ctx)((ctx, stat) => indexExpanded(stat)(ctx)) createCompanionLinks(ctxWithStats) ctxWithStats } @@ -1248,7 +1248,7 @@ class Namer { typer: Typer => // TODO: Look only at member of supertype instead? 
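Another editor's aside (not in the patch): the symbolic folds `/:` and `:\` removed throughout these hunks are deprecated in 2.13; `foldLeft` and `foldRight` are the drop-in replacements, for example (invented data):

object FoldSketch {
  def main(args: Array[String]): Unit = {
    val paramCounts = List(2, 1, 3)
    // 2.12 style (deprecated): (0 /: paramCounts)(_ + _) and (paramCounts :\ "R")(...)
    val total    = paramCounts.foldLeft(0)(_ + _)
    val methodTp = paramCounts.foldRight("R")((n, acc) => s"($n params) => $acc")
    println(total)    // 6
    println(methodTp) // (2 params) => (1 params) => (3 params) => R
  }
}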
lazy val schema = paramFn(WildcardType) val site = sym.owner.thisType - ((NoType: Type) /: sym.owner.info.baseClasses.tail) { (tp, cls) => + sym.owner.info.baseClasses.tail.foldLeft(NoType: Type) { (tp, cls) => def instantiatedResType(info: Type, tparams: List[Symbol], paramss: List[List[Symbol]]): Type = info match { case info: PolyType => if (info.paramNames.length == typeParams.length) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 5c51d259919e..64f8fc44d00e 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -181,7 +181,7 @@ object RefChecks { def printMixinOverrideErrors(): Unit = { mixinOverrideErrors.toList match { - case List() => + case Nil => case List(MixinOverrideError(_, msg)) => ctx.error(msg, clazz.sourcePos) case MixinOverrideError(member, msg) :: others => @@ -364,7 +364,7 @@ object RefChecks { else if (member.owner != clazz && other.owner != clazz && !(other.owner derivesFrom member.owner)) emitOverrideError( - clazz + " inherits conflicting members:\n " + s"$clazz inherits conflicting members:\n " + infoStringWithLocation(other) + " and\n " + infoStringWithLocation(member) + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)") else @@ -457,8 +457,8 @@ object RefChecks { def abstractClassError(mustBeMixin: Boolean, msg: String): Unit = { def prelude = ( if (clazz.isAnonymousClass || clazz.is(Module)) "object creation impossible" - else if (mustBeMixin) clazz + " needs to be a mixin" - else clazz + " needs to be abstract") + ", since" + else if (mustBeMixin) s"$clazz needs to be a mixin" + else s"$clazz needs to be abstract") + ", since" if (abstractErrors.isEmpty) abstractErrors ++= List(prelude, msg) else abstractErrors += msg @@ -857,7 +857,7 @@ object RefChecks { class LevelInfo(outerLevelAndIndex: LevelAndIndex, stats: List[Tree])(implicit ctx: Context) extends OptLevelInfo { override val levelAndIndex: LevelAndIndex = - ((outerLevelAndIndex, 0) /: stats) {(mi, stat) => + stats.foldLeft(outerLevelAndIndex, 0) {(mi, stat) => val (m, idx) = mi val m1 = stat match { case stat: MemberDef => m.updated(stat.symbol, (this, idx)) @@ -988,7 +988,7 @@ class RefChecks extends MiniPhase { thisPhase => override def transformApply(tree: Apply)(implicit ctx: Context): Apply = { if (isSelfConstrCall(tree)) { - assert(currentLevel.isInstanceOf[LevelInfo], ctx.owner + "/" + i"$tree") + assert(currentLevel.isInstanceOf[LevelInfo], s"${ctx.owner}/" + i"$tree") val level = currentLevel.asInstanceOf[LevelInfo] if (level.maxIndex > 0) { // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717 diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index b36a6a7097aa..2b93cd4e46c1 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -72,7 +72,7 @@ trait TypeAssigner { def isRefinable(sym: Symbol) = !sym.is(Private) && !sym.isConstructor val refinableDecls = info.decls.filter(isRefinable) - val raw = (parentType /: refinableDecls)(addRefinement) + val raw = refinableDecls.foldLeft(parentType)(addRefinement) HKTypeLambda.fromParams(cls.typeParams, raw) match { case tl: HKTypeLambda => tl.derivedLambdaType(resType = close(tl.resType)) case tp => close(tp) @@ -555,7 +555,7 @@ trait TypeAssigner { else if (!rinfo.exists) parent // can happen after 
failure in self type definition else RefinedType(parent, rsym.name, rinfo) } - val refined = (parent.tpe /: refinements)(addRefinement) + val refined = refinements.foldLeft(parent.tpe)(addRefinement) tree.withType(RecType.closeOver(rt => refined.substThis(refineCls, rt.recThis))) } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 892ef2a15d93..8865e8154d8a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -991,13 +991,22 @@ class Typer extends Namer pt match { case SAMType(sam) if !defn.isFunctionType(pt) && mt <:< sam => - if (!isFullyDefined(pt, ForceDegree.all)) - ctx.error(ex"result type of lambda is an underspecified SAM type $pt", tree.sourcePos) - else if (pt.classSymbol.isOneOf(FinalOrSealed)) { + val targetTpe = + if (!isFullyDefined(pt, ForceDegree.all)) { + if (pt.isRef(defn.PartialFunctionClass)) + // Replace the underspecified expected type by one based on the closure method type + defn.PartialFunctionOf(mt.firstParamTypes.head, mt.resultType) + else { + ctx.error(ex"result type of lambda is an underspecified SAM type $pt", tree.sourcePos) + pt + } + } + else pt + if (pt.classSymbol.isOneOf(FinalOrSealed)) { val offendingFlag = pt.classSymbol.flags & FinalOrSealed ctx.error(ex"lambda cannot implement $offendingFlag ${pt.classSymbol}", tree.sourcePos) } - TypeTree(pt) + TypeTree(targetTpe) case _ => if (mt.isParamDependent) { throw new java.lang.Error( @@ -1337,7 +1346,7 @@ class Typer extends Namer } args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] } - val paramBounds = (tparams, args).zipped.map { + val paramBounds = tparams.lazyZip(args).map { case (tparam, TypeBoundsTree(EmptyTree, EmptyTree)) => // if type argument is a wildcard, suppress kind checking since // there is no real argument. @@ -1532,7 +1541,7 @@ class Typer extends Namer // that their type parameters are aliases of the class type parameters. 
// See pos/i941.scala rhsCtx.gadt.addToConstraint(tparams1.map(_.symbol)) - (tparams1, sym.owner.typeParams).zipped.foreach { (tdef, tparam) => + tparams1.lazyZip(sym.owner.typeParams).foreach { (tdef, tparam) => val tr = tparam.typeRef rhsCtx.gadt.addBound(tdef.symbol, tr, isUpper = false) rhsCtx.gadt.addBound(tdef.symbol, tr, isUpper = true) @@ -1747,7 +1756,7 @@ class Typer extends Namer parents match { case p :: _ if p.classSymbol.isRealClass => parents case _ => - val pcls = (defn.ObjectClass /: parents)(improve) + val pcls = parents.foldLeft(defn.ObjectClass)(improve) typr.println(i"ensure first is class $parents%, % --> ${parents map (_ baseType pcls)}%, %") val first = ctx.typeComparer.glb(defn.ObjectType :: parents.map(_.baseType(pcls))) checkFeasibleParent(first, ctx.source.atSpan(span), em" in inferred superclass $first") :: parents @@ -1931,9 +1940,9 @@ class Typer extends Namer val pts = if (arity == pt.tupleArity) pt.tupleElementTypes else List.fill(arity)(defn.AnyType) - val elems = (tree.trees, pts).zipped.map(typed(_, _)) + val elems = tree.trees.lazyZip(pts).map(typed(_, _)) if (ctx.mode.is(Mode.Type)) - (elems :\ (TypeTree(defn.UnitType): Tree))((elemTpt, elemTpts) => + elems.foldRight(TypeTree(defn.UnitType): Tree)((elemTpt, elemTpts) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), List(elemTpt, elemTpts))) .withSpan(tree.span) else { @@ -1943,7 +1952,7 @@ class Typer extends Namer val app1 = typed(app, defn.TupleXXLClass.typeRef) if (ctx.mode.is(Mode.Pattern)) app1 else { - val elemTpes = (elems, pts).zipped.map((elem, pt) => + val elemTpes = elems.lazyZip(pts).map((elem, pt) => ctx.typeComparer.widenInferred(elem.tpe, pt)) val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) @@ -2550,7 +2559,7 @@ class Typer extends Namer val propFail = propagatedFailure(args) def issueErrors(): Tree = { - (wtp.paramNames, wtp.paramInfos, args).zipped.foreach { (paramName, formal, arg) => + wtp.paramNames.lazyZip(wtp.paramInfos).lazyZip(args).foreach { (paramName, formal, arg) => arg.tpe match { case failure: SearchFailureType => ctx.error( @@ -2571,7 +2580,7 @@ class Typer extends Namer // If method has default params, fall back to regular application // where all inferred implicits are passed as named args. if (methPart(tree).symbol.hasDefaultParams && !propFail.isInstanceOf[AmbiguousImplicits]) { - val namedArgs = (wtp.paramNames, args).zipped.flatMap { (pname, arg) => + val namedArgs = wtp.paramNames.lazyZip(args).flatMap { (pname, arg) => if (arg.tpe.isError) Nil else untpd.NamedArg(pname, untpd.TypedSplice(arg)) :: Nil } tryEither { @@ -2688,7 +2697,10 @@ class Typer extends Namer tree.tpe <:< wildApprox(pt) readaptSimplified(Inliner.inlineCall(tree)) } - else if (tree.symbol.isScala2Macro) { + else if (tree.symbol.isScala2Macro && + // raw and s are eliminated by the StringInterpolatorOpt phase + tree.symbol != defn.StringContext_raw && + tree.symbol != defn.StringContext_s) { if (tree.symbol eq defn.StringContext_f) { // As scala.StringContext.f is defined in the standard library which // we currently do not bootstrap we cannot implement the macro in the library. diff --git a/compiler/src/dotty/tools/dotc/typer/Variances.scala b/compiler/src/dotty/tools/dotc/typer/Variances.scala index 014c23c961ee..4a42ba32a1d0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Variances.scala +++ b/compiler/src/dotty/tools/dotc/typer/Variances.scala @@ -28,7 +28,7 @@ object Variances { /** Compute variance of type parameter `tparam' in types of all symbols `sym'. 
*/ def varianceInSyms(syms: List[Symbol])(tparam: Symbol)(implicit ctx: Context): Variance = - (Bivariant /: syms) ((v, sym) => v & varianceInSym(sym)(tparam)) + syms.foldLeft(Bivariant) ((v, sym) => v & varianceInSym(sym)(tparam)) /** Compute variance of type parameter `tparam' in type of symbol `sym'. */ def varianceInSym(sym: Symbol)(tparam: Symbol)(implicit ctx: Context): Variance = @@ -37,7 +37,7 @@ object Variances { /** Compute variance of type parameter `tparam' in all types `tps'. */ def varianceInTypes(tps: List[Type])(tparam: Symbol)(implicit ctx: Context): Variance = - (Bivariant /: tps) ((v, tp) => v & varianceInType(tp)(tparam)) + tps.foldLeft(Bivariant) ((v, tp) => v & varianceInType(tp)(tparam)) /** Compute variance of type parameter `tparam' in all type arguments * tps which correspond to formal type parameters `tparams1'. @@ -55,7 +55,7 @@ object Variances { /** Compute variance of type parameter `tparam' in all type annotations `annots'. */ def varianceInAnnots(annots: List[Annotation])(tparam: Symbol)(implicit ctx: Context): Variance = { - (Bivariant /: annots) ((v, annot) => v & varianceInAnnot(annot)(tparam)) + annots.foldLeft(Bivariant) ((v, annot) => v & varianceInAnnot(annot)(tparam)) } /** Compute variance of type parameter `tparam' in type annotation `annot'. */ diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index 024383af4f89..972ca57c5c25 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -95,7 +95,7 @@ class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int, loadFactor: F /** Add all entries in `xs` to set */ def addEntries(xs: TraversableOnce[T]): Unit = { - xs foreach addEntry + xs.iterator foreach addEntry } /** The iterator of all elements in the set */ diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala index cc54519fe1fc..61f7457a6435 100644 --- a/compiler/src/dotty/tools/dotc/util/LRUCache.scala +++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala @@ -95,6 +95,6 @@ object LRUCache { /** The initial ring: 0 -> 1 -> ... 
-> 7 -> 0 */ val initialRing: SixteenNibbles = - (new SixteenNibbles(0L) /: (0 until Retained))((nibbles, idx) => + (0 until Retained).foldLeft(new SixteenNibbles(0L))((nibbles, idx) => nibbles.updated(idx, (idx + 1) % Retained)) } diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala index bb7bc52febc9..c4e9ad0f9e62 100644 --- a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala +++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala @@ -144,7 +144,7 @@ object ShowPickled { val accessBoundary = ( for (idx <- privateWithin) yield { val s = entryList nameAt idx - idx + "(" + s + ")" + s"$idx($s)" } ) val flagString = PickleBuffer.unpickleScalaFlags(pflags, isType).toString @@ -169,12 +169,12 @@ object ShowPickled { */ def printEntry(i: Int): Unit = { buf.readIndex = index(i) - p(i + "," + buf.readIndex + ": ") + p(s"$i,${buf.readIndex}: ") val tag = buf.readByte() out.print(tag2string(tag)) val len = buf.readNat() val end = len + buf.readIndex - p(" " + len + ":") + p(s" $len:") tag match { case TERMname => out.print(" ") diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index e782aeb43e84..9b4d7934dcc0 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -84,12 +84,10 @@ object Signatures { def toParamss(tp: MethodType)(implicit ctx: Context): List[List[Param]] = { val rest = tp.resType match { case res: MethodType => - // Hide parameter lists consisting only of CanBuildFrom or DummyImplicit, - // we can remove the CanBuildFrom special-case once we switch to the 2.13 standard library. + // Hide parameter lists consisting only of DummyImplicit, if (res.resultType.isParameterless && res.isImplicitMethod && res.paramInfos.forall(info => - info.classSymbol.fullName.toString == "scala.collection.generic.CanBuildFrom" || info.classSymbol.derivesFrom(ctx.definitions.DummyImplicitClass))) Nil else diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala index e71e1ce8a65d..b2483e47198a 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala @@ -140,7 +140,7 @@ object SimpleIdentitySet { def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean = xs.asInstanceOf[Array[E]].exists(p) def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = - (z /: xs.asInstanceOf[Array[E]])(f) + xs.asInstanceOf[Array[E]].foldLeft(z)(f) def toList: List[Elem] = { val buf = new mutable.ListBuffer[Elem] foreach(buf += _) diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index ef1d333cdab2..22c62798c278 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -201,7 +201,7 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e } // add an element to this set unless it's already in there and return this set - override def += (elem: A): this.type = elem match { + override def addOne(elem: A): this.type = elem match { case null => throw new NullPointerException("WeakHashSet cannot hold nulls") case _ => { removeStaleEntries() @@ -228,7 +228,7 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e } // remove an element from this set and return this set - 
override def -= (elem: A): this.type = elem match { + override def subtractOne(elem: A): this.type = elem match { case null => this case _ => { removeStaleEntries() diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala index ec0f54738774..e857e46b3f90 100644 --- a/compiler/src/dotty/tools/io/Jar.scala +++ b/compiler/src/dotty/tools/io/Jar.scala @@ -35,7 +35,7 @@ import scala.annotation.tailrec // static Attributes.Name SPECIFICATION_VENDOR // static Attributes.Name SPECIFICATION_VERSION -class Jar(file: File) extends Iterable[JarEntry] { +class Jar(file: File) { def this(jfile: JFile) = this(File(jfile.toPath)) def this(path: String) = this(File(path)) @@ -64,10 +64,9 @@ class Jar(file: File) extends Iterable[JarEntry] { new JarWriter(file, Jar.WManifest.apply(mainAttrs: _*).underlying) } - override def foreach[U](f: JarEntry => U): Unit = withJarInput { in => - Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f + def toList: List[JarEntry] = withJarInput { in => + Iterator.continually(in.getNextJarEntry()).takeWhile(_ != null).toList } - override def iterator: Iterator[JarEntry] = this.toList.iterator def getEntryStream(entry: JarEntry): java.io.InputStream = jarFile getInputStream entry match { case null => errorFn("No such entry: " + entry) ; null diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index 132a03ea5f54..0ce10a454a03 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -5,8 +5,6 @@ import java.io.{ StringWriter, PrintWriter } import java.lang.{ ClassLoader, ExceptionInInitializerError } import java.lang.reflect.InvocationTargetException -import scala.runtime.ScalaRunTime - import dotc.core.Contexts.Context import dotc.core.Denotations.Denotation import dotc.core.Flags @@ -23,25 +21,42 @@ import dotc.core.StdNames.str */ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) { + private[this] val MaxStringElements: Int = 1000 // no need to mkString billions of elements + private[this] var myClassLoader: ClassLoader = _ + private[this] var myReplStringOf: Object => String = _ + + /** Class loader used to load compiled code */ private[repl] def classLoader()(implicit ctx: Context) = if (myClassLoader != null) myClassLoader else { val parent = parentClassLoader.getOrElse { - // the compiler's classpath, as URL's val compilerClasspath = ctx.platform.classPath(ctx).asURLs - new java.net.URLClassLoader(compilerClasspath.toArray, classOf[ReplDriver].getClassLoader) + new java.net.URLClassLoader(compilerClasspath.toArray, null) } myClassLoader = new AbstractFileClassLoader(ctx.settings.outputDir.value, parent) - // Set the current Java "context" class loader to this rendering class loader - Thread.currentThread.setContextClassLoader(myClassLoader) + myReplStringOf = { + // We need to use the ScalaRunTime class coming from the scala-library + // on the user classpath, and not the one avilable in the current + // classloader, so we use reflection instead of simply calling + // `ScalaRunTime.replStringOf`. 
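A hedged, generic sketch of the reflective call set up just below (the class and method names mirror the ones the patch itself looks up; the surrounding object is invented, and the context class loader merely stands in for the REPL's AbstractFileClassLoader):

object ReflectiveReplStringOfSketch {
  def main(args: Array[String]): Unit = {
    // Load ScalaRunTime through an explicit loader and invoke the static
    // replStringOf(Object, Int) method reflectively.
    val loader   = Thread.currentThread.getContextClassLoader
    val runtime  = Class.forName("scala.runtime.ScalaRunTime", true, loader)
    val meth     = runtime.getMethod("replStringOf", classOf[Object], classOf[Int])
    val rendered = meth.invoke(null, List(1, 2, 3), Integer.valueOf(1000)).asInstanceOf[String]
    print(rendered) // List(1, 2, 3)
  }
}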
+ val scalaRuntime = Class.forName("scala.runtime.ScalaRunTime", true, myClassLoader) + val meth = scalaRuntime.getMethod("replStringOf", classOf[Object], classOf[Int]) + + (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements)).asInstanceOf[String] + } myClassLoader } - private[this] def MaxStringElements = 1000 // no need to mkString billions of elements + /** Return a String representation of a value we got from `classLoader()`. */ + private[repl] def replStringOf(value: Object)(implicit ctx: Context): String = { + assert(myReplStringOf != null, + "replStringOf should only be called on values created using `classLoader()`, but `classLoader()` has not been called so far") + myReplStringOf(value) + } /** Load the value of the symbol using reflection. * * @@ -55,7 +70,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) { resObj .getDeclaredMethods.find(_.getName == sym.name.encode.toString) .map(_.invoke(null)) - val string = value.map(ScalaRunTime.replStringOf(_, MaxStringElements).trim) + val string = value.map(replStringOf(_).trim) if (!sym.is(Flags.Method) && sym.info == defn.UnitType) None else diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 9ccf543e4c93..ce89b744dd35 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -139,8 +139,19 @@ class ReplDriver(settings: Array[String], // TODO: i5069 final def bind(name: String, value: Any)(implicit state: State): State = state - private def withRedirectedOutput(op: => State): State = - Console.withOut(out) { Console.withErr(out) { op } } + private def withRedirectedOutput(op: => State): State = { + val savedOut = System.out + val savedErr = System.err + try { + System.setOut(out) + System.setErr(out) + op + } + finally { + System.setOut(savedOut) + System.setErr(savedErr) + } + } private def newRun(state: State) = { val run = compiler.newRun(rootCtx.fresh.setReporter(newStoreReporter), state) diff --git a/compiler/src/dotty/tools/repl/ScriptEngine.scala b/compiler/src/dotty/tools/repl/ScriptEngine.scala index b3e80887f443..05f22ad1669b 100644 --- a/compiler/src/dotty/tools/repl/ScriptEngine.scala +++ b/compiler/src/dotty/tools/repl/ScriptEngine.scala @@ -17,7 +17,12 @@ import dotc.core.StdNames.str * println(e.eval("42")) */ class ScriptEngine extends AbstractScriptEngine { - private[this] val driver = new ReplDriver(Array("-usejavacp", "-color:never"), Console.out, None) + private[this] val driver = new ReplDriver( + Array( + "-classpath", "", // Avoid the default "."
+ "-usejavacp", + "-color:never" + ), Console.out, None) private[this] val rendering = new Rendering private[this] var state: State = driver.initialState diff --git a/compiler/test-resources/repl/i5551 b/compiler/test-resources/repl-macros/i5551 similarity index 100% rename from compiler/test-resources/repl/i5551 rename to compiler/test-resources/repl-macros/i5551 diff --git a/compiler/test/debug/Gen.scala b/compiler/test/debug/Gen.scala index b8db4a5e1a59..179331492669 100755 --- a/compiler/test/debug/Gen.scala +++ b/compiler/test/debug/Gen.scala @@ -77,7 +77,7 @@ object Gen { } } - Program(breaks, cmds) + Program(breaks.toList, cmds.toList) } def generate(program: Program, source: String = "tests/debug/"): String = { diff --git a/compiler/test/dotty/tools/CheckTypesTests.scala b/compiler/test/dotty/tools/CheckTypesTests.scala index 0b68fd5e5072..021574d0c271 100644 --- a/compiler/test/dotty/tools/CheckTypesTests.scala +++ b/compiler/test/dotty/tools/CheckTypesTests.scala @@ -61,7 +61,7 @@ class CheckTypeTest extends DottyTest { case (List(sups, subs), context) => implicit val ctx = context - (sups, subs).zipped.foreach { (sup, sub) => assertTrue(sub <:< sup) } + sups.lazyZip(subs).foreach { (sup, sub) => assertTrue(sub <:< sup) } case _ => fail } diff --git a/compiler/test/dotty/tools/TestSources.scala b/compiler/test/dotty/tools/TestSources.scala index 878bca27b93a..4fbf0e9fc5dd 100644 --- a/compiler/test/dotty/tools/TestSources.scala +++ b/compiler/test/dotty/tools/TestSources.scala @@ -7,14 +7,6 @@ import scala.collection.JavaConverters._ object TestSources { - // Std Lib - def stdLibSources: List[String] = { - val blacklisted = List( - "volatile.scala", // see #5610 - ) - sources(Paths.get("tests/scala2-library/src/library/"), excludedFiles = blacklisted) - } - // pos tests lists def posFromTastyBlacklistFile: String = "compiler/test/dotc/pos-from-tasty.blacklist" diff --git a/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala index a290e3583a7d..27cf4d31ea3a 100644 --- a/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala +++ b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala @@ -35,7 +35,7 @@ object ASMConverters { def dropStaleLabels = { val definedLabels: Set[Instruction] = self.filter(_.isInstanceOf[Label]).toSet - val usedLabels: Set[Instruction] = self.flatMap(referencedLabels)(collection.breakOut) + val usedLabels: Set[Instruction] = self.iterator.flatMap(referencedLabels).toSet self.filterNot(definedLabels diff usedLabels) } @@ -144,19 +144,19 @@ object ASMConverters { case i: t.LineNumberNode => LineNumber (i.line, applyLabel(i.start)) } - private def convertBsmArgs(a: Array[Object]): List[Object] = a.map({ + private def convertBsmArgs(a: Array[Object]): List[Object] = a.iterator.map({ case h: asm.Handle => convertMethodHandle(h) case _ => a // can be: Class, method Type, primitive constant - })(collection.breakOut) + }).toList private def convertMethodHandle(h: asm.Handle): MethodHandle = MethodHandle(h.getTag, h.getOwner, h.getName, h.getDesc, h.isInterface) private def convertHandlers(method: t.MethodNode): List[ExceptionHandler] = { - method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`)))(collection.breakOut) + method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`))).toList } private def convertLocalVars(method: t.MethodNode): 
List[LocalVariable] = { - method.localVariables.asScala.map(v => LocalVariable(v.name, v.desc, Option(v.signature), applyLabel(v.start), applyLabel(v.end), v.index))(collection.breakOut) + method.localVariables.asScala.map(v => LocalVariable(v.name, v.desc, Option(v.signature), applyLabel(v.start), applyLabel(v.end), v.index)).toList } } @@ -233,7 +233,7 @@ object ASMConverters { def unconvertBsmArgs(a: List[Object]): Array[Object] = a.map({ case h: MethodHandle => unconvertMethodHandle(h) case o => o - })(collection.breakOut) + }).toArray private def visitMethod(method: t.MethodNode, instruction: Instruction, asmLabel: Map[Label, asm.Label]): Unit = instruction match { case Field(op, owner, name, desc) => method.visitFieldInsn(op, owner, name, desc) diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala index f547c3576aab..5d5626888183 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala @@ -85,8 +85,8 @@ trait DottyBytecodeTest { val width = isa.map(_.toString.length).max val lineWidth = len.toString.length (1 to len) foreach { line => - val isaPadded = isa.map(_.toString) orElse Stream.continually("") - val isbPadded = isb.map(_.toString) orElse Stream.continually("") + val isaPadded = isa.map(_.toString) orElse LazyList.continually("") + val isbPadded = isb.map(_.toString) orElse LazyList.continually("") val a = isaPadded(line-1) val b = isbPadded(line-1) @@ -157,7 +157,7 @@ trait DottyBytecodeTest { (false, s"Different member counts in $name1 and $name2") } else { val msg = new StringBuilder - val success = (ms1, ms2).zipped forall { (m1, m2) => + val success = ms1.lazyZip(ms2) forall { (m1, m2) => val c1 = f(m1) val c2 = f(m2).replaceAllLiterally(name2, name1) if (c1 == c2) diff --git a/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala b/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala index 4ce5351f9d2d..6de37d0fad39 100644 --- a/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala @@ -117,7 +117,7 @@ class StringConcatTest extends DottyBytecodeTest { chsq: java.lang.CharSequence, chrs: Array[Char]) = { val s1 = str + obj + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs - val s2 = obj + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs + val s2 = String.valueOf(obj) + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs s1 + "//" + s2 } def sbuf = { val r = new java.lang.StringBuffer(); r.append("sbuf"); r } diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 03184a19a06b..c68eff6ea7be 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -32,16 +32,9 @@ class CompilationTests extends ParallelTesting { // Positive tests ------------------------------------------------------------ - // @Test // enable to test compileStdLib separately with detailed stats - def compileStdLibOnly: Unit = { - implicit val testGroup: TestGroup = TestGroup("compileStdLibOnly") - compileList("compileStdLib", TestSources.stdLibSources, scala2Mode.and("-migration", "-Yno-inline")) - }.checkCompile() - @Test def pos: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePos") aggregateTests( - compileList("compileStdLib", TestSources.stdLibSources, 
scala2Mode.and("-migration", "-Yno-inline")), compileFile("tests/pos/nullarify.scala", defaultOptions.and("-Ycheck:nullarify")), compileFile("tests/pos-scala2/rewrites.scala", scala2Mode.and("-rewrite")).copyToTarget(), compileFile("tests/pos-special/utf8encoded.scala", explicitUTF8), @@ -50,45 +43,10 @@ class CompilationTests extends ParallelTesting { compileFile("tests/pos-special/completeFromSource/Test2.scala", defaultOptions.and("-sourcepath", "tests/pos-special")), compileFile("tests/pos-special/completeFromSource/Test3.scala", defaultOptions.and("-sourcepath", "tests/pos-special", "-scansource")), compileFile("tests/pos-special/completeFromSource/nested/Test4.scala", defaultOptions.and("-sourcepath", "tests/pos-special", "-scansource")), - compileFile("tests/pos-special/repeatedArgs213.scala", defaultOptions.and("-Ynew-collections")), compileFilesInDir("tests/pos-special/fatal-warnings", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileList( - "compileMixed", - List( - "tests/pos/B.scala", - "tests/scala2-library/src/library/scala/collection/immutable/Seq.scala", - "tests/scala2-library/src/library/scala/collection/parallel/ParSeq.scala", - "tests/scala2-library/src/library/scala/package.scala", - "tests/scala2-library/src/library/scala/collection/GenSeqLike.scala", - "tests/scala2-library/src/library/scala/collection/SeqLike.scala", - "tests/scala2-library/src/library/scala/collection/generic/GenSeqFactory.scala" - ), - scala2Mode - ), compileFilesInDir("tests/pos-special/spec-t5545", defaultOptions), compileFilesInDir("tests/pos-special/strawman-collections", defaultOptions), compileFilesInDir("tests/pos-special/isInstanceOf", allowDeepSubtypes.and("-Xfatal-warnings")), - compileFile("tests/scala2-library/src/library/scala/collection/immutable/IndexedSeq.scala", defaultOptions), - compileFile("tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSetLike.scala", defaultOptions), - compileList( - "parSetSubset", - List( - "tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSetLike.scala", - "tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSet.scala", - "tests/scala2-library/src/library/scala/collection/mutable/SetLike.scala" - ), - scala2Mode - ), - // FIXME: This fails with .times(2), see #2799 - compileList( - "testPredefDeprecatedNonCyclic", - List( - "tests/scala2-library/src/library/scala/io/Position.scala", - "tests/scala2-library/src/library/scala/Predef.scala", - "tests/scala2-library/src/library/scala/deprecated.scala" - ), - scala2Mode - ), compileFilesInDir("tests/new", defaultOptions), compileFilesInDir("tests/pos-scala2", scala2Mode), compileFilesInDir("tests/pos", defaultOptions), @@ -111,7 +69,6 @@ class CompilationTests extends ParallelTesting { compileFile("tests/pos/erasure.scala", defaultOptions), compileFile("tests/pos/Coder.scala", defaultOptions), compileFile("tests/pos/blockescapes.scala", defaultOptions), - compileFile("tests/pos/collections.scala", defaultOptions), compileFile("tests/pos/functions1.scala", defaultOptions), compileFile("tests/pos/implicits1.scala", defaultOptions), compileFile("tests/pos/inferred.scala", defaultOptions), @@ -174,7 +131,6 @@ class CompilationTests extends ParallelTesting { compileFile("tests/neg-custom-args/i3627.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/matchtype-loop.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/completeFromSource/nested/Test1.scala", defaultOptions.and("-sourcepath", "tests/neg-custom-args", 
"-scansource")), - compileFile("tests/neg-custom-args/repeatedArgs213.scala", defaultOptions.and("-Ynew-collections")), compileList("duplicate source", List( "tests/neg-custom-args/toplevel-samesource/S.scala", "tests/neg-custom-args/toplevel-samesource/nested/S.scala"), diff --git a/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala b/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala index fd998e9d7c7f..a23ca77707ee 100644 --- a/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala +++ b/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala @@ -28,7 +28,12 @@ class InterfaceEntryPointTest { if (Files.notExists(out)) Files.createDirectory(out) - val args = sources ++ List("-d", out.toString, "-usejavacp") + val args = sources ++ List( + "-d", + out.toString, + "-classpath", "", // Avoid the default "." + "-usejavacp" + ) val mainClass = Class.forName("dotty.tools.dotc.Main") val process = mainClass.getMethod("process", diff --git a/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala b/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala index 7e2cd290a6eb..9d3edb73ca78 100644 --- a/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala +++ b/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala @@ -57,9 +57,4 @@ class ScannerTest extends DottyTest { def scanDotty() = { scanDir("src") } - - @Test - def scanScala() = { - scanDir("../tests/scala2-library/src") - } } diff --git a/compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala b/compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala index a69a2dd55151..8e2011f4b2d0 100644 --- a/compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala +++ b/compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala @@ -12,7 +12,6 @@ object desugarPackage extends DeSugarTest { val start = System.nanoTime() val startNodes = Trees.ntrees parseDir("./src") - parseDir("./tests/scala2-library/src") val ms1 = (System.nanoTime() - start)/1000000 val nodes = Trees.ntrees val buf = parsedTrees map desugarTree diff --git a/compiler/test/dotty/tools/dotc/parsing/parsePackage.scala b/compiler/test/dotty/tools/dotc/parsing/parsePackage.scala index 8d1fa2eee7f5..c58cc673ccee 100644 --- a/compiler/test/dotty/tools/dotc/parsing/parsePackage.scala +++ b/compiler/test/dotty/tools/dotc/parsing/parsePackage.scala @@ -66,7 +66,6 @@ object parsePackage extends ParserTest { nodes = 0 val start = System.nanoTime() parseDir("./src") - parseDir("./tests/scala2-library/src") val ms1 = (System.nanoTime() - start)/1000000 val buf = parsedTrees map transformer.transform val ms2 = (System.nanoTime() - start)/1000000 diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index c718bc8329af..7767e5891b9c 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -43,7 +43,7 @@ class PrintingTest { val actualLines = byteStream.toString("UTF-8").split("\\r?\\n") - FileDiff.checkAndDump(path.toString, actualLines, checkFilePath) + FileDiff.checkAndDump(path.toString, actualLines.toIndexedSeq, checkFilePath) } @Test diff --git a/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTests.scala b/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTests.scala index d0ec7fbc87e6..d539e25b31a3 100644 --- a/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTests.scala +++ b/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTests.scala @@ -1475,18 +1475,6 @@ class 
ErrorMessagesTests extends ErrorMessagesTest { assertEquals("class Object", parentSym.show) } - @Test def i3187 = - checkMessagesAfter(GenBCode.name) { - """ - |package scala - |object collection - """.stripMargin - }.expect { (itcx, messages) => - implicit val ctx: Context = itcx - - assert(ctx.reporter.hasErrors) - } - @Test def typeDoubleDeclaration = checkMessagesAfter(FrontEnd.name) { """ diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index 22dd15aee97c..4d0d197cbc47 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -34,7 +34,7 @@ class PatmatExhaustivityTest { val actualLines: Seq[String] = stringBuffer.toString.trim.replaceAll("\\s+\n", "\n") match { case "" => Nil - case s => s.split("\\r?\\n") + case s => s.split("\\r?\\n").toIndexedSeq } val baseFilePath = path.toString.stripSuffix(".scala") val checkFilePath = baseFilePath + ".check" @@ -61,10 +61,10 @@ class PatmatExhaustivityTest { val actualLines: Seq[String] = stringBuffer.toString.trim.replaceAll("\\s+\n", "\n") match { case "" => Nil - case s => s.split("\\r?\\n") + case s => s.split("\\r?\\n").toIndexedSeq } - val checkFilePath = path + File.separator + "expected.check" + val checkFilePath = s"${path}${File.separator}expected.check" FileDiff.checkAndDump(path.toString, actualLines, checkFilePath) } diff --git a/compiler/test/dotty/tools/dotc/typer/DivergenceChecker.scala b/compiler/test/dotty/tools/dotc/typer/DivergenceChecker.scala index eaf133bfe0ad..5088f9cfd97a 100644 --- a/compiler/test/dotty/tools/dotc/typer/DivergenceChecker.scala +++ b/compiler/test/dotty/tools/dotc/typer/DivergenceChecker.scala @@ -58,7 +58,7 @@ class DivergenceCheckerTests extends DottyTest { 5 ) - (tpes, expectedSizes, expectedCoveringSets).zipped.foreach { + tpes.lazyZip(expectedSizes).lazyZip(expectedCoveringSets).foreach { case (tpe, expectedSize, expectedCoveringSet) => val size = tpe.typeSize val cs = tpe.coveringSet diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index ad9be3401bfc..34867fa29474 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -65,7 +65,7 @@ class ReplCompilerTests extends ReplTest { assertEquals(1, state.imports.size) run("""mutable.Map("one" -> 1)""") assertEquals( - "val res0: scala.collection.mutable.Map[String, Int] = Map(one -> 1)", + "val res0: scala.collection.mutable.Map[String, Int] = HashMap(one -> 1)", storedOutput().trim ) } diff --git a/compiler/test/dotty/tools/repl/ReplTest.scala b/compiler/test/dotty/tools/repl/ReplTest.scala index 921d16a651a3..5a59de3e4ad5 100644 --- a/compiler/test/dotty/tools/repl/ReplTest.scala +++ b/compiler/test/dotty/tools/repl/ReplTest.scala @@ -3,19 +3,27 @@ package repl import vulpix.TestConfiguration -import java.io.{ByteArrayOutputStream, PrintStream} +import java.lang.System.{lineSeparator => EOL} +import java.io.{ByteArrayOutputStream, File => JFile, PrintStream} +import scala.io.Source + +import scala.collection.mutable.ArrayBuffer import dotty.tools.dotc.reporting.MessageRendering import org.junit.{After, Before} +import org.junit.Assert._ -class ReplTest private (out: ByteArrayOutputStream) extends ReplDriver( - Array("-classpath", TestConfiguration.basicClasspath, "-color:never"), +class 
ReplTest(withCompiler: Boolean = false, out: ByteArrayOutputStream = new ByteArrayOutputStream) extends ReplDriver( + Array( + "-classpath", + if (withCompiler) + TestConfiguration.withCompilerClasspath + else + TestConfiguration.basicClasspath, + "-color:never"), new PrintStream(out) ) with MessageRendering { - - def this() = this(new ByteArrayOutputStream) - /** Get the stored output from `out`, resetting the buffer */ def storedOutput(): String = { val output = stripColor(out.toString) @@ -37,4 +45,77 @@ class ReplTest private (out: ByteArrayOutputStream) extends ReplDriver( implicit class TestingState(state: State) { def andThen[A](op: State => A): A = op(state) } + + def scripts(path: String): Array[JFile] = { + val dir = new JFile(getClass.getResource(path).getPath) + assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir") + dir.listFiles + } + + def testFile(f: JFile): Unit = { + val prompt = "scala>" + val lines = Source.fromFile(f, "UTF-8").getLines().buffered + + assert(lines.head.startsWith(prompt), + s"""Each file has to start with the prompt: "$prompt"""") + + def extractInputs(prompt: String): List[String] = { + val input = lines.next() + + if (!input.startsWith(prompt)) extractInputs(prompt) + else if (lines.hasNext) { + // read lines and strip trailing whitespace: + while (lines.hasNext && !lines.head.startsWith(prompt)) + lines.next() + + input :: { if (lines.hasNext) extractInputs(prompt) else Nil } + } + else Nil + } + + def evaluate(state: State, input: String, prompt: String) = + try { + val nstate = run(input.drop(prompt.length))(state) + val out = input + EOL + storedOutput() + (out, nstate) + } + catch { + case ex: Throwable => + System.err.println(s"failed while running script: $f, on:\n$input") + throw ex + } + + def filterEmpties(line: String): List[String] = + line.replaceAll("""(?m)\s+$""", "") match { + case "" => Nil + case nonEmptyLine => nonEmptyLine :: Nil + } + + val expectedOutput = + Source.fromFile(f, "UTF-8").getLines().flatMap(filterEmpties).mkString(EOL) + val actualOutput = { + resetToInitial() + val inputRes = extractInputs(prompt) + val buf = new ArrayBuffer[String] + inputRes.foldLeft(initialState) { (state, input) => + val (out, nstate) = evaluate(state, input, prompt) + buf.append(out) + + assert(out.endsWith("\n"), + s"Expected output of $input to end with newline") + + nstate + } + buf.flatMap(filterEmpties).mkString(EOL) + } + + if (expectedOutput != actualOutput) { + println("expected =========>") + println(expectedOutput) + println("actual ===========>") + println(actualOutput) + + fail(s"Error in file $f, expected output did not match actual") + } + } } diff --git a/compiler/test/dotty/tools/repl/ScriptedTests.scala b/compiler/test/dotty/tools/repl/ScriptedTests.scala index bdc76f1e1c00..70f2a44ceea8 100644 --- a/compiler/test/dotty/tools/repl/ScriptedTests.scala +++ b/compiler/test/dotty/tools/repl/ScriptedTests.scala @@ -2,98 +2,16 @@ package dotty package tools package repl -import java.io.{File => JFile} -import java.lang.System.{lineSeparator => EOL} - -import org.junit.Assert._ import org.junit.Test -import org.junit.experimental.categories.Category - -import scala.collection.mutable.ArrayBuffer -import scala.io.Source - -import dotc.reporting.MessageRendering /** Runs all tests contained in `compiler/test-resources/repl/` */ -class ScriptedTests extends ReplTest with MessageRendering { - - private def scripts(path: String): Array[JFile] = { - val dir = new JFile(getClass.getResource(path).getPath) - assert(dir.exists && 
dir.isDirectory, "Couldn't load scripts dir") - dir.listFiles - } - - private def testFile(f: JFile): Unit = { - val prompt = "scala>" - val lines = Source.fromFile(f, "UTF-8").getLines().buffered - - assert(lines.head.startsWith(prompt), - s"""Each file has to start with the prompt: "$prompt"""") - - def extractInputs(prompt: String): List[String] = { - val input = lines.next() - - if (!input.startsWith(prompt)) extractInputs(prompt) - else if (lines.hasNext) { - // read lines and strip trailing whitespace: - while (lines.hasNext && !lines.head.startsWith(prompt)) - lines.next() - - input :: { if (lines.hasNext) extractInputs(prompt) else Nil } - } - else Nil - } - - def evaluate(state: State, input: String, prompt: String) = - try { - val nstate = run(input.drop(prompt.length))(state) - val out = input + EOL + storedOutput() - (out, nstate) - } - catch { - case ex: Throwable => - System.err.println(s"failed while running script: $f, on:\n$input") - throw ex - } - - def filterEmpties(line: String): List[String] = - line.replaceAll("""(?m)\s+$""", "") match { - case "" => Nil - case nonEmptyLine => nonEmptyLine :: Nil - } - - val expectedOutput = - Source.fromFile(f, "UTF-8").getLines().flatMap(filterEmpties).mkString(EOL) - val actualOutput = { - resetToInitial() - val inputRes = extractInputs(prompt) - val buf = new ArrayBuffer[String] - inputRes.foldLeft(initialState) { (state, input) => - val (out, nstate) = evaluate(state, input, prompt) - buf.append(out) - - assert(out.endsWith("\n"), - s"Expected output of $input to end with newline") - - nstate - } - buf.flatMap(filterEmpties).mkString(EOL) - } - - if (expectedOutput != actualOutput) { - println("expected =========>") - println(expectedOutput) - println("actual ===========>") - println(actualOutput) - - fail(s"Error in file $f, expected output did not match actual") - } - } +class ScriptedTests extends ReplTest { + import ScriptedTests._ @Test def replTests = scripts("/repl").foreach(testFile) @Test def typePrinterTests = scripts("/type-printer").foreach(testFile) +} - @Category(Array(classOf[BootstrappedOnlyTests])) - @Test def replMacrosTests = scripts("/repl-macros").foreach(testFile) +object ScriptedTests { } diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index f45f59668249..9d3e3525118e 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -100,7 +100,7 @@ class TabcompleteTests extends ReplTest { @Test def importScala = fromInitialState { implicit s => val comp = tabComplete("import scala.") // check that there are no special symbols leaked: , , ... - assertEquals(comp.find(_.startsWith("<")), None) + assertEquals(comp.find(_.startsWith("<")), Some("<:<")) assert(!comp.contains("package")) } diff --git a/compiler/test/dotty/tools/vulpix/FileDiff.scala b/compiler/test/dotty/tools/vulpix/FileDiff.scala index a322d9598596..2ff3524ce772 100644 --- a/compiler/test/dotty/tools/vulpix/FileDiff.scala +++ b/compiler/test/dotty/tools/vulpix/FileDiff.scala @@ -20,7 +20,7 @@ object FileDiff { def linesMatch = outputLines.length == checkLines.length && - (outputLines, checkLines).zipped.forall(_ == _) + outputLines.lazyZip(checkLines).forall(_ == _) if (!linesMatch) Some( s"""|Output from '$sourceTitle' did not match check file. 
Actual output: diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index e6be248a41fa..6570e653d2b9 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -235,7 +235,7 @@ trait ParallelTesting extends RunnerOrchestration { self => FileDiff.dump(checkFile.toPath.toString, actual) echo("Updated checkfile: " + checkFile.getPath) } else { - val outFile = checkFile.toPath.resolveSibling(checkFile.toPath.getFileName + ".out").toString + val outFile = checkFile.toPath.resolveSibling(s"${checkFile.toPath.getFileName}.out").toString FileDiff.dump(outFile, actual) echo(FileDiff.diffMessage(checkFile.getPath, outFile)) } @@ -460,7 +460,7 @@ trait ParallelTesting extends RunnerOrchestration { self => if (times == 1) new Driver else new Driver { private def ntimes(n: Int)(op: Int => Reporter): Reporter = - (emptyReporter /: (1 to n)) ((_, i) => op(i)) + (1 to n).foldLeft(emptyReporter) ((_, i) => op(i)) override def doCompile(comp: Compiler, files: List[String])(implicit ctx: Context) = ntimes(times) { run => @@ -625,7 +625,7 @@ trait ParallelTesting extends RunnerOrchestration { self => override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = { def compilerCrashed = reporters.exists(_.compilerCrashed) - lazy val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(testSource.sourceFiles) + lazy val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(testSource.sourceFiles.toIndexedSeq) lazy val actualErrors = reporters.foldLeft(0)(_ + _.errorCount) def hasMissingAnnotations = getMissingExpectedErrors(errorMap, reporters.iterator.flatMap(_.errors)) @@ -654,12 +654,11 @@ trait ParallelTesting extends RunnerOrchestration { self => var expectedErrors = 0 files.filter(_.getName.endsWith(".scala")).foreach { file => Source.fromFile(file, "UTF-8").getLines().zipWithIndex.foreach { case (line, lineNbr) => - val errors = line.sliding("// error".length).count(_.mkString == "// error") + val errors = line.toSeq.sliding("// error".length).count(_.unwrap == "// error") if (errors > 0) errorMap.put(s"${file.getPath}:${lineNbr}", errors) - - val noposErrors = line.sliding("// nopos-error".length).count(_.mkString == "// nopos-error") + val noposErrors = line.toSeq.sliding("// nopos-error".length).count(_.unwrap == "// nopos-error") if (noposErrors > 0) { val nopos = errorMap.get("nopos") val existing: Integer = if (nopos eq null) 0 else nopos diff --git a/doc-tool/src/dotty/tools/dottydoc/model/JavaConverters.scala b/doc-tool/src/dotty/tools/dottydoc/model/JavaConverters.scala index 9da11331bdfa..f9d26e1052d1 100644 --- a/doc-tool/src/dotty/tools/dottydoc/model/JavaConverters.scala +++ b/doc-tool/src/dotty/tools/dottydoc/model/JavaConverters.scala @@ -241,12 +241,12 @@ object JavaConverters { }.asJava } - implicit class JavaMap(val map: collection.Map[String, Package]) extends AnyVal { + implicit class JavaMap(val map: collection.immutable.Map[String, Package]) extends AnyVal { def toJavaList: LinkedList[AnyRef] = - convertToList(map.mapValues(_.asJava)) + convertToList(map.transform((_, v) => v.asJava).toMap) def flattened: LinkedList[AnyRef] = - convertToList(map.mapValues(flattenEntity)) + convertToList(map.transform((_, v) => flattenEntity(v)).toMap) private[this] def convertToList(ms: collection.Map[String, AnyRef]): LinkedList[AnyRef] = ms.toList.sortBy(_._1) diff --git 
a/doc-tool/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala b/doc-tool/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala index c4957b66efff..019ba24dfec4 100644 --- a/doc-tool/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala +++ b/doc-tool/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala @@ -2,7 +2,7 @@ package dotty.tools.dottydoc package model package comment -import scala.collection._ +import scala.collection.SortedMap /** A body of text. A comment has a single body, which is composed of * at least one block. Inside every body is exactly one summary (see diff --git a/doc-tool/src/dotty/tools/dottydoc/model/comment/Comment.scala b/doc-tool/src/dotty/tools/dottydoc/model/comment/Comment.scala index e6a0219e5ede..8103cf082750 100644 --- a/doc-tool/src/dotty/tools/dottydoc/model/comment/Comment.scala +++ b/doc-tool/src/dotty/tools/dottydoc/model/comment/Comment.scala @@ -87,8 +87,8 @@ trait MarkupConversion[T] extends MemberLookup { see = filterEmpty(parsed.see).map(markupToHtml), result = single("@result", parsed.result).map(markupToHtml), throws = linkedExceptions(parsed.throws), - valueParams = filterEmpty(parsed.valueParams).mapValues(markupToHtml), - typeParams = filterEmpty(parsed.typeParams).mapValues(markupToHtml), + valueParams = filterEmpty(parsed.valueParams).transform((_, v) => markupToHtml(v)).toMap, + typeParams = filterEmpty(parsed.typeParams).transform((_, v) => markupToHtml(v)).toMap, version = single("@version", parsed.version).map(markupToHtml), since = single("@since", parsed.since).map(markupToHtml), todo = filterEmpty(parsed.todo).map(markupToHtml), @@ -97,9 +97,9 @@ trait MarkupConversion[T] extends MemberLookup { example = filterEmpty(parsed.example).map(markupToHtml), constructor = single("@constructor", parsed.constructor).map(markupToHtml), group = single("@group", parsed.group).map(markupToHtml), - groupDesc = filterEmpty(parsed.groupDesc).mapValues(markupToHtml), - groupNames = filterEmpty(parsed.groupNames).mapValues(markupToHtml), - groupPrio = filterEmpty(parsed.groupPrio).mapValues(markupToHtml), + groupDesc = filterEmpty(parsed.groupDesc).transform((_, v) => markupToHtml(v)).toMap, + groupNames = filterEmpty(parsed.groupNames).transform((_, v) => markupToHtml(v)).toMap, + groupPrio = filterEmpty(parsed.groupPrio).transform((_, v) => markupToHtml(v)).toMap, hideImplicitConversions = filterEmpty(parsed.hideImplicitConversions).map(markupToHtml) ) } @@ -130,16 +130,16 @@ extends MarkupConversion[MarkdownNode] { .map(stringToMarkup) def filterEmpty(xs: Map[String, String])(implicit ctx: Context) = - xs.mapValues(_.trim) + xs.transform((_, v) => v.trim) .filterNot { case (_, v) => v.isEmpty } - .mapValues(stringToMarkup) + .transform((_, v) => stringToMarkup(v)) } case class WikiComment(ent: Entity, parsed: ParsedComment, span: Span) extends MarkupConversion[Body] { def filterEmpty(xs: Map[String,String])(implicit ctx: Context) = - xs.mapValues(_.toWiki(ent, ctx.docbase.packages, span)) + xs.transform((_, v) => v.toWiki(ent, ctx.docbase.packages, span)) .filterNot { case (_, v) => v.blocks.isEmpty } def filterEmpty(xs: List[String])(implicit ctx: Context) = @@ -157,7 +157,7 @@ extends MarkupConversion[Body] { } def linkedExceptions(m: Map[String, String])(implicit ctx: Context) = { - m.mapValues(_.toWiki(ent, ctx.docbase.packages, span)).map { case (targetStr, body) => + m.transform((_, v) => v.toWiki(ent, ctx.docbase.packages, span)).map { case (targetStr, body) => val link = lookup(Some(ent), ctx.docbase.packages, targetStr) 
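(Editor's note: the `mapValues` rewrites in Comment.scala follow the usual 2.13 migration, since `Map#mapValues` is deprecated and now returns a lazy view; a minimal sketch with made-up data, not part of the patch:)

    object MapValuesMigration {
      val tags: Map[String, String] = Map("version" -> " 1.0 ", "since" -> " 0.9 ")

      // 2.12 spelling, deprecated in 2.13 (returns a lazy MapView there):
      //   tags.mapValues(_.trim)

      // Strict alternatives used in this patch:
      val viaTransform: Map[String, String] = tags.transform((_, v) => v.trim)
      val viaView: Map[String, String] = tags.view.mapValues(_.trim).toMap
    }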
val newBody = body match { case Body(List(Paragraph(Chain(content)))) => diff --git a/doc-tool/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala b/doc-tool/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala index cfcf788af9dd..3ac2ea1623c8 100644 --- a/doc-tool/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala +++ b/doc-tool/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala @@ -19,7 +19,7 @@ trait CommentCleaner { val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) }) val markedTagComment = SafeTags.replaceAllIn(javadoclessComment, { mtch => - Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker) + Matcher.quoteReplacement(s"${safeTagMarker}${mtch.matched}${safeTagMarker}") }) markedTagComment.linesIterator.toList map (cleanLine) } diff --git a/doc-tool/src/dotty/tools/dottydoc/model/comment/WikiParser.scala b/doc-tool/src/dotty/tools/dottydoc/model/comment/WikiParser.scala index 3220e08bf13b..04baa96a672c 100644 --- a/doc-tool/src/dotty/tools/dottydoc/model/comment/WikiParser.scala +++ b/doc-tool/src/dotty/tools/dottydoc/model/comment/WikiParser.scala @@ -97,7 +97,7 @@ private[comment] final class WikiParser( line = listLine(indent, style) } val constructor = listStyles(style) - constructor(lines) + constructor(lines.toList) } val indent = countWhitespace @@ -370,7 +370,7 @@ private[comment] final class WikiParser( // maxSkip - size of the longest common whitespace prefix of non-empty lines val nonEmptyLines = lines.filter(_.trim.nonEmpty) - val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.prefixLength(_ == ' ')).min + val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.takeWhile(_ == ' ').length).min // remove common whitespace prefix lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n") diff --git a/doc-tool/src/dotty/tools/dottydoc/staticsite/Page.scala b/doc-tool/src/dotty/tools/dottydoc/staticsite/Page.scala index 76e401188dfd..e82812794344 100644 --- a/doc-tool/src/dotty/tools/dottydoc/staticsite/Page.scala +++ b/doc-tool/src/dotty/tools/dottydoc/staticsite/Page.scala @@ -99,15 +99,15 @@ trait Page { _yaml = updatedYaml { yamlCollector .getData().asScala - .mapValues { - case xs if xs.size == 1 => + .toMap + .transform { + case (_, xs) if xs.size == 1 => val str = xs.get(0) if (str.length > 0 && str.head == '"' && str.last == '"') str.substring(1, str.length - 1) else str - case xs => xs + case (_, xs) => xs } - .toMap } // YAML must start with "---" and end in either "---" or "..." 
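(Editor's note: a small sketch of the `prefixLength` replacement used in WikiParser above; illustrative only, not part of the patch, with an invented sample string:)

    object PrefixLengthMigration {
      val line = "   indented text"

      // 2.12 spelling, deprecated in the 2.13 collections:
      //   val indent = line.prefixLength(_ == ' ')

      // Equivalent spelling used in this patch:
      val indent: Int = line.takeWhile(_ == ' ').length   // == 3
    }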
diff --git a/doc-tool/src/dotty/tools/dottydoc/staticsite/Site.scala b/doc-tool/src/dotty/tools/dottydoc/staticsite/Site.scala index 89c6ca0f7e96..dcc2c5ba0dc4 100644 --- a/doc-tool/src/dotty/tools/dottydoc/staticsite/Site.scala +++ b/doc-tool/src/dotty/tools/dottydoc/staticsite/Site.scala @@ -155,7 +155,7 @@ case class Site( "js/tether.min.js" -> "/js/tether.min.js", "js/highlight.pack.js" -> "/js/highlight.pack.js" ) - .mapValues(getResource) + .transform((_, v) => getResource(v)) .foreach { case (path, resource) => val source = new ByteArrayInputStream(resource.getBytes(StandardCharsets.UTF_8)) val target = mkdirs(fs.getPath(outDir.getAbsolutePath, path)) diff --git a/doc-tool/test/dotty/tools/dottydoc/GenDocs.scala b/doc-tool/test/dotty/tools/dottydoc/GenDocs.scala index 78b0a6f89294..6eb1f53eec74 100644 --- a/doc-tool/test/dotty/tools/dottydoc/GenDocs.scala +++ b/doc-tool/test/dotty/tools/dottydoc/GenDocs.scala @@ -28,15 +28,6 @@ trait LocalResources extends DocDriver { files } -object GenCollections extends LocalResources { - import Files._ - - val collections = TestWhitelistedCollections.files - - override def main(args: Array[String]): Unit = - super.main(withClasspath(collections.toArray)) -} - object GenDottyDocs extends LocalResources { import Files._ diff --git a/doc-tool/test/dotty/tools/dottydoc/JavaConverterTest.scala b/doc-tool/test/dotty/tools/dottydoc/JavaConverterTest.scala index 66872308ef06..795a9c08e0a3 100644 --- a/doc-tool/test/dotty/tools/dottydoc/JavaConverterTest.scala +++ b/doc-tool/test/dotty/tools/dottydoc/JavaConverterTest.scala @@ -63,7 +63,7 @@ class JavaConverterTest { def members = df :: Nil def traitParams = List(paramList) def companionPath = "path" :: "to" :: "companion" :: Nil - def companionPath_=(xs: List[String]) = Unit + def companionPath_=(xs: List[String]) = {} } assertSerializedCorrectly(trt, trt.asJava) val cls = new Class { @@ -78,7 +78,7 @@ class JavaConverterTest { def superTypes = new NoLink("title", "query") :: Nil def members = Nil def companionPath = "path" :: "to" :: "companion" :: Nil - def companionPath_=(xs: List[String]) = Unit + def companionPath_=(xs: List[String]) = {} def constructors = List(List(paramList)) } assertSerializedCorrectly(cls, cls.asJava) @@ -95,7 +95,7 @@ class JavaConverterTest { def superTypes = new NoLink("title", "query") :: Nil def members = Nil def companionPath = "path" :: "to" :: "companion" :: Nil - def companionPath_=(xs: List[String]) = Unit + def companionPath_=(xs: List[String]) = {} } assertSerializedCorrectly(caseClass, caseClass.asJava) val obj = new EObject { @@ -110,7 +110,7 @@ class JavaConverterTest { def superTypes = new NoLink("title", "query") :: Nil def members = df :: Nil def companionPath = "path" :: "to" :: "companion" :: Nil - def companionPath_=(xs: List[String]) = Unit + def companionPath_=(xs: List[String]) = {} } assertSerializedCorrectly(obj, obj.asJava) val typeAlias = new TypeAlias { diff --git a/doc-tool/test/dotty/tools/dottydoc/WhitelistedStdLib.scala b/doc-tool/test/dotty/tools/dottydoc/WhitelistedStdLib.scala deleted file mode 100644 index ba15e46a9ee9..000000000000 --- a/doc-tool/test/dotty/tools/dottydoc/WhitelistedStdLib.scala +++ /dev/null @@ -1,37 +0,0 @@ -package dotty.tools -package dottydoc - -import org.junit.Test -import org.junit.Assert._ - -class TestWhitelistedCollections extends DottyDocTest with CheckFromSource { - - @Test def arrayAndImmutableHasDocumentation = - checkFiles(TestWhitelistedCollections.files) { (ctx, packages) => - val array = - 
packages("scala") - .children.find(_.path.mkString(".") == "scala.Array") - .get - - assert(array.comment.get.body.length > 0, - "scala.Array didn't have any documentation") - - val imm = - packages("scala") - .children.find(_.path.mkString(".") == "scala.Immutable") - .get - - assert( - imm.kind == "trait" && imm.name == "Immutable", - "Found wrong `Immutable`") - assert( - imm.comment.map(_.body).get.length > 0, - "Imm did not have a comment with length > 0") - } -} - -object TestWhitelistedCollections { - val files: List[String] = - TestSources.stdLibSources - .filterNot(_.endsWith("package.scala")) -} diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index b801d3137313..4a75070457d2 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -432,10 +432,9 @@ class DottyLanguageServer extends LanguageServer val changes = refs.groupBy(ref => toUriOption(ref.source)) - .flatMap((uriOpt, ref) => uriOpt.map(uri => (uri.toString, ref))) - .mapValues(refs => - refs.flatMap(ref => - range(ref.namePos).map(nameRange => new TextEdit(nameRange, newName))).distinct.asJava) + .flatMap { case (uriOpt, refs) => uriOpt.map(uri => (uri.toString, refs)) } + .transform((_, refs) => refs.flatMap(ref => + range(ref.namePos).map(nameRange => new TextEdit(nameRange, newName))).distinct.asJava) new WorkspaceEdit(changes.asJava) } @@ -584,7 +583,7 @@ class DottyLanguageServer extends LanguageServer definition <- definitions.toSet uri <- toUriOption(definition.pos.source).toSet config = configFor(uri) - project <- dependentProjects(config) + config + project <- dependentProjects(config) union Set(config) } yield project } } diff --git a/language-server/test/dotty/tools/languageserver/SignatureHelpTest.scala b/language-server/test/dotty/tools/languageserver/SignatureHelpTest.scala index a7ad1afc804f..22393a576fc4 100644 --- a/language-server/test/dotty/tools/languageserver/SignatureHelpTest.scala +++ b/language-server/test/dotty/tools/languageserver/SignatureHelpTest.scala @@ -19,9 +19,10 @@ class SignatureHelpTest { @Test def fromScala2: Unit = { val applySig = - S("apply[A]", Nil, List(List(P("xs", "A*"))), Some("List[A]")) + // TODO: Ideally this should say `List[A]`, not `CC[A]` + S("apply[A]", Nil, List(List(P("elems", "A*"))), Some("CC[A]")) val mapSig = - S("map[B, That]", Nil, List(List(P("f", "A => B"))), Some("That")) + S("map[B]", Nil, List(List(P("f", "A => B"))), Some("List[B]")) code"""object O { List($m1) List(1, 2, 3).map($m2) diff --git a/language-server/test/dotty/tools/languageserver/util/actions/CodeRename.scala b/language-server/test/dotty/tools/languageserver/util/actions/CodeRename.scala index b7b39edbd702..78adb6dd1ac7 100644 --- a/language-server/test/dotty/tools/languageserver/util/actions/CodeRename.scala +++ b/language-server/test/dotty/tools/languageserver/util/actions/CodeRename.scala @@ -48,8 +48,8 @@ class CodeRename(override val marker: CodeMarker, val results = query.get() - val changes = results.getChanges.asScala.mapValues(_.asScala.toSet.map(ch => (ch.getNewText, ch.getRange))) - val expectedChanges = expected.groupBy(_.file.uri).mapValues(_.map(range => (newName, range.toRange))) + val changes = results.getChanges.asScala.view.mapValues(_.asScala.toSet.map(ch => (ch.getNewText, ch.getRange))).toMap + val expectedChanges = 
expected.groupBy(_.file.uri).view.mapValues(_.map(range => (newName, range.toRange))).toMap assertNull(results.getDocumentChanges) assertEquals(expectedChanges, changes) diff --git a/language-server/test/dotty/tools/languageserver/util/actions/Implementation.scala b/language-server/test/dotty/tools/languageserver/util/actions/Implementation.scala index 5ab220e687ec..4294382e6317 100644 --- a/language-server/test/dotty/tools/languageserver/util/actions/Implementation.scala +++ b/language-server/test/dotty/tools/languageserver/util/actions/Implementation.scala @@ -23,7 +23,7 @@ class Implementation(override val range: CodeRange, expected: List[CodeRange]) e override def onMarker(marker: CodeMarker): Exec[Unit] = { val expectedLocations = expected.map(_.toLocation) - val results: Seq[org.eclipse.lsp4j.Location] = server.implementation(marker.toTextDocumentPositionParams).get().asScala + val results: Seq[org.eclipse.lsp4j.Location] = server.implementation(marker.toTextDocumentPositionParams).get().asScala.toSeq assertEquals(expectedLocations.length, results.length) expectedLocations.sorted.zip(results.sorted).foreach { diff --git a/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala b/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala index 25a8ca417d4e..ea6538c90f7b 100644 --- a/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala +++ b/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala @@ -41,9 +41,9 @@ class TestServer(testFolder: Path, projects: List[Project]) { * @return A JSON object representing the configuration for this project. */ def projectSetup(project: Project): String = { - def showSeq[T](lst: Seq[T]): String = + def showSeq[T](lst: collection.Seq[T]): String = lst - .map(elem => '"' + elem.toString.replace('\\', '/') + '"') + .map(elem => '"'.toString + elem.toString.replace('\\', '/') + '"'.toString) .mkString("[ ", ", ", " ]") if (project.sources.exists(_.isInstanceOf[TastyWithPositions])) { @@ -117,7 +117,7 @@ class TestServer(testFolder: Path, projects: List[Project]) { project.dependsOn.flatMap { dep => classDirectory(dep, wipe = false).toString +: dependencyClasspath(dep) } - }.distinct + }.distinct.toSeq private def sourceDirectory(project: Project, wipe: Boolean): Path = { val path = TestFile.sourceDir.resolve(project.name).toAbsolutePath diff --git a/library/src-bootstrapped/scala/tasty/reflect/TreeUtils.scala b/library/src-bootstrapped/scala/tasty/reflect/TreeUtils.scala index 432db07a76b0..675b6a5460bb 100644 --- a/library/src-bootstrapped/scala/tasty/reflect/TreeUtils.scala +++ b/library/src-bootstrapped/scala/tasty/reflect/TreeUtils.scala @@ -14,8 +14,8 @@ trait TreeUtils def foldTree(x: X, tree: Tree) given (ctx: Context): X def foldPattern(x: X, tree: Pattern) given (ctx: Context): X - def foldTrees(x: X, trees: Iterable[Tree]) given (ctx: Context): X = (x /: trees)(foldTree) - def foldPatterns(x: X, trees: Iterable[Pattern]) given (ctx: Context): X = (x /: trees)(foldPattern) + def foldTrees(x: X, trees: Iterable[Tree]) given (ctx: Context): X = trees.foldLeft(x)(foldTree) + def foldPatterns(x: X, trees: Iterable[Pattern]) given (ctx: Context): X = trees.foldLeft(x)(foldPattern) def foldOverTree(x: X, tree: Tree) given (ctx: Context): X = { def localCtx(definition: Definition): Context = definition.symbol.localContext @@ -65,7 +65,7 @@ trait TreeUtils foldTrees(foldTree(x, tpt), rhs) case IsDefinition(ddef @ DefDef(_, tparams, vparamss, tpt, rhs)) => implicit val 
ctx = localCtx(ddef) - foldTrees(foldTree((foldTrees(x, tparams) /: vparamss)(foldTrees), tpt), rhs) + foldTrees(foldTree(vparamss.foldLeft(foldTrees(x, tparams))(foldTrees), tpt), rhs) case IsDefinition(tdef @ TypeDef(_, rhs)) => implicit val ctx = localCtx(tdef) foldTree(x, rhs) diff --git a/library/src/scala/tasty/reflect/CompilerInterface.scala b/library/src/scala/tasty/reflect/CompilerInterface.scala index 5157df662c30..68b5e0bd48d2 100644 --- a/library/src/scala/tasty/reflect/CompilerInterface.scala +++ b/library/src/scala/tasty/reflect/CompilerInterface.scala @@ -1121,13 +1121,10 @@ trait CompilerInterface { // SIGNATURES // - /** JVM signature of a method */ type Signature <: AnyRef - /** The (JVM) erased signatures of the parameters */ - def Signature_paramSigs(self: Signature): List[String] + def Signature_paramSigs(self: Signature): List[String | Int] - /** The (JVM) erased result type */ def Signature_resultSig(self: Signature): String // diff --git a/library/src/scala/tasty/reflect/Core.scala b/library/src/scala/tasty/reflect/Core.scala index 44be03c02a51..ebddcc878ce1 100644 --- a/library/src/scala/tasty/reflect/Core.scala +++ b/library/src/scala/tasty/reflect/Core.scala @@ -400,7 +400,7 @@ trait Core { /** Untyped identifier */ type Id = internal.Id - /** JVM signature of a method */ + /** Signature of a method */ type Signature = internal.Signature /** Position in a source file */ diff --git a/library/src/scala/tasty/reflect/Printers.scala b/library/src/scala/tasty/reflect/Printers.scala index 697fee3fb932..0e22b26dfb4d 100644 --- a/library/src/scala/tasty/reflect/Printers.scala +++ b/library/src/scala/tasty/reflect/Printers.scala @@ -356,7 +356,7 @@ trait Printers def visitSignature(sig: Signature): Buffer = { val Signature(params, res) = sig - this += "Signature(" ++= params += ", " += res += ")" + this += "Signature(" ++= params.map(_.toString) += ", " += res += ")" } def visitImportSelector(sel: ImportSelector): Buffer = sel match { @@ -1371,6 +1371,9 @@ trait Printers } + inline private val qc = '\'' + inline private val qSc = '"' + def printConstant(const: Constant): Buffer = const match { case Constant(()) => this += highlightLiteral("()") case Constant(null) => this += highlightLiteral("null") @@ -1381,8 +1384,8 @@ trait Printers case Constant(v: Long) => this += highlightLiteral(v.toString + "L") case Constant(v: Float) => this += highlightLiteral(v.toString + "f") case Constant(v: Double) => this += highlightLiteral(v.toString) - case Constant(v: Char) => this += highlightString('\'' + escapedChar(v) + '\'') - case Constant(v: String) => this += highlightString('"' + escapedString(v) + '"') + case Constant(v: Char) => this += highlightString(s"${qc}${escapedChar(v)}${qc}") + case Constant(v: String) => this += highlightString(s"${qSc}${escapedString(v)}${qSc}") case Constant.ClassTag(v) => this += "classOf" inSquare(printType(v)) diff --git a/library/src/scala/tasty/reflect/SignatureOps.scala b/library/src/scala/tasty/reflect/SignatureOps.scala index 539a3752c652..bff5d4d894bc 100644 --- a/library/src/scala/tasty/reflect/SignatureOps.scala +++ b/library/src/scala/tasty/reflect/SignatureOps.scala @@ -2,19 +2,25 @@ package scala.tasty.reflect trait SignatureOps extends Core { - /** Erased (JVM) signatures. */ + /** The signature of a method */ object Signature { - /** Matches the erased (JVM) signature and returns its parameters and result type. 
*/ - def unapply(sig: Signature) given (ctx: Context): Option[(List[String], String)] = + /** Matches the method signature and returns its parameters and result type. */ + def unapply(sig: Signature) given (ctx: Context): Option[(List[String | Int], String)] = Some((sig.paramSigs, sig.resultSig)) } implicit class SignatureAPI(sig: Signature) { - /** The (JVM) erased signatures of the parameters */ - def paramSigs: List[String]= internal.Signature_paramSigs(sig) + /** The signatures of the method parameters. + * + * Each *type parameter section* is represented by a single Int corresponding + * to the number of type parameters in the section. + * Each *term parameter* is represented by a String corresponding to the fully qualified + * name of the parameter type. + */ + def paramSigs: List[String | Int] = internal.Signature_paramSigs(sig) - /** The (JVM) erased result type */ + /** The signature of the result type */ def resultSig: String = internal.Signature_resultSig(sig) } diff --git a/library/src/scala/tasty/reflect/TreeOps.scala b/library/src/scala/tasty/reflect/TreeOps.scala index 9976c41c3711..09a5c53666af 100644 --- a/library/src/scala/tasty/reflect/TreeOps.scala +++ b/library/src/scala/tasty/reflect/TreeOps.scala @@ -196,7 +196,7 @@ trait TreeOps extends Core { * `tree (argss(0)) ... (argss(argss.length -1))` */ def appliedToArgss(argss: List[List[Term]]) given (ctx: Context): Term = - ((self: Term) /: argss)(Apply(_, _)) + argss.foldLeft(self: Term)(Apply(_, _)) /** The current tree applied to (): `tree()` */ def appliedToNone given (ctx: Context): Apply = appliedToArgs(Nil) diff --git a/project/Build.scala b/project/Build.scala index 11985ef9ca4d..e3cbbe75a847 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -58,10 +58,9 @@ object MyScalaJSPlugin extends AutoPlugin { } object Build { - val scalacVersion = "2.12.8" - val referenceVersion = "0.17.0-RC1" + val referenceVersion = "0.18.1-bin-20190825-6960f8d-NIGHTLY" - val baseVersion = "0.18.0" + val baseVersion = "0.18.1" val baseSbtDottyVersion = "0.3.4" // Versions used by the vscode extension to create a new project @@ -71,6 +70,16 @@ object Build { val publishedDottyVersion = referenceVersion val publishedSbtDottyVersion = "0.3.3" + /** scala-library version required to compile Dotty. + * + * Both the non-bootstrapped and bootstrapped version should match, unless + * we're in the process of upgrading to a new major version of + * scala-library. + */ + def stdlibVersion(implicit mode: Mode): String = mode match { + case NonBootstrapped => "2.13.0" + case Bootstrapped => "2.13.0" + } val dottyOrganization = "ch.epfl.lamp" val dottyGithubUrl = "https://github.com/lampepfl/dotty" @@ -158,7 +167,15 @@ object Build { // include sources in eclipse (downloads source code for all dependencies) //http://stackoverflow.com/questions/10472840/how-to-attach-sources-to-sbt-managed-dependencies-in-scala-ide#answer-11683728 - EclipseKeys.withSource := true + EclipseKeys.withSource := true, + + // Avoid various sbt craziness involving classloaders and parallelism + fork in run := true, + fork in Test := true, + parallelExecution in Test := false, + + // enable verbose exception messages for JUnit + testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), ) // Settings shared globally (scoped in Global). 
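(Editor's note: a hypothetical consumer of the `paramSigs: List[String | Int]` encoding documented above; `describe` and the example method are invented for illustration and are not part of the patch:)

    object SignatureSigs {
      // An Int entry is the arity of a type-parameter section; a String entry is the
      // fully qualified name of a term-parameter type.
      def describe(paramSigs: List[String | Int]): List[String] =
        paramSigs.map {
          case n: Int      => s"type parameter section with $n parameter(s)"
          case tpe: String => s"term parameter of type $tpe"
        }

      // For a method like `def headOf[T](xs: List[T]): T` one would expect something
      // like describe(List(1, "scala.collection.immutable.List")).
    }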
Used in build.sbt @@ -207,8 +224,6 @@ object Build { libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % Test, - // enable verbose exception messages for JUnit - testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-a", "-v") ) // Settings used for projects compiled only with Java @@ -230,13 +245,17 @@ object Build { // Settings used when compiling dotty with the reference compiler lazy val commonNonBootstrappedSettings = commonDottySettings ++ Seq( + unmanagedSourceDirectories in Compile += baseDirectory.value / "src-non-bootstrapped", + version := dottyNonBootstrappedVersion, scalaVersion := referenceVersion, - excludeFromIDE := true + excludeFromIDE := true, ) // Settings used when compiling dotty with a non-bootstrapped dotty lazy val commonBootstrappedSettings = commonDottySettings ++ Seq( + unmanagedSourceDirectories in Compile += baseDirectory.value / "src-bootstrapped", + version := dottyVersion, scalaVersion := dottyNonBootstrappedVersion, @@ -250,8 +269,8 @@ object Build { // Enforce that the only Scala 2 classfiles we unpickle come from scala-library /* scalacOptions ++= { - val attList = (dependencyClasspath in `dotty-library` in Compile).value - val scalaLib = findLib(attList, "scala-library") + val cp = (dependencyClasspath in `dotty-library` in Compile).value + val scalaLib = findArtifactPath(cp, "scala-library") Seq("-Yscala2-unpickler", scalaLib) }, */ @@ -262,13 +281,8 @@ object Build { // Compile using the non-bootstrapped and non-published dotty managedScalaInstance := false, scalaInstance := { - val externalDeps = externalDependencyClasspath.in(`dotty-doc`, Compile).value - def getExternalDep(name: String): File = - externalDeps.find(_.get(artifact.key).exists(_.name == name)) - .getOrElse(throw new MessageOnlyException(s"Artifact for $name not found in $externalDeps")) - .data - - val scalaLibrary = getExternalDep("scala-library") + val externalNonBootstrappedDeps = externalDependencyClasspath.in(`dotty-doc`, Compile).value + val scalaLibrary = findArtifact(externalNonBootstrappedDeps, "scala-library") // IMPORTANT: We need to use actual jars to form the ScalaInstance and not // just directories containing classfiles because sbt maintains a cache of @@ -280,7 +294,7 @@ object Build { val dottyCompiler = packageBin.in(`dotty-compiler`, Compile).value val dottyDoc = packageBin.in(`dotty-doc`, Compile).value - val allJars = Seq(dottyLibrary, dottyInterfaces, dottyCompiler, dottyDoc) ++ externalDeps.map(_.data) + val allJars = Seq(dottyLibrary, dottyInterfaces, dottyCompiler, dottyDoc) ++ externalNonBootstrappedDeps.map(_.data) makeScalaInstance( state.value, @@ -316,6 +330,15 @@ object Build { /** Projects -------------------------------------------------------------- */ + val dottyCompilerBootstrappedRef = LocalProject("dotty-compiler-bootstrapped") + + /** External dependencies we may want to put on the compiler classpath. */ + def externalCompilerClasspathTask: Def.Initialize[Task[Def.Classpath]] = + // Even if we're running the non-bootstrapped compiler, we want the + // dependencies of the bootstrapped compiler since we want to put them on + // the compiler classpath, not the JVM classpath. + externalDependencyClasspath.in(dottyCompilerBootstrappedRef, Runtime) + // The root project: // - aggregates other projects so that "compile", "test", etc are run on all projects at once. 
// - publishes its own empty artifact "dotty" that depends on "dotty-library" and "dotty-compiler", @@ -331,8 +354,8 @@ object Build { val dottyLib = jars("dotty-library") val dottyInterfaces = jars("dotty-interfaces") val otherDeps = (dependencyClasspath in Compile).value.map(_.data).mkString(File.pathSeparator) - val attList = (dependencyClasspath in Runtime).value - dottyLib + File.pathSeparator + findLib(attList, "scala-library-") + val externalDeps = externalCompilerClasspathTask.value + dottyLib + File.pathSeparator + findArtifactPath(externalDeps, "scala-library") } def dottyDocSettings(implicit mode: Mode) = Seq( @@ -343,9 +366,6 @@ object Build { outputStrategy := Some(StdoutOutput), javaOptions ++= (javaOptions in `dotty-compiler`).value, - fork in run := true, - fork in Test := true, - parallelExecution in Test := false, genDocs := Def.taskDyn { // Make majorVersion available at dotty.epfl.ch/versions/latest-nightly-base @@ -404,10 +424,15 @@ object Build { case Bootstrapped => `dotty-doc-bootstrapped` } - def findLib(attList: Seq[Attributed[File]], name: String) = attList - .map(_.data.getAbsolutePath) - .find(_.contains(name)) - .toList.mkString(File.pathSeparator) + /** Find an artifact with the given `name` in `classpath` */ + def findArtifact(classpath: Def.Classpath, name: String): File = classpath + .find(_.get(artifact.key).exists(_.name == name)) + .getOrElse(throw new MessageOnlyException(s"Artifact for $name not found in $classpath")) + .data + + /** Like `findArtifact` but returns the absolute path of the entry as a string */ + def findArtifactPath(classpath: Def.Classpath, name: String): String = + findArtifact(classpath, name).getAbsolutePath // Settings shared between dotty-compiler and dotty-compiler-bootstrapped lazy val commonDottyCompilerSettings = Seq( @@ -439,7 +464,6 @@ object Build { // get libraries onboard libraryDependencies ++= Seq( "org.scala-lang.modules" % "scala-asm" % "6.0.0-scala-1", // used by the backend - "org.scala-lang" % "scala-library" % scalacVersion % "test", Dependencies.`compiler-interface`, "org.jline" % "jline-reader" % "3.9.0", // used by the REPL "org.jline" % "jline-terminal" % "3.9.0", @@ -462,9 +486,6 @@ object Build { ), // Spawn new JVM in run and test - fork in run := true, - fork in Test := true, - parallelExecution in Test := false, // Add git-hash used to package the distribution to the manifest to know it in runtime and report it in REPL packageOptions += ManifestAttributes(("Git-Hash", VersionUtil.gitHash)), @@ -472,7 +493,7 @@ object Build { // http://grokbase.com/t/gg/simple-build-tool/135ke5y90p/sbt-setting-jvm-boot-paramaters-for-scala // packageAll should always be run before tests javaOptions ++= { - val attList = (dependencyClasspath in Runtime).value + val externalDeps = externalCompilerClasspathTask.value val jars = packageAll.value val ci_build = // propagate if this is a ci build @@ -499,11 +520,11 @@ object Build { "-Ddotty.tests.classes.dottyInterfaces=" + jars("dotty-interfaces"), "-Ddotty.tests.classes.dottyLibrary=" + jars("dotty-library"), "-Ddotty.tests.classes.dottyCompiler=" + jars("dotty-compiler"), - "-Ddotty.tests.classes.compilerInterface=" + findLib(attList, "compiler-interface"), - "-Ddotty.tests.classes.scalaLibrary=" + findLib(attList, "scala-library-"), - "-Ddotty.tests.classes.scalaAsm=" + findLib(attList, "scala-asm"), - "-Ddotty.tests.classes.jlineTerminal=" + findLib(attList, "jline-terminal"), - "-Ddotty.tests.classes.jlineReader=" + findLib(attList, "jline-reader") + 
"-Ddotty.tests.classes.compilerInterface=" + findArtifactPath(externalDeps, "compiler-interface"), + "-Ddotty.tests.classes.scalaLibrary=" + findArtifactPath(externalDeps, "scala-library"), + "-Ddotty.tests.classes.scalaAsm=" + findArtifactPath(externalDeps, "scala-asm"), + "-Ddotty.tests.classes.jlineTerminal=" + findArtifactPath(externalDeps, "jline-terminal"), + "-Ddotty.tests.classes.jlineReader=" + findArtifactPath(externalDeps, "jline-reader") ) jarOpts ::: tuning ::: agentOptions ::: ci_build @@ -541,10 +562,10 @@ object Build { dotr := { val args: List[String] = spaceDelimited("").parsed.toList - val attList = (dependencyClasspath in Runtime).value + val externalDeps = externalCompilerClasspathTask.value val jars = packageAll.value - val scalaLib = findLib(attList, "scala-library") + val scalaLib = findArtifactPath(externalDeps, "scala-library") val dottyLib = jars("dotty-library") def run(args: List[String]): Unit = { @@ -558,7 +579,7 @@ object Build { println("Couldn't find scala-library on classpath, please run using script in bin dir instead") } else if (args.contains("-with-compiler")) { val args1 = args.filter(_ != "-with-compiler") - val asm = findLib(attList, "scala-asm") + val asm = findArtifactPath(externalDeps, "scala-asm") val dottyCompiler = jars("dotty-compiler") val dottyInterfaces = jars("dotty-interfaces") run(insertClasspathInArgs(args1, List(dottyCompiler, dottyInterfaces, asm).mkString(File.pathSeparator))) @@ -606,9 +627,9 @@ object Build { def runCompilerMain(repl: Boolean = false) = Def.inputTaskDyn { val log = streams.value.log - val attList = (dependencyClasspath in Runtime).value + val externalDeps = externalCompilerClasspathTask.value val jars = packageAll.value - val scalaLib = findLib(attList, "scala-library-") + val scalaLib = findArtifactPath(externalDeps, "scala-library") val dottyLib = jars("dotty-library") val dottyCompiler = jars("dotty-compiler") val args0: List[String] = spaceDelimited("").parsed.toList @@ -634,7 +655,7 @@ object Build { log.error("-with-compiler should only be used with a bootstrapped compiler") } val dottyInterfaces = jars("dotty-interfaces") - val asm = findLib(attList, "scala-asm") + val asm = findArtifactPath(externalDeps, "scala-asm") extraClasspath ++= Seq(dottyCompiler, dottyInterfaces, asm) } @@ -696,21 +717,8 @@ object Build { // Settings shared between dotty-library and dotty-library-bootstrapped lazy val dottyLibrarySettings = Seq( - libraryDependencies += "org.scala-lang" % "scala-library" % scalacVersion, - // Needed so that the library sources are visible when `dotty.tools.dotc.core.Definitions#init` is called scalacOptions in Compile ++= Seq("-sourcepath", (scalaSource in Compile).value.getAbsolutePath), - - // Add version-specific source directories: - // - files in src-non-bootstrapped will only be compiled by the reference compiler - // - files in src-bootstrapped will only be compiled by the current dotty compiler (non-bootstrapped and bootstrapped) - unmanagedSourceDirectories in Compile ++= { - val baseDir = baseDirectory.value - if (scalaVersion.value == referenceVersion) - Seq(baseDir / "src-non-bootstrapped") - else - Seq(baseDir / "src-bootstrapped") - } ) lazy val `dotty-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped) @@ -767,10 +775,10 @@ object Build { scalaSource in Test := baseDirectory.value, javaSource in Test := baseDirectory.value, - fork in Test := true, - parallelExecution in Test := false, - - libraryDependencies += (Dependencies.`zinc-api-info` % 
Test).withDottyCompat(scalaVersion.value) + // Tests disabled until zinc-api-info cross-compiles with 2.13, + // alternatively we could just copy in sources the part of zinc-api-info we need. + sources in Test := Seq(), + // libraryDependencies += (Dependencies.`zinc-api-info` % Test).withDottyCompat(scalaVersion.value) ) lazy val `dotty-language-server` = project.in(file("language-server")). @@ -781,10 +789,6 @@ object Build { // plugin and the language server unmanagedSourceDirectories in Compile += baseDirectory.value / "../sbt-dotty/src/dotty/tools/sbtplugin/config", - // fork so that the shutdown hook in Main is run when we ctrl+c a run - // (you need to have `cancelable in Global := true` in your global sbt config to ctrl+c a run) - fork in run := true, - fork in Test := true, libraryDependencies ++= Seq( "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.6.0", Dependencies.`jackson-databind` @@ -846,6 +850,9 @@ object Build { enablePlugins(MyScalaJSPlugin). dependsOn(`dotty-library-bootstrappedJS`). settings( + // Required to run Scala.js tests. + fork in Test := false, + scalaJSUseMainModuleInitializer := true, ) @@ -862,6 +869,9 @@ object Build { settings( scalacOptions --= Seq("-Xfatal-warnings", "-deprecation"), + // Required to run Scala.js tests. + fork in Test := false, + sourceDirectory in fetchScalaJSSource := target.value / s"scala-js-src-$scalaJSVersion", fetchScalaJSSource := { @@ -956,7 +966,7 @@ object Build { // depend on it via dotty-library, because sbt may rewrite dependencies // (see https://github.com/sbt/sbt/pull/2634), but won't rewrite the direct // dependencies of scala-library (see https://github.com/sbt/sbt/pull/2897) - libraryDependencies += "org.scala-lang" % "scala-library" % scalacVersion + libraryDependencies += "org.scala-lang" % "scala-library" % stdlibVersion(Bootstrapped) ) lazy val `scala-compiler` = project. @@ -964,12 +974,12 @@ object Build { lazy val `scala-reflect` = project. settings(commonDummySettings). settings( - libraryDependencies := Seq("org.scala-lang" % "scala-reflect" % scalacVersion) + libraryDependencies := Seq("org.scala-lang" % "scala-reflect" % stdlibVersion(Bootstrapped)) ) lazy val scalap = project. settings(commonDummySettings). settings( - libraryDependencies := Seq("org.scala-lang" % "scalap" % scalacVersion) + libraryDependencies := Seq("org.scala-lang" % "scalap" % stdlibVersion(Bootstrapped)) ) @@ -1096,7 +1106,18 @@ object Build { "--include-categories=dotty.communitybuild.TestCategory", ), (Test / testOnly) := ((Test / testOnly) dependsOn prepareCommunityBuild).evaluated, - (Test / test ) := ((Test / test ) dependsOn prepareCommunityBuild).value + (Test / test ) := ((Test / test ) dependsOn prepareCommunityBuild).value, + javaOptions ++= { + // Propagate the ivy cache directory setting to the tests, which will + // then propagate it further to the sbt instances they will spawn. + val sbtProps = Option(System.getProperty("sbt.ivy.home")) match { + case Some(ivyHome) => + Seq(s"-Dsbt.ivy.home=$ivyHome") + case _ => + Seq() + } + sbtProps + } ) lazy val publishSettings = Seq( @@ -1217,6 +1238,9 @@ object Build { settings(dottyCompilerSettings) def asDottyLibrary(implicit mode: Mode): Project = project.withCommonSettings. + settings( + libraryDependencies += "org.scala-lang" % "scala-library" % stdlibVersion + ). settings(dottyLibrarySettings) def asDottyDoc(implicit mode: Mode): Project = project.withCommonSettings. 
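The removed `findLib` helper matched on substrings of absolute classpath entries and quietly produced an empty string when nothing matched; the `findArtifact`/`findArtifactPath` pair introduced above looks up the artifact name in the classpath metadata and fails loudly. A simplified, sbt-free sketch of the same lookup; the real helpers work on sbt's `Def.Classpath` (`Seq[Attributed[File]]`) and read `artifact.key`, which is modelled here as plain `(artifactName, file)` pairs:

import java.io.File

// Finds the classpath entry whose artifact name matches exactly,
// failing with a descriptive error instead of returning "".
def findArtifact(classpath: Seq[(String, File)], name: String): File =
  classpath.collectFirst { case (`name`, file) => file }
    .getOrElse(sys.error(s"Artifact for $name not found in $classpath"))

// Same lookup, returning the absolute path as a string.
def findArtifactPath(classpath: Seq[(String, File)], name: String): String =
  findArtifact(classpath, name).getAbsolutePath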
diff --git a/project/plugins.sbt b/project/plugins.sbt index 87cef2fc2e5a..f9d6b89f871d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ // Scala IDE project file generator addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.0.0-M7") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.0.0-M8") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.1") diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index 9106d9f5304a..ee50b3717213 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -228,12 +228,12 @@ class ExtractUsedNamesSpecification { val compilerForTesting = new ScalaCompilerForUnitTesting val (_, callback) = compilerForTesting.compileSrcs(List(List(sealedClass, in)), reuseCompilerInstance = false) - val clientNames = callback.usedNamesAndScopes.filterKeys(!_.startsWith("base.")) + val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) val names: Set[String] = clientNames.flatMap { case (_, usages) => usages.filter(_.scopes.contains(UseScope.PatMatTarget)).map(_.name) - }(collection.breakOut) + }.toSet names } diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala index 7a065d5abdc9..3348fd2d90f3 100644 --- a/sbt-bridge/test/xsbti/TestCallback.scala +++ b/sbt-bridge/test/xsbti/TestCallback.scala @@ -18,7 +18,7 @@ class TestCallback extends AnalysisCallback val classNames = scala.collection.mutable.Map.empty[File, Set[(String, String)]].withDefaultValue(Set.empty) val apis: scala.collection.mutable.Map[File, Seq[ClassLike]] = scala.collection.mutable.Map.empty - def usedNames = usedNamesAndScopes.mapValues(_.map(_.name)) + def usedNames = usedNamesAndScopes.view.mapValues(_.map(_.name)).toMap override def startSource(source: File): Unit = { assert(!apis.contains(source), s"startSource can be called only once per source file: $source") @@ -68,16 +68,16 @@ object TestCallback { localInheritance: Map[String, Set[String]]) object ExtractedClassDependencies { def fromPairs( - memberRefPairs: Seq[(String, String)], - inheritancePairs: Seq[(String, String)], - localInheritancePairs: Seq[(String, String)] + memberRefPairs: collection.Seq[(String, String)], + inheritancePairs: collection.Seq[(String, String)], + localInheritancePairs: collection.Seq[(String, String)] ): ExtractedClassDependencies = { ExtractedClassDependencies(pairsToMultiMap(memberRefPairs), pairsToMultiMap(inheritancePairs), pairsToMultiMap(localInheritancePairs)) } - private def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = { + private def pairsToMultiMap[A, B](pairs: collection.Seq[(A, B)]): Map[A, Set[B]] = { import scala.collection.mutable.{ HashMap, MultiMap } val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B] val multiMap = pairs.foldLeft(emptyMultiMap) { @@ -85,7 +85,7 @@ object TestCallback { acc.addBinding(key, value) } // convert all collections to immutable variants - multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty) + multiMap.toMap.view.mapValues(_.toSet).toMap.withDefaultValue(Set.empty) } } } diff --git a/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt b/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt index 6e0adfaf4d08..4949483897d9 100644 --- a/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt +++ 
b/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt @@ -1,4 +1,4 @@ scalaVersion := sys.props("plugin.scalaVersion") libraryDependencies += -("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat(scalaVersion.value) +("org.scala-lang.modules" %% "scala-xml" % "1.2.0").withDottyCompat(scalaVersion.value) diff --git a/sbt-dotty/sbt-test/scala2-compat/eff/pending b/sbt-dotty/sbt-test/scala2-compat/eff/pending new file mode 100644 index 000000000000..fbeaf3cad946 --- /dev/null +++ b/sbt-dotty/sbt-test/scala2-compat/eff/pending @@ -0,0 +1,2 @@ +# pending because eff hasn't been published for 2.13 yet: https://github.com/atnos-org/eff/issues/185 +> compile diff --git a/sbt-dotty/sbt-test/scala2-compat/eff/test b/sbt-dotty/sbt-test/scala2-compat/eff/test deleted file mode 100644 index 5df2af1f3956..000000000000 --- a/sbt-dotty/sbt-test/scala2-compat/eff/test +++ /dev/null @@ -1 +0,0 @@ -> compile diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/build.sbt b/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/build.sbt index c25b54921b2b..cbe5367a003c 100644 --- a/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/build.sbt @@ -22,7 +22,7 @@ lazy val pairs = expected.map { case (from,tos) => (toFile(from), tos.map(toFile)) } -lazy val expectedDeps = (Relation.empty[File,File] /: pairs) { case (r, (x,ys)) => r + (x,ys) } +lazy val expectedDeps = pairs.foldLeft(Relation.empty[File,File]) { case (r, (x,ys)) => r + (x,ys) } def toFile(s: String) = file(s + ".java").getAbsoluteFile def same[T](x: T, y: T): Unit = { diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/A.scala b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/A.scala index 8c375e910a2f..3eba1ed78d03 100644 --- a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/A.scala +++ b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/A.scala @@ -3,5 +3,5 @@ trait Equal[-A] { def equal(a1: A, a2: A): Boolean } object Test { - implicit def TraversableEqual[CC[X] <: collection.TraversableLike[X, CC[X]] with Traversable[X], A: Equal]: Equal[CC[A]] = sys.error("") + implicit def TraversableEqual[CC[X] <: collection.IterableOps[X, CC, CC[X]] with Iterable[X], A: Equal]: Equal[CC[A]] = sys.error("") } diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala index fed8a1e037d2..14cc2b523a82 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala @@ -93,8 +93,23 @@ object DottyPlugin extends AutoPlugin { val name = moduleID.name if (name != "dotty" && name != "dotty-library" && name != "dotty-compiler") moduleID.crossVersion match { - case _: librarymanagement.Binary if scalaVersion.startsWith("0.") => - moduleID.cross(CrossVersion.constant("2.12")) + case _: librarymanagement.Binary => + val compatVersion = + CrossVersion.partialVersion(scalaVersion) match { + case Some((3, _)) => + "2.13" + case Some((0, minor)) => + if (minor > 18 || scalaVersion.startsWith("0.18.1")) + "2.13" + else + "2.12" + case _ => + "" + } + if (compatVersion.nonEmpty) + moduleID.cross(CrossVersion.constant(compatVersion)) + else + moduleID case _ => moduleID } diff --git a/tests/disabled/partest/run/compiler-asSeenFrom.scala b/tests/disabled/partest/run/compiler-asSeenFrom.scala index 677dd40ddc4a..77795fe80e20 100644 --- 
a/tests/disabled/partest/run/compiler-asSeenFrom.scala +++ b/tests/disabled/partest/run/compiler-asSeenFrom.scala @@ -18,7 +18,7 @@ abstract class CompilerTest extends DirectTest { override def extraSettings = "-feature -usejavacp -d " + testOutput.path - def show() = (sources, units).zipped foreach check + def show() = sources.lazyZip(units) foreach check // Override at least one of these... def code = "" diff --git a/tests/disabled/partest/run/t7096.scala b/tests/disabled/partest/run/t7096.scala index 872562dd4d21..80583d85cb85 100644 --- a/tests/disabled/partest/run/t7096.scala +++ b/tests/disabled/partest/run/t7096.scala @@ -17,7 +17,7 @@ abstract class CompilerTest extends DirectTest { override def extraSettings = "-usejavacp -d " + testOutput.path - def show() = (sources, units).zipped foreach check + def show() = sources.lazyZip(units) foreach check // Override at least one of these... def code = "" diff --git a/tests/run/i3518.check b/tests/disabled/run/i3518.check similarity index 100% rename from tests/run/i3518.check rename to tests/disabled/run/i3518.check diff --git a/tests/run/i3518.scala b/tests/disabled/run/i3518.scala similarity index 100% rename from tests/run/i3518.scala rename to tests/disabled/run/i3518.scala diff --git a/tests/disabled/run/matchonstream.scala b/tests/disabled/run/matchonstream.scala new file mode 100644 index 000000000000..ed5b7c6067af --- /dev/null +++ b/tests/disabled/run/matchonstream.scala @@ -0,0 +1,4 @@ +// Disabled until https://github.com/scala/bug/issues/11687 gets fixed. +object Test extends App{ + LazyList.from(1) match { case LazyList(1, 2, x @_*) => println("It worked!") } +} diff --git a/tests/run/t5375.check b/tests/disabled/run/t5375.check similarity index 100% rename from tests/run/t5375.check rename to tests/disabled/run/t5375.check diff --git a/tests/run/t5375.scala b/tests/disabled/run/t5375.scala similarity index 100% rename from tests/run/t5375.scala rename to tests/disabled/run/t5375.scala diff --git a/tests/fuzzy/i3187.scala b/tests/fuzzy/i3187.scala new file mode 100644 index 000000000000..1d6a0fc99a03 --- /dev/null +++ b/tests/fuzzy/i3187.scala @@ -0,0 +1,2 @@ +package scala +object collection diff --git a/tests/neg/parser-stability-22.scala b/tests/fuzzy/parser-stability-22.scala similarity index 100% rename from tests/neg/parser-stability-22.scala rename to tests/fuzzy/parser-stability-22.scala diff --git a/tests/neg-custom-args/i1650.scala b/tests/neg-custom-args/i1650.scala index 3ce5d19bf7dd..6f709cd843eb 100644 --- a/tests/neg-custom-args/i1650.scala +++ b/tests/neg-custom-args/i1650.scala @@ -1,5 +1,5 @@ object Test { - test4(test4$default$1) + test4(test4$default$1) // error: recursion limit exceeded def test4[T[P]](x: T[T[List[T[X forSome { type X }]]]]) = ??? // error // error def test4$default$1[T[P]]: T[Int] = ??? 
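The sbt-bridge test changes earlier in this patch follow the standard 2.13 collection migration: `mapValues` and `filterKeys` are deprecated on `Map` and now go through `.view` (returning a lazy `MapView`), and `collection.breakOut` is gone, so strict results need an explicit `.toMap`/`.toSet`. A self-contained sketch of the pattern with made-up data; none of these values appear in the patch:

val usedNames: Map[String, Set[String]] =
  Map("A" -> Set("foo", "bar"), "base.B" -> Set("baz"))

// 2.12: usedNames.filterKeys(...) returned a lazy Map-like wrapper.
// 2.13: go through .view, then materialize explicitly.
val clientNames: Map[String, Set[String]] =
  usedNames.view.filterKeys(!_.startsWith("base.")).toMap

// 2.12: a breakOut argument could target Set[String] directly.
// 2.13: breakOut is gone; stay on the view and call .toSet at the end.
val allNames: Set[String] =
  clientNames.view.mapValues(_.map(_.toUpperCase)).values.flatten.toSet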
} diff --git a/tests/neg/equality.scala b/tests/neg/equality.scala index 3d01418cd561..87eadae20b83 100644 --- a/tests/neg/equality.scala +++ b/tests/neg/equality.scala @@ -119,14 +119,5 @@ object equality { s1 == Nil // error Nil == s2 // error s2 == Nil // error - - import collection.parallel._ - val p1 = ParSeq(1, 2, 3) - val p2 = ParSeq() - Nil == p1 // OK - p1 == Nil // OK - Nil == p2 // OK - p2 == Nil // Ok - } } diff --git a/tests/neg/i1672.scala b/tests/neg/i1672.scala index cbcc5bfb77d6..a3df6e2247bc 100644 --- a/tests/neg/i1672.scala +++ b/tests/neg/i1672.scala @@ -1,7 +1,7 @@ class Test { implicit def compareComparables[T](x: T)(implicit ord: Ordering[T]) = // error: result type of implicit definition needs to be given explicitly - new ord.Ops(x) + new ord.OrderingOps(x) class Bippy { def compare(y: Bippy) = util Random } () < () // error: value `<` is not a member of Unit } diff --git a/tests/neg/i2960.scala b/tests/neg/i2960.scala index f7c1dbe4f6f2..01286cf03c7a 100644 --- a/tests/neg/i2960.scala +++ b/tests/neg/i2960.scala @@ -11,7 +11,7 @@ class Tag(val name: String, val children: mutable.Buffer[Node] = mutable.Buffer()) extends Node { override def mkString(n: Int): String = { - Tag.spaces(n) + s"<$name ${attributes.map(_.name + "=" + Tag.unescape(_)).mkString(" ")}>" + + Tag.spaces(n) + s"<$name ${attributes.map { case (k,v) => k.name + "=" + Tag.unescape(v) }.mkString(" ")}>" + (if(children.isEmpty) "\n" else children.map(_.mkString(n + 4)).mkString("\n", "\n", "\n")) + Tag.spaces(n) + s"" diff --git a/tests/neg/i3067.scala b/tests/neg/i3067.scala index 995a199dc970..d7822c91a9f0 100644 --- a/tests/neg/i3067.scala +++ b/tests/neg/i3067.scala @@ -1,4 +1,8 @@ -class Test[T](f: List[String] => T) +class Foo[A] { + def map[B](f: A => B)(implicit bf: Int): B = ??? +} + +class Test[T](f: Foo[String] => T) object o { @@ -6,6 +10,6 @@ object o { implicit def y = "abc" // error - implicit object a extends Test(_ map identity) // error + implicit object a extends Test(_ map identity) // error // error: no implicit argument found implicit object b extends Test(_ map identity) // error // error: cyclic reference } diff --git a/tests/neg/i3067b.scala b/tests/neg/i3067b.scala index a364aff064c8..587ae98abd4d 100644 --- a/tests/neg/i3067b.scala +++ b/tests/neg/i3067b.scala @@ -1,11 +1,9 @@ -import collection.generic.CanBuildFrom +class Foo[A] { + def map[B](f: A => B)(implicit bf: Int): B = ??? +} -class Test[T](f: List[String] => T) +class Test[T](f: Foo[String] => T) object o { - - implicitly[CanBuildFrom[String, Char, String]] - implicit object b extends Test(_ map identity) // error: type needs to be given // error: cyclic reference - } diff --git a/tests/neg-custom-args/repeatedArgs213.scala b/tests/neg/repeatedArgs213.scala similarity index 97% rename from tests/neg-custom-args/repeatedArgs213.scala rename to tests/neg/repeatedArgs213.scala index ba4aeed541ca..961910f113be 100644 --- a/tests/neg-custom-args/repeatedArgs213.scala +++ b/tests/neg/repeatedArgs213.scala @@ -1,7 +1,6 @@ import scala.collection.{immutable, mutable} import java.nio.file.Paths -// Start of Test class repeatedArgs { def bar(xs: String*): Int = xs.length diff --git a/tests/neg/subtyping.check b/tests/neg/subtyping.check index a5602fe3ca21..7cf78fc4b7d4 100644 --- a/tests/neg/subtyping.check +++ b/tests/neg/subtyping.check @@ -4,15 +4,15 @@ | Cannot prove that B#X <:< A#X.. | I found: | - | $conforms[Nothing] + | <:<.refl[Nothing] | - | But method $conforms in object Predef does not match type B#X <:< A#X. 
+ | But method refl in object <:< does not match type B#X <:< A#X. -- Error: tests/neg/subtyping.scala:12:27 ------------------------------------------------------------------------------ 12 | implicitly[a.T <:< a.U] // error: no implicit argument | ^ | Cannot prove that a.T <:< a.U.. | I found: | - | $conforms[Nothing] + | <:<.refl[Nothing] | - | But method $conforms in object Predef does not match type a.T <:< a.U. + | But method refl in object <:< does not match type a.T <:< a.U. diff --git a/tests/neg/zipped.scala b/tests/neg/zipped.scala index feef1f824bf0..c7d55da6000a 100644 --- a/tests/neg/zipped.scala +++ b/tests/neg/zipped.scala @@ -5,34 +5,34 @@ object Test { // 1. This works, since withFilter is not defined on Tuple3zipped. Instead, // an implicit conversion from Tuple3zipped to Traversable[(Int, Int, Int)] is inserted. // The subsequent map operation has the right type for this Traversable. - (xs, xs, xs).zipped + xs.lazyZip(xs).lazyZip(xs) .withFilter( (x: (Int, Int, Int)) => x match { case (x, y, z) => true } ) // OK .map( (x: (Int, Int, Int)) => x match { case (x, y, z) => x + y + z }) // OK // 2. This works as well, because of auto untupling i.e. `case` is inserted. // But it does not work in Scala2. - (xs, xs, xs).zipped + xs.lazyZip(xs).lazyZip(xs) .withFilter( (x: (Int, Int, Int)) => x match { case (x, y, z) => true } ) // OK .map( (x: Int, y: Int, z: Int) => x + y + z ) // OK // works, because of auto untupling i.e. `case` is inserted // does not work in Scala2 // 3. Now, without withFilter, it's the opposite, we need the 3 parameter map. - (xs, xs, xs).zipped + xs.lazyZip(xs).lazyZip(xs) .map( (x: Int, y: Int, z: Int) => x + y + z ) // OK // 4. The single parameter map does not work. - (xs, xs, xs).zipped + xs.lazyZip(xs).lazyZip(xs) .map( (x: (Int, Int, Int)) => x match { case (x, y, z) => x + y + z }) // error // 5. If we leave out the parameter type, we get a "Wrong number of parameters" error instead - (xs, xs, xs).zipped + xs.lazyZip(xs).lazyZip(xs) .map( x => x match { case (x, y, z) => x + y + z }) // error // This means that the following works in Dotty in normal mode, since a `withFilter` // is inserted. But it does no work under -strict. And it will not work in Scala 3.1. // The reason is that without -strict, the code below is mapped to (1), but with -strict // it is mapped to (5). 
- for ((x, y, z) <- (xs, xs, xs).zipped) yield x + y + z + for ((x, y, z) <- xs.lazyZip(xs).lazyZip(xs)) yield x + y + z } \ No newline at end of file diff --git a/tests/pending/run/streams.scala b/tests/pending/run/streams.scala index 350e103eab0b..563f5ca0d6cf 100644 --- a/tests/pending/run/streams.scala +++ b/tests/pending/run/streams.scala @@ -8,7 +8,7 @@ object Test extends App { println val s1 = Stream.cons(1, Stream.empty) - println(s1.toArray.deep) + println(s1.toArray.toList) println(s1.take(1)) println(s1.take(2)) println(s1.drop(1)) @@ -22,7 +22,7 @@ object Test extends App { println val s2 = s1.append(Stream.cons(2, Stream.empty)) - println(s2.toArray.deep) + println(s2.toArray.toList) println(s2.drop(1)) println(s2.drop(2)) println(s2.drop(-1)) diff --git a/tests/pending/run/t0421-old.scala b/tests/pending/run/t0421-old.scala index dde89bc5421a..a9264b68cb5d 100644 --- a/tests/pending/run/t0421-old.scala +++ b/tests/pending/run/t0421-old.scala @@ -17,16 +17,16 @@ object Test extends App { def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { val ysst = transpose(yss) val ysst1: Array[Array[Double]] = yss.transpose - assert(ysst.deep == ysst1.deep) + assert(ysst.toList == ysst1.toList) for (xs <- xss) yield for (yst <- ysst) yield scalprod(xs, yst) } val a1 = Array(Array(0, 2, 4), Array(1, 3, 5)) - println(transpose(a1).deep.mkString("[", ",", "]")) + println(transpose(a1).toList.mkString("[", ",", "]")) - println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).deep.mkString("[", ",", "]")) + println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).toList.mkString("[", ",", "]")) - println(matmul(Array(Array(4)), Array(Array(6, 8))).deep.mkString("[", ",", "]")) + println(matmul(Array(Array(4)), Array(Array(6, 8))).toList.mkString("[", ",", "]")) } diff --git a/tests/pending/run/t2251.check b/tests/pending/run/t2251.check index 55ad2a58571e..0a9ea3859421 100644 --- a/tests/pending/run/t2251.check +++ b/tests/pending/run/t2251.check @@ -1 +1 @@ -Set(List(List(C), Stream(D, ?))) +Set(List(List(C), LazyList(D, ?))) diff --git a/tests/pending/run/t2251.scala b/tests/pending/run/t2251.scala index 00c5619b499a..c3e7c506f880 100644 --- a/tests/pending/run/t2251.scala +++ b/tests/pending/run/t2251.scala @@ -4,11 +4,11 @@ class C extends B[C] { override def toString = "C" } class D extends B[D] { override def toString = "D" } class E { - val ys = List(List(new C), Stream(new D)) + val ys = List(List(new C), LazyList(new D)) } object Test { - def trav = List(List(), Stream()) + def trav = List(List(), LazyList()) def main(args: Array[String]): Unit = { val f = (new E).ys _ diff --git a/tests/pending/run/t3158.scala b/tests/pending/run/t3158.scala index b9304ee445cb..5de821b80d88 100644 --- a/tests/pending/run/t3158.scala +++ b/tests/pending/run/t3158.scala @@ -1,6 +1,6 @@ object Test { def main(args: Array[String]): Unit = { - println(args.map(_ => foo _).deep) + println(args.map(_ => foo _).toList) } def foo(xs: String*): Unit = { diff --git a/tests/pending/run/t5966.check b/tests/pending/run/t5966.check index bfe8358a77d5..1546eb55616e 100644 --- a/tests/pending/run/t5966.check +++ b/tests/pending/run/t5966.check @@ -1,3 +1,3 @@ (o()_)("") = List() -(o("a1")_)("") = WrappedArray(a1) -(o("a1", "a2")_)("") = WrappedArray(a1, a2) +(o("a1")_)("") = ArraySeq(a1) +(o("a1", "a2")_)("") = ArraySeq(a1, a2) diff --git a/tests/pos-deep-subtype/i6119.scala b/tests/pos-deep-subtype/i6119.scala deleted file mode 100644 index 25c278ae67d8..000000000000 --- 
a/tests/pos-deep-subtype/i6119.scala +++ /dev/null @@ -1,13 +0,0 @@ -class Test { - def to[Col[_]](factory: Factory[Int, Col[Int]]): Col[Int] = ??? - - to(Vector) - - type Factory[-A, +C] = scala.collection.generic.CanBuildFrom[Nothing, A, C] // Ideally, this would be an opaque type - - implicit def fromCanBuildFromConversion[X, A, C](x: X)( - implicit toCanBuildFrom: X => scala.collection.generic.CanBuildFrom[Nothing, A, C]): Factory[A, C] = ??? - - implicit def genericCompanionToCBF[A, CC[X] <: scala.collection.GenTraversable[X]]( - fact: scala.collection.generic.GenericCompanion[CC]): scala.collection.generic.CanBuildFrom[Any, A, CC[A]] = ??? -} diff --git a/tests/pos-java-interop/t2956/BeanDefinitionVisitor.java b/tests/pos-java-interop/t2956/BeanDefinitionVisitor.java deleted file mode 100644 index 2ff5daa25398..000000000000 --- a/tests/pos-java-interop/t2956/BeanDefinitionVisitor.java +++ /dev/null @@ -1,6 +0,0 @@ -import java.util.Map; -public class BeanDefinitionVisitor { - @SuppressWarnings("unchecked") - protected void visitMap(Map mapVal) { - } -} diff --git a/tests/pos-java-interop/t2956/t2956.scala b/tests/pos-java-interop/t2956/t2956.scala deleted file mode 100644 index 33803874b6bc..000000000000 --- a/tests/pos-java-interop/t2956/t2956.scala +++ /dev/null @@ -1,7 +0,0 @@ -import scala.collection.JavaConversions._ - -class Outer { - protected class Inner extends BeanDefinitionVisitor { - protected def visitMap(mapVal: Map[_, _]): Unit = () - } -} diff --git a/tests/pos/i7011/Macros_1.scala b/tests/pos-macros/i7011/Macros_1.scala similarity index 100% rename from tests/pos/i7011/Macros_1.scala rename to tests/pos-macros/i7011/Macros_1.scala diff --git a/tests/pos/i7011/Test_2.scala b/tests/pos-macros/i7011/Test_2.scala similarity index 100% rename from tests/pos/i7011/Test_2.scala rename to tests/pos-macros/i7011/Test_2.scala diff --git a/tests/pos-scala2/GenTraversableFactory.scala b/tests/pos-scala2/GenTraversableFactory.scala deleted file mode 100644 index 2f93ab27b14a..000000000000 --- a/tests/pos-scala2/GenTraversableFactory.scala +++ /dev/null @@ -1,252 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `Traversable` and subclasses thereof. - * This class provides a set of operations to create `$Coll` objects. - * It is typically inherited by companion objects of subclasses of `Traversable`. - * - * @since 2.8 - * - * @define coll collection - * @define Coll `Traversable` - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * The created value is an instance of class `GenericCanBuildFrom`, - * which forwards calls to create a new builder to the - * `genericBuilder` method of the requesting collection. 
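The test updates a few hunks above (streams.scala, t0421-old.scala, t2251, t3158, t5966) all track simple 2.13 renames: `Stream` is superseded by `LazyList`, `Array#deep` is gone, and repeated parameters arrive as an immutable `ArraySeq` instead of a `WrappedArray`. A compact illustration, not taken from the patch:

val nats = LazyList.from(1)          // was Stream.from(1)
println(Array(1, 2, 3).toList)       // was Array(1, 2, 3).deep
// On 2.13 a varargs parameter is an immutable Seq (backed by ArraySeq).
def varargsAreImmutable(xs: Int*): collection.immutable.Seq[Int] = xs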
- * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]] -extends GenericCompanion[CC] { - - private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { - override def apply() = newBuilder[Nothing] - } - def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance - - /** A generic implementation of the `CanBuildFrom` trait, which forwards - * all calls to `apply(from)` to the `genericBuilder` method of - * $coll `from`, and which forwards all calls of `apply()` to the - * `newBuilder` method of this factory. - */ - class GenericCanBuildFrom[A] extends CanBuildFrom[CC[_], A, CC[A]] { - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. - * @return the result of invoking the `genericBuilder` method on `from`. - */ - def apply(from: Coll) = from.genericBuilder[A] - - /** Creates a new builder from scratch - * @return the result of invoking the `newBuilder` method of this factory. - */ - def apply() = newBuilder[A] - } - - /** Concatenates all argument collections into a single $coll. - * - * @param xss the collections that are to be concatenated. - * @return the concatenation of all the collections. - */ - def concat[A](xss: Traversable[A]*): CC[A] = { - val b = newBuilder[A] - // At present we're using IndexedSeq as a proxy for "has a cheap size method". - if (xss forall (_.isInstanceOf[IndexedSeq[_]])) - b.sizeHint(xss.map(_.size).sum) - - for (xs <- xss.seq) b ++= xs - b.result() - } - - /** Produces a $coll containing the results of some element computation a number of times. - * @param n the number of elements contained in the $coll. - * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. - */ - def fill[A](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = - tabulate(n1)(_ => fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = - tabulate(n1)(_ => fill(n2, n3)(elem)) - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. 
- */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(_ => fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
- * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Produces a $coll containing a sequence of increasing of integers. - * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one) - - /** Produces a $coll containing equally spaced values in some integer interval. - * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[T: Integral](start: T, end: T, step: T): CC[T] = { - val num = implicitly[Integral[T]] - import num._ - - if (step == zero) throw new IllegalArgumentException("zero step") - val b = newBuilder[T] - b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false) - var i = start - while (if (/*num.mkOrderingOps*/(step) < zero) end < i else i < end) { - b += i - i += step - } - b.result() - } - - /** Produces a $coll containing repeated applications of a function to a start value. - * - * @param start the start value of the $coll - * @param len the number of elements contained inthe $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A] = { - val b = newBuilder[A] - if (len > 0) { - b.sizeHint(len) - var acc = start - var i = 1 - b += acc - - while (i < len) { - acc = f(acc) - i += 1 - b += acc - } - } - b.result() - } -} diff --git a/tests/pos-scala2/hk-infer.scala b/tests/pos-scala2/hk-infer.scala index c23a9d15166d..2bf898c485cf 100644 --- a/tests/pos-scala2/hk-infer.scala +++ b/tests/pos-scala2/hk-infer.scala @@ -16,7 +16,7 @@ object DoesWork { // Testing the not giving of explicit Booper[M] arguments. 
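The `GenTraversableFactory`/`GenericCanBuildFrom` sources deleted above have no direct 2.13 counterpart; generic construction now goes through `scala.collection.Factory` (or `IterableFactory`). A minimal sketch of the replacement idiom; `fillLike` is an illustrative name, not code from the patch:

import scala.collection.Factory

// Builds n copies of elem; the concrete collection type is chosen by the
// Factory argument, much like the old CanBuildFrom parameter did.
def fillLike[A, C](n: Int)(elem: => A)(factory: Factory[A, C]): C = {
  val b = factory.newBuilder
  b.sizeHint(n)
  var i = 0
  while (i < n) { b += elem; i += 1 }
  b.result()
}

// Companion objects convert to Factory implicitly (as in xs.to(Vector)),
// so the call site picks the shape of the result.
val threeXs: List[String]   = fillLike(3)("x")(List)
val threeYs: Vector[String] = fillLike(3)("x")(Vector)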
object ShouldWorkHK { - class Booper[M[_]](xs: Seq[M[_]]) extends collection.generic.SeqForwarder[M[_]] { + class Booper[M[_]](xs: Seq[M[_]]) { def underlying = xs def BOOP(ys: Seq[M[_]]) = new Booper(xs ++ ys) } @@ -26,7 +26,7 @@ object ShouldWorkHK { } object DoesWorkHK { - class Booper[M[_]](xs: Seq[M[_]]) extends collection.generic.SeqForwarder[M[_]] { + class Booper[M[_]](xs: Seq[M[_]]) { def underlying = xs def BOOP(ys: Seq[M[_]]) = new Booper[M](xs ++ ys) } diff --git a/tests/pos-scala2/t3568.scala b/tests/pos-scala2/t3568.scala index 50f0cdb2ebf3..59b6753cd342 100644 --- a/tests/pos-scala2/t3568.scala +++ b/tests/pos-scala2/t3568.scala @@ -20,7 +20,7 @@ package buffer { class Vec2 extends ElemType { type Element = Vec2; type Component = Float1 } abstract class BaseSeq[T <: ElemType, E] - extends IndexedSeq[E] with IndexedSeqOptimized[E, IndexedSeq[E]] { + extends IndexedSeq[E] with StrictOptimizedSeqOps[E, IndexedSeq, IndexedSeq[E]] { def length = 1 def apply(i: Int) :E } diff --git a/tests/pos-scala2/t3688.scala b/tests/pos-scala2/t3688.scala deleted file mode 100644 index bf7983081112..000000000000 --- a/tests/pos-scala2/t3688.scala +++ /dev/null @@ -1,14 +0,0 @@ -import collection.mutable -import collection.JavaConversions._ -import java.{util => ju} - -object Test { - - implicitly[mutable.Map[Int, String] => ju.Dictionary[Int, String]] - -} - -object Test2 { - def m[P <% ju.List[Int]](l: P) = 1 - m(List(1)) // bug: should compile -} diff --git a/tests/pos-special/repeatedArgs213.scala b/tests/pos-special/repeatedArgs213.scala deleted file mode 100644 index 4760dc44ef72..000000000000 --- a/tests/pos-special/repeatedArgs213.scala +++ /dev/null @@ -1,38 +0,0 @@ -import scala.collection.{immutable, mutable} -import java.nio.file.Paths - -// Missing from 2.12 standard library -package scala.runtime { - object ScalaRunTime { - abstract class ArraySeq[+A] extends immutable.Seq[A] - - def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = ??? - def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq[T] = ??? - def wrapIntArray(xs: Array[Int]): ArraySeq[Int] = ??? - def wrapDoubleArray(xs: Array[Double]): ArraySeq[Double] = ??? - def wrapLongArray(xs: Array[Long]): ArraySeq[Long] = ??? - def wrapFloatArray(xs: Array[Float]): ArraySeq[Float] = ??? - def wrapCharArray(xs: Array[Char]): ArraySeq[Char] = ??? - def wrapByteArray(xs: Array[Byte]): ArraySeq[Byte] = ??? - def wrapShortArray(xs: Array[Short]): ArraySeq[Short] = ??? - def wrapBooleanArray(xs: Array[Boolean]): ArraySeq[Boolean] = ??? - def wrapUnitArray(xs: Array[Unit]): ArraySeq[Unit] = ??? 
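t2956 and t3688 above are deleted because `scala.collection.JavaConversions` and its implicit views no longer exist in 2.13; Java/Scala conversions have to be requested explicitly. A short sketch using the explicit converters (`scala.jdk.CollectionConverters` is their 2.13 home, with the older `scala.collection.JavaConverters` deprecated); the values below are illustrative:

import java.{util => ju}
import scala.collection.mutable
import scala.jdk.CollectionConverters._

// Explicit .asJava / .asScala calls replace the old implicit views.
val javaMap: ju.Map[Int, String]  = mutable.Map(1 -> "one").asJava
val scalaBuf: mutable.Buffer[Int] = ju.Arrays.asList(1, 2, 3).asScala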
- } -} - -// Start of Test -class repeatedArgs { - def bar(xs: String*): Int = bat(xs) - def bat(xs: immutable.Seq[String]) = xs.length - - def test(xs: immutable.Seq[String]): Unit = { - bar("a", "b", "c") - bar(xs: _*) - - Paths.get("Hello", "World") - Paths.get("Hello", xs: _*) - - val List(_, others: _*) = xs.toList // toList should not be needed, see #4790 - val x: immutable.Seq[String] = others - } -} diff --git a/tests/pos/GenericTraversableTemplate.scala b/tests/pos/GenericTraversableTemplate.scala deleted file mode 100644 index e9120e33cb69..000000000000 --- a/tests/pos/GenericTraversableTemplate.scala +++ /dev/null @@ -1,232 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.migration -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds - -/** A template class for companion objects of ``regular`` collection classes - * that represent an unconstrained higher-kinded type. - * - * @tparam A The type of the collection elements. - * @tparam CC The type constructor representing the collection class. - * @author Martin Odersky - * @since 2.8 - * @define coll collection - * @define Coll CC - */ -trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { - - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - */ - def foreach[U](f: A => U): Unit - - /** Selects the first element of this $coll. - * - * @return the first element of this $coll. - * @throws `NoSuchElementException` if the $coll is empty. - */ - def head: A - - /** Tests whether this $coll is empty. - * - * @return `true` if the $coll contain no elements, `false` otherwise. - */ - def isEmpty: Boolean - - /** The factory companion object that builds instances of class $Coll. - * (or its `Iterable` superclass where class $Coll is not a `Seq`.) - */ - def companion: GenericCompanion[CC] - - /** The builder that builds instances of type $Coll[A] - */ - protected[this] def newBuilder: Builder[A, CC[A]] @uncheckedVariance = companion.newBuilder[A] - - /** The generic builder that builds instances of $Coll - * at arbitrary element types. - */ - def genericBuilder[B]: Builder[B, CC[B]] = companion.newBuilder[B] - - private def sequential: TraversableOnce[A] = this.asInstanceOf[GenTraversableOnce[A]].seq - - /** Converts this $coll of pairs into two collections of the first and second - * half of each pair. - * - * {{{ - * val xs = $Coll( - * (1, "one"), - * (2, "two"), - * (3, "three")).unzip - * // xs == ($Coll(1, 2, 3), - * // $Coll(one, two, three)) - * }}} - * - * @tparam A1 the type of the first half of the element pairs - * @tparam A2 the type of the second half of the element pairs - * @param asPair an implicit conversion which asserts that the element type - * of this $coll is a pair. - * @return a pair of ${coll}s, containing the first, respectively second - * half of each element pair of this $coll. 
- */ - def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { - val b1 = genericBuilder[A1] - val b2 = genericBuilder[A2] - for (xy <- sequential) { - val (x, y) = asPair(xy) - b1 += x - b2 += y - } - (b1.result(), b2.result()) - } - - /** Converts this $coll of triples into three collections of the first, second, - * and third element of each triple. - * - * {{{ - * val xs = $Coll( - * (1, "one", '1'), - * (2, "two", '2'), - * (3, "three", '3')).unzip3 - * // xs == ($Coll(1, 2, 3), - * // $Coll(one, two, three), - * // $Coll(1, 2, 3)) - * }}} - * - * @tparam A1 the type of the first member of the element triples - * @tparam A2 the type of the second member of the element triples - * @tparam A3 the type of the third member of the element triples - * @param asTriple an implicit conversion which asserts that the element type - * of this $coll is a triple. - * @return a triple of ${coll}s, containing the first, second, respectively - * third member of each element triple of this $coll. - */ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { - val b1 = genericBuilder[A1] - val b2 = genericBuilder[A2] - val b3 = genericBuilder[A3] - - for (xyz <- sequential) { - val (x, y, z) = asTriple(xyz) - b1 += x - b2 += y - b3 += z - } - (b1.result(), b2.result(), b3.result()) - } - - /** Converts this $coll of traversable collections into - * a $coll formed by the elements of these traversable - * collections. - * - * @tparam B the type of the elements of each traversable collection. - * @param asTraversable an implicit conversion which asserts that the element - * type of this $coll is a `GenTraversable`. - * @return a new $coll resulting from concatenating all element ${coll}s. - * - * @usecase def flatten[B]: $Coll[B] - * - * @inheritdoc - * - * The resulting collection's type will be guided by the - * static type of $coll. For example: - * - * {{{ - * val xs = List( - * Set(1, 2, 3), - * Set(1, 2, 3) - * ).flatten - * // xs == List(1, 2, 3, 1, 2, 3) - * - * val ys = Set( - * List(1, 2, 3), - * List(3, 2, 1) - * ).flatten - * // ys == Set(1, 2, 3) - * }}} - */ - def flatten[B](implicit asTraversable: A => /*<: /*<:= headSize) fail - bs(i) += x - i += 1 - } - if (i != headSize) - fail - } - val bb = genericBuilder[CC[B]] - for (b <- bs) bb += b.result() - bb.result() - } -} - diff --git a/tests/pos/Map.scala b/tests/pos/Map.scala deleted file mode 100644 index 5178d5a862cf..000000000000 --- a/tests/pos/Map.scala +++ /dev/null @@ -1,194 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package immutable - -import generic._ - -/** - * A generic trait for immutable maps. Concrete classes have to provide - * functionality for the abstract methods in `Map`: - * - * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * def + [B1 >: B](kv: (A, B1)): Map[A, B1] - * def -(key: A): Map[A, B] - * }}} - * - * @since 1 - */ -trait Map[A, +B] extends Iterable[(A, B)] -// with GenMap[A, B] - with scala.collection.Map[A, B] - with MapLike[A, B, Map[A, B]] { self => - - override def empty: Map[A, B] = Map.empty - - /** Returns this $coll as an immutable map. - * - * A new map will not be built; lazy collections will stay lazy. 
- */ - @deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.", "2.11.0") - override def toMap[T, U](implicit ev: (A, B) <:< (T, U)): immutable.Map[T, U] = - self.asInstanceOf[immutable.Map[T, U]] - - override def seq: Map[A, B] = this - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, d) - - /** The same map with a given default value. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, x => d) - - /** Add a key/value pair to this map. - * @param key the key - * @param value the value - * @return A new map with the new binding added to this map - */ - override def updated [B1 >: B](key: A, value: B1): Map[A, B1] - def + [B1 >: B](kv: (A, B1)): Map[A, B1] -} - -/** $factoryInfo - * @define Coll `immutable.Map` - * @define coll immutable map - */ -object Map extends ImmutableMapFactory[Map] { - - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B] - - def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]] - - class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] { - override def empty = new WithDefault(underlying.empty, d) - override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d) - override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2) - override def - (key: A): WithDefault[A, B] = new WithDefault(underlying - key, d) - override def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, d) - override def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, x => d) - } - - private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable { - override def size: Int = 0 - def get(key: Any): Option[Nothing] = None - def iterator: Iterator[(Any, Nothing)] = Iterator.empty - override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value) - def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2) - def - (key: Any): Map[Any, Nothing] = this - } - - class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { - override def size = 1 - def get(key: A): Option[B] = - if (key == key1) Some(value1) else None - def iterator = Iterator((key1, value1)) - override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = - if (key == key1) new Map1(key1, value) - else new Map2(key1, value1, key, value) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = - if (key == key1) Map.empty else this - override def foreach[U](f: ((A, B)) => U): Unit = 
{ - f((key1, value1)) - } - } - - class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { - override def size = 2 - def get(key: A): Option[B] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else None - def iterator = Iterator((key1, value1), (key2, value2)) - override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = - if (key == key1) new Map2(key1, value, key2, value2) - else if (key == key2) new Map2(key1, value1, key2, value) - else new Map3(key1, value1, key2, value2, key, value) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = - if (key == key1) new Map1(key2, value2) - else if (key == key2) new Map1(key1, value1) - else this - override def foreach[U](f: ((A, B)) => U): Unit = { - f((key1, value1)); f((key2, value2)) - } - } - - class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { - override def size = 3 - def get(key: A): Option[B] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else None - def iterator = Iterator((key1, value1), (key2, value2), (key3, value3)) - override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = - if (key == key1) new Map3(key1, value, key2, value2, key3, value3) - else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) - else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) - else new Map4(key1, value1, key2, value2, key3, value3, key, value) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = - if (key == key1) new Map2(key2, value2, key3, value3) - else if (key == key2) new Map2(key1, value1, key3, value3) - else if (key == key3) new Map2(key1, value1, key2, value2) - else this - override def foreach[U](f: ((A, B)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)) - } - } - - class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { - override def size = 4 - def get(key: A): Option[B] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else if (key == key4) Some(value4) - else None - def iterator = Iterator((key1, value1), (key2, value2), (key3, value3), (key4, value4)) - override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = - if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) - else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) - else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) - else new HashMap + ((key1, value1), (key2, value2), (key3, value3), (key4, value4), (key, value)) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = - if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) - else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) - else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) - else this - override def foreach[U](f: ((A, B)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) - } - } 
-} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ -abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B] diff --git a/tests/pos/Meter.scala b/tests/pos/Meter.scala index c32d6a4142f1..321047b0215f 100644 --- a/tests/pos/Meter.scala +++ b/tests/pos/Meter.scala @@ -79,7 +79,7 @@ object Test extends App { { println("testing native arrays") val arr = Array(x, y + x) - println(arr.deep) + println(arr.toList) def foo[T <: Printable](x: Array[T]) = { for (i <- 0 until x.length) { x(i).print; println(" " + x(i)) } } diff --git a/tests/pos/arrays2.scala b/tests/pos/arrays2.scala index c9e5e0bfc7ad..8984fd615ad1 100644 --- a/tests/pos/arrays2.scala +++ b/tests/pos/arrays2.scala @@ -1,11 +1,11 @@ -case class C(); +case class C() object arrays2 { def main(args: Array[String]): Unit = { - val a: Array[Array[C]] = new Array[Array[C]](2); - a(0) = new Array[C](2); - a(0)(0) = new C(); + val a: Array[Array[C]] = new Array[Array[C]](2) + a(0) = new Array[C](2) + a(0)(0) = new C() } } @@ -14,17 +14,14 @@ object arrays4 { val args = Array[String]("World") "Hello %1$s".format(args: _*) } +/* +test/files/pos/arrays2.scala:15: warning: Passing an explicit array value to a Scala varargs method is deprecated (since 2.13.0) and will result in a defensive copy; Use the more efficient non-copying ArraySeq.unsafeWrapArray or an explicit toIndexedSeq call + "Hello %1$s".format(args: _*) + ^ +one warning found +*/ // #2461 object arrays3 { - import scala.collection.JavaConversions._ def apply[X](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*) - - def apply1[X <: String](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*) - def apply2[X <: AnyVal](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*) - def apply3(xs : Int*) : java.util.List[Int] = java.util.Arrays.asList(xs: _*) - def apply4(xs : Unit*) : java.util.List[Unit] = java.util.Arrays.asList(xs: _*) - def apply5(xs : Null*) : java.util.List[Null] = java.util.Arrays.asList(xs: _*) - def apply6(xs : Nothing*) : java.util.List[Nothing] = java.util.Arrays.asList(xs: _*) } - diff --git a/tests/pos/collectGenericCC.scala b/tests/pos/collectGenericCC.scala index b0f60de736c1..ba19d78d065d 100644 --- a/tests/pos/collectGenericCC.scala +++ b/tests/pos/collectGenericCC.scala @@ -1,9 +1,8 @@ -import scala.collection.generic.CanBuildFrom import scala.collection._ object Test { - def collect[A, Res](r: Traversable[A])(implicit bf: generic.CanBuild[A, Res]) = { - val b: collection.mutable.Builder[A, Res] = bf() + def collect[A, Res](r: Iterable[A])(implicit bf: Factory[A, Res]) = { + val b: collection.mutable.Builder[A, Res] = bf.newBuilder r foreach ((a: A) => b += a) b.result } diff --git a/tests/pos/collections.scala b/tests/pos/collections.scala deleted file mode 100644 index 5edcff986aab..000000000000 --- a/tests/pos/collections.scala +++ /dev/null @@ -1,36 +0,0 @@ -import scala.collection.generic.CanBuildFrom - -object collections { - - val arr = Array("a", "b") - val aa = arr ++ arr - - List(1, 2, 3) map (x => 2) - - val s = Set(1, 2, 3) - val ss = s map (_ + 1) - - val cbf: CanBuildFrom[List[_], Int, List[Int]] = scala.collection.immutable.List.canBuildFrom - - val nil = Nil - val ints1 = 1 :: Nil - val ints2 = 1 :: 2 :: Nil - val ints3: List[Int] = ints2 - val f = (x: Int) => x + 1 - val ints4: List[Int] = List(1, 2, 3, 5) - - val ys = ints3 map (x => x + 1) - val zs = ys filter (y => y != 0) - - val chrs = "abc" - - def do2(x: Int, y: Char) = () - 
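// Meter.scala above replaces Array#deep (gone in 2.13) with toList for printing;
// mkString is another common substitute. A tiny sketch (values are illustrative):
object ArrayPrintSketch {
  val arr = Array(1, 2, 3)
  println(arr.toList)                          // List(1, 2, 3)
  println(arr.mkString("Array(", ", ", ")"))   // Array(1, 2, 3)
}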
- chrs foreach println - - (ints2, chrs).zipped foreach do2 - - val xs = List(List(1), List(2), List(3)).iterator - println(/*scala.collection.TraversableOnce.flattenTraversableOnce*/(xs).flatten) - -} diff --git a/tests/pos/collections_1.scala b/tests/pos/collections_1.scala deleted file mode 100644 index 23b23d016e1b..000000000000 --- a/tests/pos/collections_1.scala +++ /dev/null @@ -1,15 +0,0 @@ -package mixins; - -import scala.collection.mutable._; - -class Collections extends HashSet[Int] with ObservableSet[Int] { - override def +=(elem: Int): this.type = super.+=(elem); - override def -=(elem: Int): this.type = super.-=(elem); - override def clear: Unit = super.clear; - -} - -object collections extends Collections; - -//class Collections1 extends HashSet[Int] with ObservableSet[Int,Collections1]; -//object collections1 extends Collections1; diff --git a/tests/pos/factory-conversion.scala b/tests/pos/factory-conversion.scala new file mode 100644 index 000000000000..10a6a5435000 --- /dev/null +++ b/tests/pos/factory-conversion.scala @@ -0,0 +1,7 @@ +import scala.annotation.unchecked.uncheckedVariance + +object Test { + def to[Col[_]](factory: collection.Factory[Int, Col[Int @uncheckedVariance]]) = ??? + + val test = to(List) +} diff --git a/tests/pos/hklub0.scala b/tests/pos/hklub0.scala index 36cd46332c28..6ed3f105cfd0 100644 --- a/tests/pos/hklub0.scala +++ b/tests/pos/hklub0.scala @@ -1,5 +1,6 @@ +class GenericCompanion[+CC[X] <: Iterable[X]] object Test { - val a : scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq] = null - val b : scala.collection.generic.GenericCompanion[scala.collection.mutable.Seq] = null - List(a, b) // immutable.this.List.apply[scala.collection.generic.GenericCompanion[Seq]](Test.this.a, Test.this.b) + val a : GenericCompanion[scala.collection.immutable.Seq] = null + val b : GenericCompanion[scala.collection.mutable.Seq] = null + List(a, b) // immutable.this.List.apply[GenericCompanion[Seq]](Test.this.a, Test.this.b) } diff --git a/tests/pos/i1442.scala b/tests/pos/i1442.scala index abb46d3aaf91..c5ce0bcc2305 100644 --- a/tests/pos/i1442.scala +++ b/tests/pos/i1442.scala @@ -2,7 +2,7 @@ object Test1442 { final def sumMinimized[B](num: Numeric[B]): Int = { var cse: scala.math.Numeric.type = null.asInstanceOf[scala.math.Numeric.type] ({cse = scala.math.Numeric; num eq cse.IntIsIntegral} || - (num eq cse.DoubleAsIfIntegral)) + (num eq cse.BigDecimalAsIfIntegral)) 2 } @@ -15,9 +15,7 @@ object Test1442 { (num eq cse.ByteIsIntegral)|| (num eq cse.CharIsIntegral)|| (num eq cse.LongIsIntegral)|| - (num eq cse.FloatAsIfIntegral)|| - (num eq cse.BigDecimalIsFractional)|| - (num eq cse.DoubleAsIfIntegral)) { + (num eq cse.BigDecimalIsFractional)) { null.asInstanceOf[B] } else null.asInstanceOf[B] } diff --git a/tests/pos/i1754.scala b/tests/pos/i1754.scala deleted file mode 100644 index bb0da32671d3..000000000000 --- a/tests/pos/i1754.scala +++ /dev/null @@ -1,24 +0,0 @@ -object Test { - import java.util.{ concurrent => juc } - import scala.collection.concurrent - import scala.collection.convert.Wrappers._ - - /** - * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. - * The returned Scala ConcurrentMap is backed by the provided Java - * ConcurrentMap and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. 
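// collectGenericCC.scala and the new factory-conversion.scala above move from
// CanBuildFrom/CanBuild to scala.collection.Factory. A minimal sketch of the
// same pattern, assuming only the standard library (collectTo is a made-up name):
import scala.collection.Factory

object FactorySketch {
  def collectTo[A, C](xs: Iterable[A])(factory: Factory[A, C]): C = {
    val b = factory.newBuilder        // a mutable.Builder[A, C], as in collect above
    xs.foreach(b += _)
    b.result()
  }
  // A collection companion such as Vector adapts to a Factory implicitly.
  val v: Vector[Int] = collectTo(List(1, 2, 3))(Vector)
}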
- * - * If the Java ConcurrentMap was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala ConcurrentMap will be returned. - * - * @param m The ConcurrentMap to be converted. - * @return A Scala mutable ConcurrentMap view of the argument. - */ - implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match { - case null => null - case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying - case _ => new JConcurrentMapWrapper(m) - } -} diff --git a/tests/pos/i6828.scala b/tests/pos/i6828.scala index b0e73e5e0225..873f6d517a74 100644 --- a/tests/pos/i6828.scala +++ b/tests/pos/i6828.scala @@ -1,5 +1,5 @@ class Foo { - inline def foo[T](implicit ct: =>scala.reflect.ClassTag[T]): Unit = Unit + inline def foo[T](implicit ct: =>scala.reflect.ClassTag[T]): Unit = () foo[Int] foo[String] } diff --git a/tests/pos/i938.scala b/tests/pos/i938.scala deleted file mode 100644 index cf8fd76e36bb..000000000000 --- a/tests/pos/i938.scala +++ /dev/null @@ -1,21 +0,0 @@ -object Test { - import scala.collection._ - - trait T { - def f() : Unit - } - - def view = new T { - def f() = () - } - - trait TLike[+A, RR] { self => - - def repr: RR = ??? - - def view2 = new TraversableView[A, RR] { - protected lazy val underlying = self.repr - override def foreach[U](f: A => U): Unit = ??? - } - } -} diff --git a/tests/pos/i966.scala b/tests/pos/i966.scala deleted file mode 100644 index 1fedc00238ff..000000000000 --- a/tests/pos/i966.scala +++ /dev/null @@ -1,15 +0,0 @@ -package scala -package collection -package immutable - -import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 } -import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 } - -object i996{ - - private[this] def collisionToArray[T](x: Iterable[T]): Array[Iterable[T]] = (x match { - case x: HashMapCollision1[_, _] => x.kvs.map(x => HashMap(x)).toArray - case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray - }).asInstanceOf[Array[Iterable[T]]] - -} diff --git a/tests/pos/i996.scala b/tests/pos/i996.scala deleted file mode 100644 index 1fedc00238ff..000000000000 --- a/tests/pos/i996.scala +++ /dev/null @@ -1,15 +0,0 @@ -package scala -package collection -package immutable - -import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 } -import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 } - -object i996{ - - private[this] def collisionToArray[T](x: Iterable[T]): Array[Iterable[T]] = (x match { - case x: HashMapCollision1[_, _] => x.kvs.map(x => HashMap(x)).toArray - case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray - }).asInstanceOf[Array[Iterable[T]]] - -} diff --git a/tests/pos/iterator-traversable-mix.scala b/tests/pos/iterator-traversable-mix.scala index acc9c13adfeb..3a365c9d0e74 100644 --- a/tests/pos/iterator-traversable-mix.scala +++ b/tests/pos/iterator-traversable-mix.scala @@ -3,6 +3,6 @@ object Test { x1 <- List(1, 2) x2 <- Iterator(3, 4) x3 <- Seq(5, 6).iterator - x4 <- Stream(7, 8) + x4 <- LazyList(7, 8) } yield x1 + x2 + x3 + x4 } diff --git a/tests/pos/javaConversions-2.10-ambiguity.scala b/tests/pos/javaConversions-2.10-ambiguity.scala index c4aad6cbfc84..e92185bb4fb8 100644 --- a/tests/pos/javaConversions-2.10-ambiguity.scala +++ b/tests/pos/javaConversions-2.10-ambiguity.scala @@ -1,10 +1,9 @@ -import collection.{JavaConversions, mutable, concurrent} -import JavaConversions._ +import collection.{mutable, concurrent} +import collection.JavaConverters._ import 
java.util.concurrent.{ConcurrentHashMap => CHM} object Bar { def assertType[T](t: T) = t - val a = new CHM[String, String]() += (("", "")) + val a = new CHM[String, String]().asScala += (("", "")) assertType[concurrent.Map[String, String]](a) } -// vim: set et: diff --git a/tests/pos/javaConversions-2.10-regression.scala b/tests/pos/javaConversions-2.10-regression.scala deleted file mode 100644 index 7c7ff03b55d7..000000000000 --- a/tests/pos/javaConversions-2.10-regression.scala +++ /dev/null @@ -1,17 +0,0 @@ -import collection.{JavaConversions, mutable, concurrent} -import JavaConversions._ -import java.util.concurrent.{ConcurrentHashMap => CHM} - -object Foo { - def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] = - mapAsScalaConcurrentMap(new CHM()) - - def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] = - new CHM[K, V]() -} - -object Bar { - def assertType[T](t: T) = t - val a = new CHM[String, String]() += (("", "")) - assertType[concurrent.Map[String, String]](a) -} diff --git a/tests/pos/javaReadsSigs/fromjava.java b/tests/pos/javaReadsSigs/fromjava.java index 92441b0c6b7a..25efc30d9b4a 100644 --- a/tests/pos/javaReadsSigs/fromjava.java +++ b/tests/pos/javaReadsSigs/fromjava.java @@ -2,8 +2,8 @@ import scala.math.Ordering; import scala.math.Numeric; import scala.collection.Seq; -import scala.collection.Traversable; -import scala.collection.Traversable$; +import scala.collection.Iterable; +import scala.collection.Iterable$; import scala.collection.immutable.Set; import scala.collection.immutable.HashSet; import scala.collection.immutable.Map; @@ -11,7 +11,8 @@ import scala.collection.immutable.HashMap; import scala.collection.immutable.Vector; import scala.collection.immutable.List; -import scala.collection.generic.CanBuildFrom; +import scala.collection.LinearSeq; +import scala.collection.BuildFrom; class A { }; class B { }; @@ -48,28 +49,28 @@ public static String vector(Vector x) { return y.head(); } public static String list(List x) { - List y = x.drop(2); + LinearSeq y = x.drop(2); return y.head(); } public static Tuple2 map(Map x) { - Traversable> y = x.drop(2); + Iterable> y = x.drop(2); return y.head(); } - public static Object sum(Traversable x) { + public static Object sum(Iterable x) { return x.sum(Contra.intNum); } // Looks like sum as given below fails under java5, so disabled. // // [partest] testing: [...]/files/pos/javaReadsSigs [FAILED] - // [partest] files/pos/javaReadsSigs/fromjava.java:62: name clash: sum(scala.collection.Traversable) and sum(scala.collection.Traversable) have the same erasure - // [partest] public static B sum(Traversable x) { + // [partest] files/pos/javaReadsSigs/fromjava.java:62: name clash: sum(scala.collection.Iterable) and sum(scala.collection.Iterable) have the same erasure + // [partest] public static B sum(Iterable x) { // [partest] ^ // // - // can't make this work with an actual CanBuildFrom: see #4389. - // public static B sum(Traversable x) { - // // have to cast it unfortunately: map in TraversableLike returns + // can't make this work with an actual BuildFrom: see #4389. + // public static B sum(Iterable x) { + // // have to cast it unfortunately: map in IterableLike returns // // "That" and such types seem to be signature poison. 
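// The javaConversions tests above are rewritten (or dropped) in favour of
// explicit JavaConverters decorators such as .asScala; scala.jdk.CollectionConverters
// offers the same decorators without the deprecation. A small sketch of the
// explicit style (names are illustrative):
import scala.collection.JavaConverters._
import scala.collection.concurrent
import java.util.concurrent.{ConcurrentHashMap => CHM}

object ConvertersSketch {
  val wrapped: concurrent.Map[String, String] = new CHM[String, String]().asScala
  wrapped += (("k", "v"))              // writes through to the underlying Java map
  val javaView: java.util.concurrent.ConcurrentMap[String, String] = wrapped.asJava
}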
- // return ((Traversable)x.map(f1, null)).head(); + // return ((Iterable)x.map(f1, null)).head(); // } } \ No newline at end of file diff --git a/tests/pos/paramcycle.scala b/tests/pos/paramcycle.scala deleted file mode 100644 index d894fdf126dc..000000000000 --- a/tests/pos/paramcycle.scala +++ /dev/null @@ -1,18 +0,0 @@ -import scala.collection._ -import scala.collection.generic._ - -trait ViewMkString[+A] - -trait TraversableViewLike[+A, - +Coll, - +This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]] - extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A] { self => - - def f[B](pf: PartialFunction[A, B]) = - filter(pf.isDefinedAt).map(pf) - -} - -trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] { } - - diff --git a/tests/pos/poly-signature.scala b/tests/pos/poly-signature.scala new file mode 100644 index 000000000000..da7e36745eca --- /dev/null +++ b/tests/pos/poly-signature.scala @@ -0,0 +1,13 @@ +trait P[A] { + def foo[T](x: Int): A = ??? +} + +class C extends P[Int] { + def foo(x: Int): Int = x +} + +object Test { + def test(p: C): Unit = { + p.foo(1) + } +} diff --git a/tests/pos/pos_valueclasses/t5953.scala b/tests/pos/pos_valueclasses/t5953.scala index e468eff11a58..84e2243d1d6d 100644 --- a/tests/pos/pos_valueclasses/t5953.scala +++ b/tests/pos/pos_valueclasses/t5953.scala @@ -1,18 +1,19 @@ -package t5953 - -import scala.collection.{ mutable, immutable, generic, GenTraversableOnce } +trait CBF[-F, -A, +C] +trait GenTraversable[+A] +trait Traversable[+A] extends GenTraversable[A] +trait Vector[A] extends Traversable[A] +object Vector { + implicit def cbf[A]: CBF[Vector[_], A, Vector[A]] = ??? +} package object foo { - @inline implicit class TravOps[A, CC[A] <: GenTraversableOnce[A]](val coll: CC[A]) extends AnyVal { - def build[CC2[X]](implicit cbf: generic.CanBuildFrom[Nothing, A, CC2[A]]): CC2[A] = { - (cbf() ++= coll.toIterator).result() - } + @inline implicit class TravOps[A, CC[A] <: GenTraversable[A]](val coll: CC[A]) extends AnyVal { + def build[CC2[X]](implicit cbf: CBF[Nothing, A, CC2[A]]): CC2[A] = ??? 
} } package foo { object Test { - def f1[T](xs: Traversable[T]) = xs.to[immutable.Vector] - def f2[T](xs: Traversable[T]) = xs.build[immutable.Vector] + def f2[T](xs: Traversable[T]) = xs.build[Vector] } } diff --git a/tests/pos/repeatedArgs.scala b/tests/pos/repeatedArgs.scala deleted file mode 100644 index 4a57fcf5908e..000000000000 --- a/tests/pos/repeatedArgs.scala +++ /dev/null @@ -1,26 +0,0 @@ -import scala.collection.{immutable, mutable} -import java.nio.file.Paths -import java.util.concurrent.ForkJoinTask - -class repeatedArgs { - def bar(xs: String*): Int = xs.length - - def test(xs: immutable.Seq[String], ys: collection.Seq[String], zs: Array[String]): Unit = { - bar("a", "b", "c") - bar(xs: _*) - bar(ys: _*) // error in 2.13 - bar(zs: _*) - - Paths.get("Hello", "World") - Paths.get("Hello", xs: _*) - Paths.get("Hello", ys: _*) // error in 2.13 - Paths.get("Hello", zs: _*) - - val List(_, others: _*) = xs.toList // toList should not be needed, see #4790 - val x: collection.Seq[String] = others - // val y: immutable.Seq[String] = others // ok in 2.13 - } - - def invokeAll[T](tasks: ForkJoinTask[T]*): Unit = ForkJoinTask.invokeAll(tasks: _*) - def invokeAll2(tasks: ForkJoinTask[_]*): Unit = ForkJoinTask.invokeAll(tasks: _*) -} diff --git a/tests/pos/repeatedArgs213.scala b/tests/pos/repeatedArgs213.scala new file mode 100644 index 000000000000..94419d88a950 --- /dev/null +++ b/tests/pos/repeatedArgs213.scala @@ -0,0 +1,18 @@ +import scala.collection.{immutable, mutable} +import java.nio.file.Paths + +class repeatedArgs { + def bar(xs: String*): Int = bat(xs) + def bat(xs: immutable.Seq[String]) = xs.length + + def test(xs: immutable.Seq[String]): Unit = { + bar("a", "b", "c") + bar(xs: _*) + + Paths.get("Hello", "World") + Paths.get("Hello", xs: _*) + + val List(_, others: _*) = xs.toList // toList should not be needed, see #4790 + val x: immutable.Seq[String] = others + } +} diff --git a/tests/pos/seqtype-cycle/Test1.scala b/tests/pos/seqtype-cycle/Test1.scala deleted file mode 100644 index ee8d10d7c36a..000000000000 --- a/tests/pos/seqtype-cycle/Test1.scala +++ /dev/null @@ -1,3 +0,0 @@ -class Test { - def bar = Array(1) // call to Array(_: Repeated) -} diff --git a/tests/pos/seqtype-cycle/Test2.scala b/tests/pos/seqtype-cycle/Test2.scala deleted file mode 100644 index 30e1e9157a00..000000000000 --- a/tests/pos/seqtype-cycle/Test2.scala +++ /dev/null @@ -1,10 +0,0 @@ -package object scala { - // needed for some reasons - type Throwable = java.lang.Throwable - type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException - type List[+A] = scala.collection.immutable.List[A] - type Iterable[+A] = scala.collection.Iterable[A] - - type Seq[A] = scala.collection.Seq[A] - val Seq = scala.collection.Seq -} diff --git a/tests/pos/seqtype-cycle/Test3.scala b/tests/pos/seqtype-cycle/Test3.scala deleted file mode 100644 index f137ded9ffa0..000000000000 --- a/tests/pos/seqtype-cycle/Test3.scala +++ /dev/null @@ -1,12 +0,0 @@ -package scala - -class specialized(types: Int*) extends scala.annotation.StaticAnnotation - -package collection { - class Foo[@specialized(1, 2) A] - - trait Seq[A] extends Foo[A] - object Seq extends SeqFactory[Seq] - - trait SeqFactory[CC[X] <: Seq[X]] -} diff --git a/tests/pos/spec-constr-old.scala b/tests/pos/spec-constr-old.scala index e908b65a415f..662778cb0f49 100644 --- a/tests/pos/spec-constr-old.scala +++ b/tests/pos/spec-constr-old.scala @@ -1,4 +1,4 @@ -class SparseArray2[@specialized(Int) T:ClassManifest](val maxSize: Int, initialLength:Int = 3) { 
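// repeatedArgs213.scala above relies on 2.13 varargs being immutable: a T*
// parameter is a scala.collection.immutable.Seq[T], so it can be forwarded to
// an immutable.Seq parameter directly. A short sketch (render/join are made-up names):
import scala.collection.immutable

object VarargsSketch {
  def render(parts: String*): String = join(parts)   // parts: immutable.Seq[String]
  def join(parts: immutable.Seq[String]): String = parts.mkString("-")

  val s = render(Vector("a", "b"): _*)               // expanding an immutable Seq
}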
+class SparseArray2[@specialized(Int) T:scala.reflect.ClassManifest](val maxSize: Int, initialLength:Int = 3) { private var data = new Array[T](initialLength); private var index = new Array[Int](initialLength); diff --git a/tests/pos/spec-fields-old.scala b/tests/pos/spec-fields-old.scala index 26a8c4ffbd12..05a4b6e443d7 100644 --- a/tests/pos/spec-fields-old.scala +++ b/tests/pos/spec-fields-old.scala @@ -1,4 +1,4 @@ -abstract class Foo[@specialized T: ClassManifest, U <: Ordered[U]](x: T, size: Int) { +abstract class Foo[@specialized T: scala.reflect.ClassManifest, U <: Ordered[U]](x: T, size: Int) { var y: T var z: T = x diff --git a/tests/pos/spec-params-old.scala b/tests/pos/spec-params-old.scala index 33a252120cc1..d4672ac2f508 100644 --- a/tests/pos/spec-params-old.scala +++ b/tests/pos/spec-params-old.scala @@ -1,4 +1,4 @@ -class Foo[@specialized A: ClassManifest] { +class Foo[@specialized A: scala.reflect.ClassManifest] { // conflicting in bounds, expect a normalized member calling m // and bridge + implementation in specialized subclasses diff --git a/tests/pos/spec-partialmap.scala b/tests/pos/spec-partialmap.scala index 09684e024208..1e944c777043 100644 --- a/tests/pos/spec-partialmap.scala +++ b/tests/pos/spec-partialmap.scala @@ -1,17 +1,12 @@ - // ticket #3378, overloaded specialized variants -import scala.collection.{Traversable,TraversableLike}; -import scala.collection.generic.CanBuildFrom; - -trait PartialMap[@specialized A,@specialized B] -extends PartialFunction[A,B] with Iterable[(A,B)] { +import scala.collection.{Iterable,IterableOps}; +trait PartialMap[@specialized A,@specialized B] extends PartialFunction[A,B] with Iterable[(A,B)] { // commenting out this declaration gives a different exception. /** Getter for all values for which the given key function returns true. 
*/ def apply(f : (A => Boolean)) : Iterator[B] = - for ((k,v) <- iterator; if f(k)) yield v; + for ((k,v) <- iterator; if f(k)) yield v // if this is commented, it compiles fine: - def apply[This <: Traversable[A], That](keys : TraversableLike[A,This]) - (implicit bf: CanBuildFrom[This, B, That]) : That = keys.map(apply); + def apply[This <: Iterable[A]](keys : IterableOps[A, Iterable, This]): Iterable[B] = keys.map(apply) } diff --git a/tests/pos/spec-short.scala b/tests/pos/spec-short.scala index 71e56a485add..323b9f856888 100644 --- a/tests/pos/spec-short.scala +++ b/tests/pos/spec-short.scala @@ -6,7 +6,7 @@ abstract class AbsFun[@specialized T, @specialized U] { def sum(xs: List[T]): Int def prod(xs: List[T], mul: (Int, T) => Int): Int = - (1 /: xs)(mul) + xs.foldLeft(1)(mul) // concrete function, not specialized def bar(m: String): String = m @@ -19,7 +19,7 @@ class Square extends AbsFun[Int, Int] { def apply(x: Int): Int = x * x def sum(xs: List[Int]): Int = - (0 /: xs) (_ + _) + xs.foldLeft(0) (_ + _) def abs(m: Int): Int = sum(List(1, 2, 3)) diff --git a/tests/pos/spec-sparsearray-old.scala b/tests/pos/spec-sparsearray-old.scala index 99a6309cc7a1..2f4c833eed9b 100644 --- a/tests/pos/spec-sparsearray-old.scala +++ b/tests/pos/spec-sparsearray-old.scala @@ -1,9 +1,9 @@ -import scala.collection.mutable.MapLike - -class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] { +import scala.reflect.ClassManifest +import scala.collection.{MapFactory, mutable} +class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapOps[Int,T,collection.mutable.Map,SparseArray[T]] { override def get(x: Int) = { val ind = findOffset(x) - if (ind < 0) None else Some(sys.error("ignore")) + if(ind < 0) None else Some(sys.error("ignore")) } /** @@ -15,10 +15,11 @@ class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable. sys.error("impl doesn't matter") } - override def apply(i : Int) : T = { sys.error("ignore") } - override def update(i : Int, value : T) = sys.error("ignore") - override def empty = new SparseArray[T] - def -=(ind: Int) = sys.error("ignore") - def +=(kv: (Int,T)) = sys.error("ignore") - override final def iterator = sys.error("ignore") + def addOne(elem: (Int, T)): SparseArray.this.type = ??? + def iterator: Iterator[(Int, T)] = ??? + def subtractOne(elem: Int): SparseArray.this.type = ??? + + override protected[this] def fromSpecific(coll: IterableOnce[(Int, T)]): SparseArray[T] = ??? + override protected[this] def newSpecificBuilder: mutable.Builder[(Int, T), SparseArray[T]] = ??? + override def empty: SparseArray[T] = ??? 
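// The SparseArray rewrite above shows the 2.13 mutable.Map contract: get and
// iterator plus addOne/subtractOne, with fromSpecific, newSpecificBuilder and
// empty overridden only because SparseArray fixes its own collection type in
// MapOps. A stripped-down sketch against plain mutable.Map (CountMap and its
// members are illustrative):
import scala.collection.mutable

class CountMap extends mutable.Map[String, Int] {
  private val underlying = mutable.HashMap.empty[String, Int]
  def get(key: String): Option[Int] = underlying.get(key)
  def iterator: Iterator[(String, Int)] = underlying.iterator
  def addOne(kv: (String, Int)): this.type = { underlying.addOne(kv); this }
  def subtractOne(key: String): this.type = { underlying.subtractOne(key); this }
}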
} diff --git a/tests/pos/spec-super.scala b/tests/pos/spec-super.scala index 67179e023021..3bb72314ef12 100644 --- a/tests/pos/spec-super.scala +++ b/tests/pos/spec-super.scala @@ -1,9 +1,7 @@ -import scala.collection.immutable._ -import scala.collection.mutable.ListBuffer -import scala.collection.generic._ +import scala.collection.BuildFrom -trait Base[+A] extends Traversable[A] { - def add[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Base[A], B, That]): That = { +trait Base[+A] extends Iterable[A] { + def add[B >: A, That](that: Iterable[B])(implicit bf: BuildFrom[Base[A], B, That]): That = { val b = bf(this) b ++= this b ++= that @@ -13,7 +11,7 @@ trait Base[+A] extends Traversable[A] { } abstract class Derived[@specialized +A] extends Base[A] { - override def add[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Base[A], B, That]): That = { + override def add[B >: A, That](that: Iterable[B])(implicit bf: BuildFrom[Base[A], B, That]): That = { val b = bf(this) super.add[B, That](that) } diff --git a/tests/pos/sudoku.scala b/tests/pos/sudoku.scala index 150586716066..065239bf4390 100644 --- a/tests/pos/sudoku.scala +++ b/tests/pos/sudoku.scala @@ -1,3 +1,5 @@ +import scala.io.StdIn.readLine + object SudokuSolver extends App { // The board is represented by an array of strings (arrays of chars), // held in a global variable m. The program begins by reading 9 lines diff --git a/tests/pos/t1858.scala b/tests/pos/t1858.scala index c06e73e7e61e..99e88c7af52f 100644 --- a/tests/pos/t1858.scala +++ b/tests/pos/t1858.scala @@ -1,4 +1,4 @@ -import scala.collection.immutable.Stack +import scala.collection.mutable.Stack object Test { diff --git a/tests/pos/t2183.scala b/tests/pos/t2183.scala deleted file mode 100644 index 1243568b638f..000000000000 --- a/tests/pos/t2183.scala +++ /dev/null @@ -1,5 +0,0 @@ -import scala.collection.mutable._ - -object Test { - val m = new HashSet[String] with SynchronizedSet[String] -} diff --git a/tests/pos/t2293.scala b/tests/pos/t2293.scala deleted file mode 100644 index 536d4ec3d0fd..000000000000 --- a/tests/pos/t2293.scala +++ /dev/null @@ -1,5 +0,0 @@ -import scala.collection.JavaConversions._ - -object Test { - val m: java.util.Map[String,String] = collection.mutable.Map("1"->"2") -} diff --git a/tests/pos/t2421_delitedsl.scala b/tests/pos/t2421_delitedsl.scala index bde3593c9ee4..85dd67b53ce0 100644 --- a/tests/pos/t2421_delitedsl.scala +++ b/tests/pos/t2421_delitedsl.scala @@ -1,9 +1,6 @@ trait DeliteDSL { abstract class <~<[-From, +To] extends (From => To) - implicit def trivial[A]: A <~< A = new (A <~< A) {def apply(x: A) = x} - implicit def convert_<-<[A, B](x: A)(implicit ev: A <~< B): B = ev(x) - trait Forcible[T] object Forcible { @@ -11,18 +8,15 @@ trait DeliteDSL { } case class DeliteInt(x: Int) extends Forcible[Int] + implicit val forcibleInt: Int <~< Forcible[Int] = Forcible.factory(DeliteInt(_: Int)) - implicit val forcibleInt: DeliteDSL.this.<~<[Int,DeliteDSL.this.Forcible[Int]] = - Forcible.factory((x: Int) => DeliteInt(x)) - - import scala.collection.Traversable - class DeliteCollection[T](val xs: Traversable[T]) { + class DeliteCollection[T](val xs: Iterable[T]) { // must use existential in bound of P, instead of T itself, because we cannot both have: - // Test.x below: DeliteCollection[T=Int] => P=DeliteInt <: Forcible[T=Int], as T=Int <~< P=DeliteInt - // Test.xAlready below: DeliteCollection[T=DeliteInt] => P=DeliteInt <: Forcible[T=DeliteInt], as T=DeliteInt <~< P=DeliteInt + // Test.x below: 
DeliteCollection[T=Int] -> P=DeliteInt <: Forcible[T=Int], as T=Int <~< P=DeliteInt + // Test.xAlready below: DeliteCollection[T=DeliteInt] -> P=DeliteInt <: Forcible[T=DeliteInt], as T=DeliteInt <~< P=DeliteInt // this would required DeliteInt <: Forcible[Int] with Forcible[DeliteInt] - def headProxy[P <: Forcible[_]](implicit w: T <~< P): P = xs.head + def headProxy[P <: Forcible[_]](implicit w: T <~< P): P = w(xs.head) } // If T is already a proxy (it is forcible), the compiler should use // forcibleIdentity to deduce that P=T. If T is Int, the compiler diff --git a/tests/pos/t247.scala b/tests/pos/t247.scala index fdcafeb2c6cc..7059cd272b1a 100644 --- a/tests/pos/t247.scala +++ b/tests/pos/t247.scala @@ -2,6 +2,8 @@ class Order[t](less:(t,t) => Boolean,equal:(t,t) => Boolean) {} trait Map[A, B] extends scala.collection.Map[A, B] { val factory:MapFactory[A] + def -(key1: A, key2: A, keys: A*): Map[A, B] = null + def -(key: A): Map[A, B] = null } abstract class MapFactory[A] { def Empty[B]:Map[A,B]; diff --git a/tests/pos/t2503.scala b/tests/pos/t2503.scala index 21801e2dbd28..8dda45b2b760 100755 --- a/tests/pos/t2503.scala +++ b/tests/pos/t2503.scala @@ -1,10 +1,7 @@ import scala.collection.mutable._ trait SB[A] extends Buffer[A] { - - import collection.Traversable - - abstract override def insertAll(n: Int, iter: Traversable[A]): Unit = synchronized { + abstract override def insertAll(n: Int, iter: IterableOnce[A]): Unit = synchronized { super.insertAll(n, iter) } diff --git a/tests/pos/t2795-old.scala b/tests/pos/t2795-old.scala index 935cb1f44439..b3976c5ff40a 100644 --- a/tests/pos/t2795-old.scala +++ b/tests/pos/t2795-old.scala @@ -5,7 +5,7 @@ trait Element[T] { trait Config { type T <: Element[T] - implicit val m: ClassManifest[T] + implicit val m: scala.reflect.ClassManifest[T] // XXX Following works fine: // type T <: Element[_] } diff --git a/tests/pos/t2939.scala b/tests/pos/t2939.scala deleted file mode 100644 index 57dd5202485f..000000000000 --- a/tests/pos/t2939.scala +++ /dev/null @@ -1,13 +0,0 @@ -import collection._ - -object Proxies { - class C1 extends MapProxy[Int,Int] { def self = Map[Int,Int]() } - class C2 extends mutable.MapProxy[Int,Int] { def self = mutable.Map[Int,Int]() } - class C3 extends immutable.MapProxy[Int,Int] { def self = immutable.Map[Int,Int]() } - - class C4 extends SetProxy[Int] { def self = Set[Int]() } - class C5 extends mutable.SetProxy[Int] { def self = mutable.Set[Int]() } - class C6 extends immutable.SetProxy[Int] { def self = immutable.Set[Int]() } - - class C7 extends SeqProxy[Int] { def self = Seq[Int]() } -} diff --git a/tests/pos/t3528.scala b/tests/pos/t3528.scala index ff49b3e9298b..e85555353906 100644 --- a/tests/pos/t3528.scala +++ b/tests/pos/t3528.scala @@ -1,6 +1,6 @@ class A { // 3528 - not fixed - // def f1 = List(List(1), Stream(1)) + // def f1 = List(List(1), LazyList(1)) // 3528 comments def f2 = List(Set(1,2,3), List(1,2,3)) // 2322 diff --git a/tests/pos/t4501.scala b/tests/pos/t4501.scala index f7d45eaa539f..6ac36e1e9b53 100644 --- a/tests/pos/t4501.scala +++ b/tests/pos/t4501.scala @@ -3,7 +3,8 @@ import scala.collection.mutable.ListBuffer class A { def foo[T](a:T, b:T):T = a - def f1 = foo(ListBuffer(), List()) + // f1 no longer compiles with 2.13 collections, it produces an invalid lub; added to run/invalid-lubs.scala + // def f1 = foo(ListBuffer(), List()) def f2 = foo(ListBuffer(), ListBuffer()) def f3 = foo(List(), List()) diff --git a/tests/pos/t4716.scala b/tests/pos/t4716.scala index ec29e8d2cbf3..045aad15e608 
100644 --- a/tests/pos/t4716.scala +++ b/tests/pos/t4716.scala @@ -1,10 +1,6 @@ - - - - -trait Bug2[@specialized(Int) +A] extends TraversableOnce[A] { - def ++[B >: A](that: TraversableOnce[B]) = { - lazy val it = that.toIterator +trait Bug2[@specialized(Int) +A] extends IterableOnce[A] { + def ++[B >: A](that: IterableOnce[B]) = { + lazy val it = that.iterator it } } diff --git a/tests/pos/t4717.scala b/tests/pos/t4717.scala index ed35a8ad8742..145b0f71ec59 100644 --- a/tests/pos/t4717.scala +++ b/tests/pos/t4717.scala @@ -1,22 +1,13 @@ +trait Bug1[@specialized(Boolean) A] extends IterableOnce[A] { - - - - - - -trait Bug1[@specialized(Boolean) A] extends TraversableOnce[A] { - - def ++[B >: A](that: TraversableOnce[B]): Iterator[B] = new Iterator[B] { - lazy val it = that.toIterator + def ++[B >: A](that: IterableOnce[B]): Iterator[B] = new Iterator[B] { + lazy val it = that.iterator def hasNext = it.hasNext def next = it.next } } - - trait WorksFine[@specialized(Boolean) A] { class SubBounds[B >: A] extends Bounds[B] { lazy val it = ??? @@ -24,12 +15,9 @@ trait WorksFine[@specialized(Boolean) A] { def x[B >: A]: Unit = new SubBounds[B] } - trait Bounds[@specialized(Boolean) A] { // okay without `>: A` def x[B >: A]: Unit = new Bounds[B] { lazy val it = ??? // def or val okay } } - - diff --git a/tests/pos/t516.scala b/tests/pos/t516.scala deleted file mode 100644 index 5561b7610c3d..000000000000 --- a/tests/pos/t516.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.collection.mutable._; -import scala.collection.script._; - -class Members; - -object subscriber extends Subscriber[Message[String] with Undoable, Members] { - def notify(pub: Members, event: Message[String] with Undoable): Unit = - (event: Message[String]) match { - case Include(l, elem) => Console.println("ADD: " + elem); - case Remove(l, elem) => Console.println("REM: " + elem); - //case i : Include[HasTree] with Undoable => - //case r : Remove [HasTree] with Undoable => - } - } diff --git a/tests/pos/t6482.scala b/tests/pos/t6482.scala index 24ea38e519a4..8efbc57333f0 100644 --- a/tests/pos/t6482.scala +++ b/tests/pos/t6482.scala @@ -1,4 +1,4 @@ -final class TraversableOnceOps[+A](val collection: TraversableOnce[A]) extends AnyVal { +final class IterableOnceOps[+A](val collection: Iterable[A]) extends AnyVal { def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = if (collection.isEmpty) None else Some(collection.reduceLeft[B](op)) } diff --git a/tests/pos/t6963c.scala b/tests/pos/t6963c.scala index d3c3616eb248..80e8f11d9de5 100644 --- a/tests/pos/t6963c.scala +++ b/tests/pos/t6963c.scala @@ -1,3 +1,6 @@ +// scalac: -Xmigration:2.9 -Xfatal-warnings +// +import collection.Seq object Test { def f1(x: Any) = x.isInstanceOf[Seq[_]] def f2(x: Any) = x match { @@ -10,7 +13,7 @@ object Test { case _ => false } - def f4(x: Any) = x.isInstanceOf[Traversable[_]] + def f4(x: Any) = x.isInstanceOf[Iterable[_]] def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match { case (Some(_: Seq[_]), Nil, _) => 1 diff --git a/tests/pos/tcpoly_infer_explicit_tuple_wrapper.scala b/tests/pos/tcpoly_infer_explicit_tuple_wrapper.scala index 30228fbd286e..380e5824081d 100644 --- a/tests/pos/tcpoly_infer_explicit_tuple_wrapper.scala +++ b/tests/pos/tcpoly_infer_explicit_tuple_wrapper.scala @@ -1,16 +1,13 @@ -import scala.collection.generic.GenericTraversableTemplate -import scala.collection.Iterable - -class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) { +class 
IterableOps[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])) { def unzip: (CC[A1], CC[A2]) = sys.error("foo") } object Test { - implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) - : IterableOps[CC,A1,A2] = new IterableOps[CC, A1, A2](tuple) + implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])): IterableOps[CC, A1, A2] + = new IterableOps[CC, A1, A2](tuple) val t = (List(1, 2, 3), List(6, 5, 4)) - tupleOfIterableWrapper(t) unzip + tupleOfIterableWrapper(t).unzip } diff --git a/tests/pos/tcpoly_infer_implicit_tuple_wrapper.scala b/tests/pos/tcpoly_infer_implicit_tuple_wrapper.scala index 82213719f4b6..40001dc3ff04 100644 --- a/tests/pos/tcpoly_infer_implicit_tuple_wrapper.scala +++ b/tests/pos/tcpoly_infer_implicit_tuple_wrapper.scala @@ -1,18 +1,17 @@ -import scala.collection.generic.GenericTraversableTemplate import scala.collection.Iterable -class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) { +class IterableOps[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])) { def unzip: (CC[A1], CC[A2]) = sys.error("foo") } object Test { - implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) - : IterableOps[CC,A1,A2] = new IterableOps[CC, A1, A2](tuple) + implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])): IterableOps[CC, A1, A2] + = new IterableOps[CC, A1, A2](tuple) val t = (List(1, 2, 3), List(6, 5, 4)) - tupleOfIterableWrapper(t) unzip + tupleOfIterableWrapper(t).unzip - t unzip + t.unzip } diff --git a/tests/pos/tcpoly_ticket2096.scala b/tests/pos/tcpoly_ticket2096.scala index d2387b36bd24..6fd7c7984a58 100644 --- a/tests/pos/tcpoly_ticket2096.scala +++ b/tests/pos/tcpoly_ticket2096.scala @@ -12,7 +12,7 @@ case class MSequitor[A]( a_ : A* ) extends Seq[A] with MBrace[MSequitor,A] { override def nest( a : A ) = new MSequitor[A]( a ) override def flatten[T <: MSequitor[MSequitor[A]]]( bsq : T ) : MSequitor[A] = { - (new MSequitor[A]( ) /: bsq)( { + bsq.foldLeft(new MSequitor[A]( ))( { ( acc : MSequitor[A], e : MSequitor[A] ) => ( acc ++ e ).asInstanceOf[MSequitor[A]] } ) } diff --git a/tests/pos/tcpoly_typeapp.scala b/tests/pos/tcpoly_typeapp.scala index 4cb1da4f74cd..b131b7288ea8 100644 --- a/tests/pos/tcpoly_typeapp.scala +++ b/tests/pos/tcpoly_typeapp.scala @@ -1,4 +1,4 @@ abstract class x { type t[m[x] <: Bound[x], Bound[x]] - val x: t[scala.collection.mutable.MutableList, Iterable] + val x: t[scala.collection.mutable.Stack, Iterable] } diff --git a/tests/pos/typeclass-encoding.scala b/tests/pos/typeclass-encoding.scala index 981da1008b00..52ad8126dd8e 100644 --- a/tests/pos/typeclass-encoding.scala +++ b/tests/pos/typeclass-encoding.scala @@ -19,7 +19,7 @@ } def sum[T: Monoid](xs: List[T]): T = - (inst[T, Monoid].unit /: xs)(_ `add` _) + xs.foldLeft(inst[T, Monoid].unit)(_ `add` _) */ object runtime { @@ -85,7 +85,7 @@ object semiGroups { } def sum[T](xs: List[T])(implicit $ev: Implementation[T] { type Implemented = Monoid } ) = { - (inst[T, Monoid].unit /: xs)((x, y) => inject(x) `add` y) - (inst[T, Monoid].unit /: xs)((x, y) => x `add` y) // fails in scalac and previous dotc. + xs.foldLeft(inst[T, Monoid].unit)((x, y) => inject(x) `add` y) + xs.foldLeft(inst[T, Monoid].unit)((x, y) => x `add` y) // fails in scalac and previous dotc. 
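// Several hunks above (spec-short.scala, tcpoly_ticket2096.scala,
// typeclass-encoding.scala) replace the deprecated `/:` fold-left operator
// with an explicit foldLeft call. A tiny before/after sketch:
object FoldSketch {
  val xs = List(1, 2, 3)
  // 2.12 style: (0 /: xs)(_ + _)
  val total = xs.foldLeft(0)(_ + _)   // 6
}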
} } diff --git a/tests/pos/typeclass-encoding2.scala b/tests/pos/typeclass-encoding2.scala index 3269430435de..18559b784754 100644 --- a/tests/pos/typeclass-encoding2.scala +++ b/tests/pos/typeclass-encoding2.scala @@ -37,7 +37,7 @@ } def sum[T: Monoid](xs: List[T]): T = - (Monod.impl[T].unit /: xs)(_ `add` _) + xs.foldLeft(Monod.impl[T].unit)(_ `add` _) */ object runtime { @@ -131,7 +131,7 @@ object semiGroups { implicit def NatOps: Nat.type = Nat def sum[T](xs: List[T])(implicit ev: Monoid.Impl[T]) = - (Monoid.impl[T].unit /: xs)((x, y) => x `add` y) + xs.foldLeft(Monoid.impl[T].unit)((x, y) => x `add` y) sum(List(1, 2, 3)) sum(List("hello ", "world!")) @@ -230,7 +230,7 @@ object ord { def inf[T](xs: List[T])(implicit ev: Ord.Impl[T]): T = { val smallest = Ord.impl[T].minimum - (smallest /: xs)(min) + xs.foldLeft(smallest)(min) } inf(List[Int]()) diff --git a/tests/pos/typeclass-encoding3.scala b/tests/pos/typeclass-encoding3.scala index 925ba911e3a3..37a4dd2670a0 100644 --- a/tests/pos/typeclass-encoding3.scala +++ b/tests/pos/typeclass-encoding3.scala @@ -205,7 +205,7 @@ object Test { def max[T](x: T, y: T)(implicit ev: Ord.Common { type This = T }) = if (ev.inject(x) < y) x else y def max[T](xs: List[T])(implicit ev: Ord.Common { type This = T }): T = - (Ord.by[T].minimum /: xs)(max(_, _)) + xs.foldLeft(Ord.by[T].minimum)(max(_, _)) val x1 = max(1, 2) val x2 = max(List(1), Nil) diff --git a/tests/pos/typers.scala b/tests/pos/typers.scala index 1cd646899f6e..f335223aeae9 100644 --- a/tests/pos/typers.scala +++ b/tests/pos/typers.scala @@ -10,7 +10,7 @@ object typers { object Inference { - for ((name, n) <- (names, ints).zipped) + for ((name, n) <- names.lazyZip(ints)) println(name.length + n) def double(x: Char): String = s"$x$x" diff --git a/tests/pos/virtpatmat_exist1.scala b/tests/pos/virtpatmat_exist1.scala index 295e99cfcc8a..f3ac809a7268 100644 --- a/tests/pos/virtpatmat_exist1.scala +++ b/tests/pos/virtpatmat_exist1.scala @@ -1,9 +1,27 @@ -import annotation.unchecked.{ uncheckedVariance=> uV } -import scala.collection.mutable.{ListMap, HashMap, HashSet} +import annotation.unchecked.{uncheckedVariance => uV} +import scala.collection.{IterableFactory, StrictOptimizedIterableOps, mutable} +import scala.collection.immutable.{ListMap, ListSet} +import scala.collection.mutable.{AbstractSet, HashMap, HashSet, Set, SetOps} + +// Stub of HashSet, but not final, so we can extend from it (in Test below) +class HS[A] + extends AbstractSet[A] + with SetOps[A, HS, HS[A]] + with StrictOptimizedIterableOps[A, HS, HS[A]] + with collection.IterableFactoryDefaults[A, HS] + with Serializable { + override def iterableFactory: IterableFactory[HS] = ??? + def get(elem: A): Option[A] = ??? + def contains(elem: A): Boolean = ??? + def addOne(elem: A): HS.this.type = ??? + def clear(): Unit = ??? + def iterator: Iterator[A] = ??? + def subtractOne(elem: A): HS.this.type = ??? 
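// typers.scala above switches from `(names, ints).zipped` to
// `names.lazyZip(ints)`, the 2.13 replacement for Tuple2Zipped.
// A short sketch (the data is illustrative):
object LazyZipSketch {
  val names = List("a", "bb", "ccc")
  val ints  = List(1, 2, 3)
  val sums  = names.lazyZip(ints).map((name, n) => name.length + n)  // List(2, 4, 6)
  for ((name, n) <- names.lazyZip(ints)) println(s"$name -> $n")
}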
+} object Test { class HashMapCollision1[A, +B](var hash: Int, var kvs: ListMap[A, B @uV]) extends HashMap[A, B @uV] - class HashSetCollision1[A](var hash: Int, var ks: Set[A]) extends HashSet[A] + class HashSetCollision1[A](var hash: Int, var ks: ListSet[A]) extends HS[A] def splitArray[T](ad: Array[Iterable[T]]): Any = ad(0) match { diff --git a/tests/pos/virtpatmat_gadt_array.scala b/tests/pos/virtpatmat_gadt_array.scala index 02dbad68d9d0..de0367df05c9 100644 --- a/tests/pos/virtpatmat_gadt_array.scala +++ b/tests/pos/virtpatmat_gadt_array.scala @@ -1,8 +1,8 @@ -import scala.collection.mutable._ +import scala.collection.ArrayOps object Test { def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = xs match { case x: Array[AnyRef] => refArrayOps[AnyRef](x).asInstanceOf[ArrayOps[T]] - case null => null + case null => null.asInstanceOf[ArrayOps[T]] // `ArrayOps` is AnyVal } // def genericArrayOps[T >: Nothing <: Any](xs: Array[T]): scala.collection.mutable.ArrayOps[T] // = OptionMatching.runOrElse(xs)(((x1: Array[T]) => @@ -11,5 +11,5 @@ object Test { // (OptionMatching.guard(null.==(x1), x1.asInstanceOf[Array[T]]).flatMap(((x3: Array[T]) => // OptionMatching.one(null))): Option[scala.collection.mutable.ArrayOps[T]])): Option[scala.collection.mutable.ArrayOps[T]]).orElse((OptionMatching.zero: Option[scala.collection.mutable.ArrayOps[T]])))) - def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs) + def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps(xs) } diff --git a/tests/run-macros/inferred-repeated-result.check b/tests/run-macros/inferred-repeated-result.check index 4d3cdea7ed52..78f07c3f4f4d 100644 --- a/tests/run-macros/inferred-repeated-result.check +++ b/tests/run-macros/inferred-repeated-result.check @@ -1,4 +1,4 @@ -C.bar1 : scala.Option[scala.collection.Seq[scala.Predef.String]] -C.bar2 : scala.Option[scala.collection.Seq[scala.Predef.String]] -C.foo1 : scala.collection.Seq[scala.Predef.String] -C.foo2 : scala.collection.Seq[scala.Predef.String] +C.bar1 : scala.Option[scala.collection.immutable.Seq[scala.Predef.String]] +C.bar2 : scala.Option[scala.collection.immutable.Seq[scala.Predef.String]] +C.foo1 : scala.collection.immutable.Seq[scala.Predef.String] +C.foo2 : scala.collection.immutable.Seq[scala.Predef.String] diff --git a/tests/run-macros/quote-matcher-runtime.check b/tests/run-macros/quote-matcher-runtime.check index e772ae7edb1c..1f170a4fc01f 100644 --- a/tests/run-macros/quote-matcher-runtime.check +++ b/tests/run-macros/quote-matcher-runtime.check @@ -787,13 +787,13 @@ Pattern: { } Result: None -Scrutinee: scala.List.apply[scala.Int]((1, 2, 3: scala.[scala.Int])).map[scala.Double, scala.collection.immutable.List[scala.Double]](((x: scala.Int) => x.toDouble./(2)))(scala.collection.immutable.List.canBuildFrom[scala.Double]).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((y: scala.Double) => y.toString()))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) +Scrutinee: scala.List.apply[scala.Int]((1, 2, 3: scala.[scala.Int])).map[scala.Double](((x: scala.Int) => x.toDouble./(2))).map[java.lang.String](((y: scala.Double) => y.toString())) Pattern: { @scala.internal.Quoted.patternBindHole type T @scala.internal.Quoted.patternBindHole type U @scala.internal.Quoted.patternBindHole type V - (scala.internal.Quoted.patternHole[scala.List[T]].map[U, scala.collection.immutable.List[U]](scala.internal.Quoted.patternHole[scala.Function1[T, U]])(scala.collection.immutable.List.canBuildFrom[U]).map[V, 
scala.collection.immutable.List[V]](scala.internal.Quoted.patternHole[scala.Function1[U, V]])(scala.collection.immutable.List.canBuildFrom[V]): scala.collection.immutable.List[scala.Any]) + (scala.internal.Quoted.patternHole[scala.List[T]].map[U](scala.internal.Quoted.patternHole[scala.Function1[T, U]]).map[V](scala.internal.Quoted.patternHole[scala.Function1[U, V]]): scala.collection.immutable.List[scala.Any]) } Result: Some(List(Type(scala.Int), Type(scala.Double), Type(java.lang.String), Expr(scala.List.apply[scala.Int]((1, 2, 3: scala.[scala.Int]))), Expr(((x: scala.Int) => x.toDouble./(2))), Expr(((y: scala.Double) => y.toString())))) diff --git a/tests/run-macros/quote-matching-optimize-1.check b/tests/run-macros/quote-matching-optimize-1.check index a6940021e7b8..2f86188f9fa9 100644 --- a/tests/run-macros/quote-matching-optimize-1.check +++ b/tests/run-macros/quote-matching-optimize-1.check @@ -16,17 +16,17 @@ Original: ls.filter(((x: scala.Int) => x.<(3))).foreach[scala.Unit](((x: scala.I Optimized: ls.foreach[scala.Unit](((x: scala.Int) => if (x.<(3)) scala.Predef.println(x) else ())) Result: () -Original: scala.List.apply[scala.Int]((1, 2, 3: scala.[scala.Int])).map[scala.Int, scala.collection.immutable.List[scala.Int]](((a: scala.Int) => a.*(2)))(scala.collection.immutable.List.canBuildFrom[scala.Int]).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((b: scala.Int) => b.toString()))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) -Optimized: scala.List.apply[scala.Int]((1, 2, 3: scala.[scala.Int])).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((x: scala.Int) => { +Original: scala.List.apply[scala.Int]((1, 2, 3: scala.[scala.Int])).map[scala.Int](((a: scala.Int) => a.*(2))).map[java.lang.String](((b: scala.Int) => b.toString())) +Optimized: scala.List.apply[scala.Int]((1, 2, 3: scala.[scala.Int])).map[java.lang.String](((x: scala.Int) => { val x$1: scala.Int = x.*(2) x$1.toString() -}))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) +})) Result: List(2, 4, 6) -Original: scala.List.apply[scala.Int]((55, 67, 87: scala.[scala.Int])).map[scala.Char, scala.collection.immutable.List[scala.Char]](((a: scala.Int) => a.toChar))(scala.collection.immutable.List.canBuildFrom[scala.Char]).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((b: scala.Char) => b.toString()))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) -Optimized: scala.List.apply[scala.Int]((55, 67, 87: scala.[scala.Int])).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((x: scala.Int) => { +Original: scala.List.apply[scala.Int]((55, 67, 87: scala.[scala.Int])).map[scala.Char](((a: scala.Int) => a.toChar)).map[java.lang.String](((b: scala.Char) => b.toString())) +Optimized: scala.List.apply[scala.Int]((55, 67, 87: scala.[scala.Int])).map[java.lang.String](((x: scala.Int) => { val x$2: scala.Char = x.toChar x$2.toString() -}))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) +})) Result: List(7, C, W) diff --git a/tests/run-macros/quote-matching-optimize-1/Macro_1.scala b/tests/run-macros/quote-matching-optimize-1/Macro_1.scala index 91b89d6f0637..606a35bb808f 100644 --- a/tests/run-macros/quote-matching-optimize-1/Macro_1.scala +++ b/tests/run-macros/quote-matching-optimize-1/Macro_1.scala @@ -11,7 +11,7 @@ object Macro { case '{ type $t; ($ls: List[`$t`]).filter($f).filter($g) } => optimize('{ $ls.filter(x => ${f('x)} && ${g('x)}) }) - case '{ type $t; type $u; 
type $v; ($ls: List[`$t`]).map[`$u`, List[`$u`]]($f).map[`$v`, List[`$v`]]($g) } => + case '{ type $t; type $u; type $v; ($ls: List[`$t`]).map[`$u`]($f).map[`$v`]($g) } => optimize('{ $ls.map(x => ${g(f('x))}) }) case '{ type $t; ($ls: List[`$t`]).filter($f).foreach[Unit]($g) } => diff --git a/tests/run-macros/quote-matching-optimize-2.check b/tests/run-macros/quote-matching-optimize-2.check index d325040a76f3..ae181d841a14 100644 --- a/tests/run-macros/quote-matching-optimize-2.check +++ b/tests/run-macros/quote-matching-optimize-2.check @@ -16,17 +16,17 @@ Original: ls.filter(((x: scala.Int) => x.<(3))).foreach[scala.Unit](((x: scala.I Optimized: ls.foreach[scala.Any](((x: scala.Int) => if (x.<(3)) scala.Predef.println(x) else ())) Result: () -Original: ls.map[scala.Int, scala.collection.immutable.List[scala.Int]](((a: scala.Int) => a.*(2)))(scala.collection.immutable.List.canBuildFrom[scala.Int]).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((b: scala.Int) => b.toString()))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) -Optimized: ls.map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((x: scala.Int) => { +Original: ls.map[scala.Int](((a: scala.Int) => a.*(2))).map[java.lang.String](((b: scala.Int) => b.toString())) +Optimized: ls.map[java.lang.String](((x: scala.Int) => { val x$1: scala.Int = x.*(2) x$1.toString() -}))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) +})) Result: List(2, 4, 6) -Original: ls.map[scala.Char, scala.collection.immutable.List[scala.Char]](((a: scala.Int) => a.toChar))(scala.collection.immutable.List.canBuildFrom[scala.Char]).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((b: scala.Char) => b.toString()))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) -Optimized: ls.map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((x: scala.Int) => { +Original: ls.map[scala.Char](((a: scala.Int) => a.toChar)).map[java.lang.String](((b: scala.Char) => b.toString())) +Optimized: ls.map[java.lang.String](((x: scala.Int) => { val x$2: scala.Char = x.toChar x$2.toString() -}))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) +})) Result: List(, , ) diff --git a/tests/run-macros/quote-matching-optimize-2/Macro_1.scala b/tests/run-macros/quote-matching-optimize-2/Macro_1.scala index 14592cae64c0..e88cb3689751 100644 --- a/tests/run-macros/quote-matching-optimize-2/Macro_1.scala +++ b/tests/run-macros/quote-matching-optimize-2/Macro_1.scala @@ -13,7 +13,7 @@ object Macro { case '{ ($ls: List[$t]).filter($f).filter($g) } => optimize('{ $ls.filter(x => ${f('x)} && ${g('x)}) }) - case '{ type $u; type $v; ($ls: List[$t]).map[`$u`, List[`$u`]]($f).map[`$v`, List[`$v`]]($g) } => + case '{ type $u; type $v; ($ls: List[$t]).map[`$u`]($f).map[`$v`]($g) } => optimize('{ $ls.map(x => ${g(f('x))}) }) case '{ ($ls: List[$t]).filter($f).foreach[$u]($g) } => diff --git a/tests/run-macros/quote-matching-optimize-3.check b/tests/run-macros/quote-matching-optimize-3.check index 257667305b70..cdfcd9b51eb8 100644 --- a/tests/run-macros/quote-matching-optimize-3.check +++ b/tests/run-macros/quote-matching-optimize-3.check @@ -16,11 +16,11 @@ Original: ls.filter(((x: scala.Int) => x.<(3))).foreach[scala.Unit](((x: scala.I Optimized: ls.foreach[scala.Any](((x: scala.Int) => if (((x: scala.Int) => x.<(3)).apply(x)) ((x: scala.Int) => scala.Predef.println(x)).apply(x) else ())) Result: () -Original: ls.map[scala.Long, 
scala.collection.immutable.List[scala.Long]](((a: scala.Int) => a.toLong))(scala.collection.immutable.List.canBuildFrom[scala.Long]).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((b: scala.Long) => b.toString()))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) -Optimized: ls.map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((x: scala.Int) => ((b: scala.Long) => b.toString()).apply(((a: scala.Int) => a.toLong).apply(x))))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) +Original: ls.map[scala.Long](((a: scala.Int) => a.toLong)).map[java.lang.String](((b: scala.Long) => b.toString())) +Optimized: ls.map[java.lang.String](((x: scala.Int) => ((b: scala.Long) => b.toString()).apply(((a: scala.Int) => a.toLong).apply(x)))) Result: List(1, 2, 3) -Original: ls.map[scala.Char, scala.collection.immutable.List[scala.Char]](((a: scala.Int) => a.toChar))(scala.collection.immutable.List.canBuildFrom[scala.Char]).map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((b: scala.Char) => b.toString()))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) -Optimized: ls.map[java.lang.String, scala.collection.immutable.List[java.lang.String]](((x: scala.Int) => ((b: scala.Char) => b.toString()).apply(((a: scala.Int) => a.toChar).apply(x))))(scala.collection.immutable.List.canBuildFrom[java.lang.String]) +Original: ls.map[scala.Char](((a: scala.Int) => a.toChar)).map[java.lang.String](((b: scala.Char) => b.toString())) +Optimized: ls.map[java.lang.String](((x: scala.Int) => ((b: scala.Char) => b.toString()).apply(((a: scala.Int) => a.toChar).apply(x)))) Result: List(, , ) diff --git a/tests/run-macros/quote-matching-optimize-3/Macro_1.scala b/tests/run-macros/quote-matching-optimize-3/Macro_1.scala index cf98b4b761fd..8a093feba362 100644 --- a/tests/run-macros/quote-matching-optimize-3/Macro_1.scala +++ b/tests/run-macros/quote-matching-optimize-3/Macro_1.scala @@ -13,7 +13,7 @@ object Macro { case '{ ($ls: List[$t]).filter($f).filter($g) } => optimize('{ $ls.filter(x => $f(x) && $g(x)) }) - case '{ type $uu; type $vv; ($ls: List[$tt]).map[`$uu`, List[`$uu`]]($f).map[String, List[String]]($g) } => + case '{ type $uu; type $vv; ($ls: List[$tt]).map[`$uu`]($f).map[String]($g) } => optimize('{ $ls.map(x => $g($f(x))) }) case '{ ($ls: List[$t]).filter($f).foreach[$u]($g) } => diff --git a/tests/run-macros/quote-toExprOfSeq.check b/tests/run-macros/quote-toExprOfSeq.check index bba4602df214..78a5ddae3b67 100644 --- a/tests/run-macros/quote-toExprOfSeq.check +++ b/tests/run-macros/quote-toExprOfSeq.check @@ -1,4 +1,4 @@ -WrappedArray(1, 2, 3) -WrappedArray(1, 2, 3) +ArraySeq(1, 2, 3) +ArraySeq(1, 2, 3) List(1, 2, 3) 2 diff --git a/tests/run-macros/tasty-extractors-2.check b/tests/run-macros/tasty-extractors-2.check index 2142e59d492c..70096a223fca 100644 --- a/tests/run-macros/tasty-extractors-2.check +++ b/tests/run-macros/tasty-extractors-2.check @@ -49,7 +49,7 @@ Type.TypeRef(Type.ThisType(Type.TypeRef(NoPrefix(), "scala")), "Unit") Inlined(None, Nil, Block(List(ClassDef("Foo", DefDef("", Nil, List(Nil), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil)), Nil, None, List(DefDef("a", Nil, Nil, Inferred(), Some(Literal(Constant(0))))))), Literal(Constant(())))) Type.TypeRef(Type.ThisType(Type.TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Block(List(ClassDef("Foo", DefDef("", Nil, List(Nil), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), 
TypeSelect(Select(Ident("_root_"), "scala"), "Product"), TypeSelect(Select(Ident("_root_"), "scala"), "Serializable")), Nil, None, List(DefDef("productElementName", Nil, List(List(ValDef("x$1", TypeSelect(Select(Ident("_root_"), "scala"), "Int"), None))), TypeSelect(Select(Ident("java"), "lang"), "String"), Some(Match(Ident("x$1"), List(CaseDef(Pattern.WildcardPattern(), None, Apply(Ident("throw"), List(Apply(Select(New(TypeSelect(Select(Ident("java"), "lang"), "IndexOutOfBoundsException")), ""), List(Apply(Select(Select(Select(Ident("java"), "lang"), "String"), "valueOf"), List(Ident("x$1")))))))))))), DefDef("copy", Nil, List(Nil), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))))), ValDef("Foo", TypeIdent("Foo$"), Some(Apply(Select(New(TypeIdent("Foo$")), ""), Nil))), ClassDef("Foo$", DefDef("", Nil, List(Nil), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), Applied(Inferred(), List(Inferred())), TypeSelect(Select(Ident("_root_"), "scala"), "Serializable")), Nil, Some(ValDef("_", Singleton(Ident("Foo")), None)), List(DefDef("apply", Nil, List(Nil), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))), DefDef("unapply", Nil, List(List(ValDef("x$1", Inferred(), None))), Inferred(), Some(Literal(Constant(true))))))), Literal(Constant(())))) +Inlined(None, Nil, Block(List(ClassDef("Foo", DefDef("", Nil, List(Nil), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), TypeSelect(Select(Ident("_root_"), "scala"), "Product"), TypeSelect(Select(Ident("_root_"), "scala"), "Serializable")), Nil, None, List(DefDef("copy", Nil, List(Nil), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))))), ValDef("Foo", TypeIdent("Foo$"), Some(Apply(Select(New(TypeIdent("Foo$")), ""), Nil))), ClassDef("Foo$", DefDef("", Nil, List(Nil), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), Applied(Inferred(), List(Inferred())), TypeSelect(Select(Ident("_root_"), "scala"), "Serializable")), Nil, Some(ValDef("_", Singleton(Ident("Foo")), None)), List(DefDef("apply", Nil, List(Nil), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))), DefDef("unapply", Nil, List(List(ValDef("x$1", Inferred(), None))), Inferred(), Some(Literal(Constant(true))))))), Literal(Constant(())))) Type.TypeRef(Type.ThisType(Type.TypeRef(NoPrefix(), "scala")), "Unit") Inlined(None, Nil, Block(List(ClassDef("Foo1", DefDef("", Nil, List(List(ValDef("a", TypeIdent("Int"), None))), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil)), Nil, None, List(ValDef("a", Inferred(), None)))), Literal(Constant(())))) diff --git a/tests/run-with-compiler-custom-args/tasty-interpreter/interpreter/jvm/JVMReflection.scala b/tests/run-with-compiler-custom-args/tasty-interpreter/interpreter/jvm/JVMReflection.scala index f903955e7a57..71bd24c4f1a2 100644 --- a/tests/run-with-compiler-custom-args/tasty-interpreter/interpreter/jvm/JVMReflection.scala +++ b/tests/run-with-compiler-custom-args/tasty-interpreter/interpreter/jvm/JVMReflection.scala @@ -81,34 +81,35 @@ class JVMReflection[R <: Reflection & Singleton](val reflect: R) { } private def paramsSig(sym: Symbol): List[Class[_]] = { - sym.asDefDef.signature.paramSigs.map { param => - def javaArraySig(name: String): String = { - if (name.endsWith("[]")) "[" + javaArraySig(name.dropRight(2)) - else name match { - case "scala.Boolean" => "Z" - case "scala.Byte" => "B" - case "scala.Short" => "S" - case "scala.Int" => "I" - case "scala.Long" => "J" - case "scala.Float" => "F" - case "scala.Double" => "D" - case "scala.Char" => "C" - case 
paramName => "L" + paramName + ";" + sym.asDefDef.signature.paramSigs.collect { + case param: String => + def javaArraySig(name: String): String = { + if (name.endsWith("[]")) "[" + javaArraySig(name.dropRight(2)) + else name match { + case "scala.Boolean" => "Z" + case "scala.Byte" => "B" + case "scala.Short" => "S" + case "scala.Int" => "I" + case "scala.Long" => "J" + case "scala.Float" => "F" + case "scala.Double" => "D" + case "scala.Char" => "C" + case paramName => "L" + paramName + ";" + } } - } - - def javaSig(name: String): String = - if (name.endsWith("[]")) javaArraySig(name) else name - - if (param == "scala.Boolean") classOf[Boolean] - else if (param == "scala.Byte") classOf[Byte] - else if (param == "scala.Char") classOf[Char] - else if (param == "scala.Short") classOf[Short] - else if (param == "scala.Int") classOf[Int] - else if (param == "scala.Long") classOf[Long] - else if (param == "scala.Float") classOf[Float] - else if (param == "scala.Double") classOf[Double] - else java.lang.Class.forName(javaSig(param), false, classLoader) + + def javaSig(name: String): String = + if (name.endsWith("[]")) javaArraySig(name) else name + + if (param == "scala.Boolean") classOf[Boolean] + else if (param == "scala.Byte") classOf[Byte] + else if (param == "scala.Char") classOf[Char] + else if (param == "scala.Short") classOf[Short] + else if (param == "scala.Int") classOf[Int] + else if (param == "scala.Long") classOf[Long] + else if (param == "scala.Float") classOf[Float] + else if (param == "scala.Double") classOf[Double] + else java.lang.Class.forName(javaSig(param), false, classLoader) } } diff --git a/tests/run/Course-2002-10.scala b/tests/run/Course-2002-10.scala index a8bf47a4e393..59a8c37632af 100644 --- a/tests/run/Course-2002-10.scala +++ b/tests/run/Course-2002-10.scala @@ -6,11 +6,11 @@ import math.{Pi, log} object M0 { - def addStream (s1: Stream[Int], s2: Stream[Int]): Stream[Int] = - Stream.cons(s1.head + s2.head, addStream(s1.tail, s2.tail)); + def addLazyList (s1: LazyList[Int], s2: LazyList[Int]): LazyList[Int] = + LazyList.cons(s1.head + s2.head, addLazyList(s1.tail, s2.tail)); - val fib: Stream[Int] = - Stream.cons(0, Stream.cons(1, addStream(this.fib, this.fib.tail))); + val fib: LazyList[Int] = + LazyList.cons(0, LazyList.cons(1, addLazyList(this.fib, this.fib.tail))); def test = { var i = 0; @@ -23,36 +23,36 @@ object M0 { object M1 { - def scale(x: Double, s: Stream[Double]): Stream[Double] = + def scale(x: Double, s: LazyList[Double]): LazyList[Double] = s map { e: Double => e*x } - def partialSums(s: Stream[Double]): Stream[Double] = - Stream.cons(s.head, partialSums(s.tail) map (x => x + s.head)); + def partialSums(s: LazyList[Double]): LazyList[Double] = + LazyList.cons(s.head, partialSums(s.tail) map (x => x + s.head)); - def euler(s: Stream[Double]): Stream[Double] = { + def euler(s: LazyList[Double]): LazyList[Double] = { val nm1 = s apply 0; val n = s apply 1; val np1 = s apply 2; - Stream.cons(np1 - ((np1 - n)*(np1 - n) / (nm1 - 2*n + np1)),euler(s.tail)) + LazyList.cons(np1 - ((np1 - n)*(np1 - n) / (nm1 - 2*n + np1)),euler(s.tail)) }; - def better(s: Stream[Double], transform: Stream[Double] => Stream[Double]) - : Stream[Stream[Double]] = - Stream.cons(s, better(transform(s), transform)); + def better(s: LazyList[Double], transform: LazyList[Double] => LazyList[Double]) + : LazyList[LazyList[Double]] = + LazyList.cons(s, better(transform(s), transform)); - def veryGood(s: Stream[Double], transform: Stream[Double] => Stream[Double]) - : Stream[Double] = 
+ def veryGood(s: LazyList[Double], transform: LazyList[Double] => LazyList[Double]) + : LazyList[Double] = better(s, transform) map (x => x.head); - def lnSummands(n: Double): Stream[Double] = - Stream.cons(1.0 / n, lnSummands(n + 1.0) map { x: Double => -x }) + def lnSummands(n: Double): LazyList[Double] = + LazyList.cons(1.0 / n, lnSummands(n + 1.0) map { x: Double => -x }) var ln0 = partialSums(lnSummands(1.0)); var ln1 = euler(ln0); var ln2 = veryGood(ln0, euler); - def piSummands(n: Double): Stream[Double] = - Stream.cons(1.0 / n, piSummands(n + 2.0) map { x: Double => -x }) + def piSummands(n: Double): LazyList[Double] = + LazyList.cons(1.0 / n, piSummands(n + 2.0) map { x: Double => -x }) var pi0 = scale(4.0, partialSums(piSummands(1.0))); var pi1 = euler(pi0); @@ -76,7 +76,7 @@ object M1 { Console.print(str(Pi) + ", "); Console.print(str(Pi) + ", "); Console.print(str(Pi) + "\n"); - Console.println(); + Console.println; i = 0; while (i < 10) { Console.print("ln("+i+") = "); @@ -89,7 +89,7 @@ object M1 { Console.print(str(log(2)) + ", "); Console.print(str(log(2)) + ", "); Console.print(str(log(2)) + "\n"); - Console.println(); + Console.println; } } @@ -107,7 +107,7 @@ object M2 { var current: Iterator[Int] = new IntIterator(2); def hasNext = true; def next = { - val p = current.next(); + val p = current.next; current = current filter { x => !((x % p) == 0) }; p } @@ -116,8 +116,8 @@ object M2 { def test = { val i = (new PrimeIterator()).take(30); Console.print("prime numbers:"); - while (i.hasNext) { Console.print(" " + i.next()); } - Console.println(); + while (i.hasNext) { Console.print(" " + i.next); } + Console.println; } } diff --git a/tests/run/Course-2002-13.scala b/tests/run/Course-2002-13.scala index 774695b6c406..c80e036d0560 100644 --- a/tests/run/Course-2002-13.scala +++ b/tests/run/Course-2002-13.scala @@ -123,36 +123,36 @@ object Programs { lhs.toString() + " :- " + rhs.mkString("", ",", "") + "."; } - def list2stream[a](xs: List[a]): Stream[a] = xs match { - case List() => Stream.empty - case x :: xs1 => Stream.cons(x, list2stream(xs1)) + def list2stream[a](xs: List[a]): LazyList[a] = xs match { + case List() => LazyList.empty + case x :: xs1 => LazyList.cons(x, list2stream(xs1)) } - def option2stream[a](xo: Option[a]): Stream[a] = xo match { - case None => Stream.empty - case Some(x) => Stream.cons(x, Stream.empty) + def option2stream[a](xo: Option[a]): LazyList[a] = xo match { + case None => LazyList.empty + case Some(x) => LazyList.cons(x, LazyList.empty) } - def solve(query: List[Term], clauses: List[Clause]): Stream[Subst] = { + def solve(query: List[Term], clauses: List[Clause]): LazyList[Subst] = { - def solve2(query: List[Term], s: Subst): Stream[Subst] = query match { + def solve2(query: List[Term], s: Subst): LazyList[Subst] = query match { case List() => - Stream.cons(s, Stream.empty) + LazyList.cons(s, LazyList.empty) case Con("not", qs) :: query1 => - if (solve1(qs, s).isEmpty) Stream.cons(s, Stream.empty) - else Stream.empty + if (solve1(qs, s).isEmpty) LazyList.cons(s, LazyList.empty) + else LazyList.empty case q :: query1 => for (clause <- list2stream(clauses); s1 <- tryClause(clause.newInstance, q, s); s2 <- solve1(query1, s1)) yield s2 } - def solve1(query: List[Term], s: Subst): Stream[Subst] = { + def solve1(query: List[Term], s: Subst): LazyList[Subst] = { val ss = solve2(query, s); if (debug) Console.println("solved " + query + " = " + ss); ss } - def tryClause(c: Clause, q: Term, s: Subst): Stream[Subst] = { + def tryClause(c: Clause, q: 
Term, s: Subst): LazyList[Subst] = { if (debug) Console.println("trying " + c); for (s1 <- option2stream(unify(q, c.lhs, s)); s2 <- solve1(c.rhs, s1)) yield s2; } @@ -182,7 +182,7 @@ class Parser(s: String) { if (token equals "(") { token = it.next; val ts: List[Term] = if (token equals ")") List() else rep(term); - if (token equals ")") token = it.next else syntaxError("`)' expected"); + if (token equals ")") token = it.next else syntaxError("`)` expected"); ts } else List()) } @@ -204,7 +204,7 @@ class Parser(s: String) { constructor, if (token equals ":-") { token = it.next; rep(constructor) } else List()) } - if (token equals ".") token = it.next else syntaxError("`.' expected"); + if (token equals ".") token = it.next else syntaxError("`.` expected"); result } @@ -215,7 +215,7 @@ object Prolog { def processor: String => Unit = { var program: List[Clause] = List(); - var solutions: Stream[Subst] = Stream.empty; + var solutions: LazyList[Subst] = LazyList.empty; var tvs: List[String] = List(); { input => new Parser(input).all foreach { c => diff --git a/tests/run/Meter.check b/tests/run/Meter.check index 342152683757..9d8fb7aca9ab 100644 --- a/tests/run/Meter.check +++ b/tests/run/Meter.check @@ -6,7 +6,7 @@ x.hashCode: 1072693248 x == y: true a == b: true testing native arrays -Array(1.0m, 2.0m) +List(1.0m, 2.0m) 1.0m >>>1.0m<<< 1.0m >>>2.0m<<< 2.0m diff --git a/tests/run/Meter.scala b/tests/run/Meter.scala index cbdec8f5908c..5fe82a87e964 100644 --- a/tests/run/Meter.scala +++ b/tests/run/Meter.scala @@ -69,6 +69,7 @@ object Test extends App { println("x.hashCode: "+x.hashCode) + // println("x == 1: "+(x == 1)) println("x == y: "+(x == y)) assert(x.hashCode == (1.0).hashCode) @@ -78,7 +79,7 @@ object Test extends App { { println("testing native arrays") val arr = Array(x, y + x) - println(arr.deep) + println(arr.toList) def foo[T <: Printable](x: Array[T]): Unit = { for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) } } diff --git a/tests/run/MeterCaseClass.check b/tests/run/MeterCaseClass.check index 7735f58af23d..295004c1ca79 100644 --- a/tests/run/MeterCaseClass.check +++ b/tests/run/MeterCaseClass.check @@ -6,7 +6,7 @@ x.hashCode: 1072693248 x == y: true a == b: true testing native arrays -Array(Meter(1.0), Meter(2.0)) +List(Meter(1.0), Meter(2.0)) Meter(1.0) >>>Meter(1.0)<<< Meter(1.0) >>>Meter(2.0)<<< Meter(2.0) diff --git a/tests/run/MeterCaseClass.scala b/tests/run/MeterCaseClass.scala index b71659492966..4bc29cf8161a 100644 --- a/tests/run/MeterCaseClass.scala +++ b/tests/run/MeterCaseClass.scala @@ -18,7 +18,7 @@ package a { private[a] trait MeterArg - implicit val boxings: a.BoxingConversions[a.Meter,Double] = new BoxingConversions[Meter, Double] { + implicit val boxings: BoxingConversions[Meter, Double] = new BoxingConversions[Meter, Double] { def box(x: Double) = new Meter(x) def unbox(m: Meter) = m.underlying } @@ -30,7 +30,7 @@ package a { override def toString = unbox.toString+"ft" } object Foot { - implicit val boxings: a.BoxingConversions[a.Foot,Double] = new BoxingConversions[Foot, Double] { + implicit val boxings: BoxingConversions[Foot, Double] = new BoxingConversions[Foot, Double] { def box(x: Double) = new Foot(x) def unbox(m: Foot) = m.unbox } @@ -66,6 +66,7 @@ object Test extends App { println("x.hashCode: "+x.hashCode) + // println("x == 1: "+(x == 1)) // error: Values of types a.Meter and Int cannot be compared with == or != println("x == y: "+(x == y)) assert(x.hashCode == (1.0).hashCode) @@ -75,7 +76,7 @@ object Test extends App { { 
println("testing native arrays") val arr = Array(x, y + x) - println(arr.deep) + println(arr.toList) def foo[T <: Printable](x: Array[T]): Unit = { for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) } } diff --git a/tests/run/MutableListTest.scala b/tests/run/MutableListTest.scala deleted file mode 100644 index 19d7dc77a49d..000000000000 --- a/tests/run/MutableListTest.scala +++ /dev/null @@ -1,126 +0,0 @@ - - - -import scala.collection.mutable.MutableList - - - -class ExtList[T] extends MutableList[T] { - def printState: Unit = { - println("Length: " + len) - println("Last elem: " + last0.elem) - println("First elem: " + first0.elem) - println("After first elem: " + first0.next.elem) - println("After first: " + first0.next) - println("Last: " + last0) - } -} - -object Test { - - def main(args: Array[String]): Unit = { - testEmpty - testAddElement - testAddFewElements - testAddMoreElements - testTraversables - } - - def testEmpty: Unit = { - val mlist = new MutableList[Int] - assert(mlist.isEmpty) - assert(mlist.get(0) == None) - } - - def testAddElement: Unit = { - val mlist = new MutableList[Int] - mlist += 17 - assert(mlist.nonEmpty) - assert(mlist.length == 1) - assert(mlist.head == 17) - assert(mlist.last == 17) - assert(mlist(0) == 17) - assert(mlist.tail.isEmpty) - assert(mlist.tail.length == 0) - mlist(0) = 101 - assert(mlist(0) == 101) - assert(mlist.toList == List(101)) - assert(mlist.tail.get(0) == None) - assert((mlist.tail += 19).head == 19) - assert(mlist.tail.length == 0) - } - - def testAddFewElements: Unit = { - val mlist = new MutableList[Int] - for (i <- 0 until 2) mlist += i -// mlist.printState - for (i <- 0 until 2) assert(mlist(i) == i) - assert(mlist.length == 2) - assert(mlist.nonEmpty) - assert(mlist.tail.length == 1) - assert(mlist.tail.tail.length == 0) - assert(mlist.tail.tail.isEmpty) - } - - def testAddMoreElements: Unit = { - val mlist = new MutableList[Int] - for (i <- 0 until 10) mlist += i * i - assert(mlist.nonEmpty) - assert(mlist.length == 10) - for (i <- 0 until 10) assert(mlist(i) == i * i) - assert(mlist(5) == 5 * 5) - assert(mlist(9) == 9 * 9) - var sometail = mlist - for (i <- 0 until 10) { - assert(sometail.head == i * i) - sometail = sometail.tail - } - mlist(5) = -25 - assert(mlist(5) == -25) - mlist(0) = -1 - assert(mlist(0) == -1) - mlist(9) = -81 - assert(mlist(9) == -81) - assert(mlist(5) == -25) - assert(mlist(0) == -1) - assert(mlist.last == -81) - mlist.clear() - assert(mlist.isEmpty) - mlist += 1001 - assert(mlist.head == 1001) - mlist += 9999 - assert(mlist.tail.head == 9999) - assert(mlist.last == 9999) - } - - def testTraversables: Unit = { - val mlist = new MutableList[Int] - for (i <- 0 until 10) mlist += i * i - var lst = mlist.drop(5) - for (i <- 0 until 5) assert(lst(i) == (i + 5) * (i + 5)) - lst = lst.take(3) - for (i <- 0 until 3) assert(lst(i) == (i + 5) * (i + 5)) - lst += 129 - assert(lst(3) == 129) - assert(lst.last == 129) - assert(lst.length == 4) - lst += 7 - assert(lst.init.last == 129) - assert(lst.length == 5) - lst.clear() - assert(lst.length == 0) - for (i <- 0 until 5) lst += i - assert(lst.reduceLeft(_ + _) == 10) - } - -} - - - - - - - - - - diff --git a/tests/run/QueueTest.scala b/tests/run/QueueTest.scala index 773def22c60f..55f3274d74e6 100644 --- a/tests/run/QueueTest.scala +++ b/tests/run/QueueTest.scala @@ -1,20 +1,5 @@ - - import scala.collection.mutable.Queue - - - -class ExtQueue[T] extends Queue[T] { - def printState: Unit = { - println("-------------------") - println("Length: " + len) - 
println("First: " + first0) - println("First elem: " + first0.elem) - println("After first: " + first0.next) - } -} - object Test { def main(args: Array[String]): Unit = { @@ -80,7 +65,7 @@ object Test { } def testTwoEnqueues: Unit = { - val queue = new ExtQueue[Int] + val queue = new Queue[Int] queue.enqueue(30) queue.enqueue(40) @@ -88,7 +73,6 @@ object Test { assert(queue.size == 2) assert(queue.nonEmpty) assert(queue.front == 30) -// queue.printState val all = queue.dequeueAll(_ > 20) assert(all.size == 2) @@ -99,7 +83,7 @@ object Test { } def testFewEnqueues: Unit = { - val queue = new ExtQueue[Int] + val queue = new Queue[Int] queue.enqueue(10) queue.enqueue(20) @@ -108,35 +92,27 @@ object Test { assert(queue.head == 10) assert(queue.last == 20) assert(queue.front == 10) -// queue.printState val ten = queue.dequeue() assert(ten == 10) assert(queue.length == 1) -// queue.printState queue.enqueue(30) -// queue.printState val gt25 = queue.dequeueFirst(_ > 25) assert(gt25 == Some(30)) assert(queue.nonEmpty) assert(queue.length == 1) assert(queue.head == 20) assert(queue.front == 20) -// queue.printState queue.enqueue(30) -// queue.printState val lt25 = queue.dequeueFirst(_ < 25) assert(lt25 == Some(20)) assert(queue.nonEmpty) assert(queue.length == 1) -// queue.printState queue.enqueue(40) -// queue.printState val all = queue.dequeueAll(_ > 20) -// queue.printState assert(all.size == 2) assert(all.contains(30)) assert(all.contains(40)) @@ -145,19 +121,14 @@ object Test { queue.enqueue(50) queue.enqueue(60) -// queue.printState val allgt55 = queue.dequeueAll(_ > 55) -// println(allgt55) -// queue.printState assert(allgt55.size == 1) assert(allgt55.contains(60)) assert(queue.length == 1) queue.enqueue(70) queue.enqueue(80) -// queue.printState val alllt75 = queue.dequeueAll(_ < 75) -// queue.printState assert(alllt75.size == 2) assert(alllt75.contains(70)) assert(alllt75.contains(50)) @@ -168,7 +139,7 @@ object Test { } def testMoreEnqueues: Unit = { - val queue = new ExtQueue[Int] + val queue = new Queue[Int] for (i <- 0 until 10) queue.enqueue(i * 2) for (i <- 0 until 10) { @@ -183,10 +154,7 @@ object Test { assert(queue.length == 10) assert(queue.nonEmpty) - //queue.printState val gt5 = queue.dequeueAll(_ > 4) - //queue.printState - //println(gt5) assert(gt5.size == 7) assert(queue.length == 3) assert(queue.nonEmpty) @@ -217,14 +185,11 @@ object Test { assert(queue.length == 10) val foddgt25 = queue.dequeueFirst(num => num > 25 && num % 2 == 1) - assert(foddgt25 == Some(49)) + assert(foddgt25 == Some(49), foddgt25) assert(queue.length == 9) assert(queue.nonEmpty) - //queue.printState val lt30 = queue.dequeueAll(_ < 30) - //println(lt30) - //queue.printState assert(lt30.size == 6) assert(queue.length == 3) diff --git a/tests/run/ReverseSeqView.scala b/tests/run/ReverseSeqView.scala index 2004791bffee..b290f82358d6 100644 --- a/tests/run/ReverseSeqView.scala +++ b/tests/run/ReverseSeqView.scala @@ -1,25 +1,5 @@ - - - - - - object Test extends App { - - val lstv = List(1, 2, 3).view - val lstvr = lstv.reverse + val lstv = List(1, 2, 3).view // SeqView + val lstvr = lstv.reverse // Can reverse a SeqView, but get a plain View which can no longer be reversed assert(lstvr.iterator.toList == List(3, 2, 1)) - assert(lstvr.reverse == List(1, 2, 3)) - assert(lstvr.reverseIterator.toList == List(1, 2, 3)) - assert(lstvr.reverseMap(_ + 1) == List(2, 3, 4)) - } - - - - - - - - - diff --git a/tests/run/UnrolledBuffer.scala b/tests/run/UnrolledBuffer.scala index 2e325dbfab97..9a664b27a480 100644 --- 
a/tests/run/UnrolledBuffer.scala +++ b/tests/run/UnrolledBuffer.scala @@ -2,6 +2,7 @@ + import collection.mutable.UnrolledBuffer @@ -57,7 +58,8 @@ object Test { assert(u2.size == 0) assertCorrect(u1) - u1 concat UnrolledBuffer() + // Dotty FIXME: type argument should not be needed + u1 concat UnrolledBuffer[Int]() assertCorrect(u1) val u3 = u1 map { x => x } diff --git a/tests/run/array-charSeq.scala b/tests/run/array-charSeq.scala index 64055c6406ba..e2f019274820 100644 --- a/tests/run/array-charSeq.scala +++ b/tests/run/array-charSeq.scala @@ -1,12 +1,13 @@ +import runtime.ArrayCharSequence + object Test { val arr = Array[Char]('a' to 'i': _*) - var xs: CharSequence = arr + var xs: CharSequence = new runtime.ArrayCharSequence(arr, 0, arr.length) val hash = xs.hashCode def check(chars: CharSequence): Unit = { println("\n[check '" + chars + "'] len = " + chars.length) chars match { - case x: Predef.ArrayCharSequence => assert(x.__arrayOfChars eq arr, ((x.__arrayOfChars, arr))) case x: runtime.ArrayCharSequence => assert(x.xs eq arr, ((x.xs, arr))) case x => assert(false, x) } diff --git a/tests/run/arrayview.scala b/tests/run/arrayview.scala index 97e840f5e946..3d84f0edbe7c 100644 --- a/tests/run/arrayview.scala +++ b/tests/run/arrayview.scala @@ -2,7 +2,7 @@ object Test { def f = (1 to 100).toArray.view def main(args: Array[String]): Unit = { - val xs = (f filter (_ < 50)).reverse.filter(_ % 2 == 0).map(_ / 2).flatMap(x => Array(1, x)) + val xs = (f filter (_ < 50)).filter(_ % 2 == 0).map(_ / 2).flatMap(x => Array(1, x)) assert(xs.size == 48) val ys = xs.toArray assert(ys.size == 48) diff --git a/tests/run/bitsets.check b/tests/run/bitsets.check index 41c2ccdcb87c..82cb98551abd 100644 --- a/tests/run/bitsets.check +++ b/tests/run/bitsets.check @@ -60,7 +60,7 @@ ia2 = List(2) ia3 = List() i2_m0 = List(1010101010101010101010101) -i2_m2 = List(ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, 1) +i2_m2 = List(ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, 1, 0, 0, 0) i2_m0c = true i2_m1c = true i2_m2c = true @@ -82,3 +82,5 @@ i2_r1 = true i2_r2 = true i2_r3 = true +125 + diff --git a/tests/run/bitsets.scala b/tests/run/bitsets.scala index 5d492207494a..b6843d1fa9c2 100644 --- a/tests/run/bitsets.scala +++ b/tests/run/bitsets.scala @@ -81,14 +81,14 @@ object TestMutable2 { println("m2_i1 = " + (t1 == b1)) println("m2_i2 = " + (t2 == b2)) println("m2_i3 = " + (t3 == b3)) - println("m2_f0 = " + (t0.from(42) == b0.from(42))) - println("m2_f1 = " + (t1.from(42) == b1.from(42))) - println("m2_f2 = " + (t2.from(42) == b2.from(42))) - println("m2_f3 = " + (t3.from(42) == b3.from(42))) - println("m2_t0 = " + (t0.to(195) == b0.to(195))) - println("m2_t1 = " + (t1.to(195) == b1.to(195))) - println("m2_t2 = " + (t2.to(195) == b2.to(195))) - println("m2_t3 = " + (t3.to(195) == b3.to(195))) + println("m2_f0 = " + (t0.rangeFrom(42) == b0.rangeFrom(42))) + println("m2_f1 = " + (t1.rangeFrom(42) == b1.rangeFrom(42))) + println("m2_f2 = " + (t2.rangeFrom(42) == b2.rangeFrom(42))) + println("m2_f3 = " + (t3.rangeFrom(42) == b3.rangeFrom(42))) + println("m2_t0 = " + (t0.rangeTo(195) == b0.rangeTo(195))) + println("m2_t1 = " + (t1.rangeTo(195) == b1.rangeTo(195))) + println("m2_t2 = " + (t2.rangeTo(195) == b2.rangeTo(195))) + println("m2_t3 = " + (t3.rangeTo(195) == b3.rangeTo(195))) println("m2_r0 = " + (t0.range(43,194) == b0.range(43,194))) println("m2_r1 = " + (t1.range(43,194) == b1.range(43,194))) println("m2_r2 = " + (t2.range(43,194) == b2.range(43,194))) @@ 
-191,14 +191,14 @@ object TestImmutable2 { println("i2_i1 = " + (t1 == b1)) println("i2_i2 = " + (t2 == b2)) println("i2_i3 = " + (t3 == b3)) - println("i2_f0 = " + (t0.from(42) == b0.from(42))) - println("i2_f1 = " + (t1.from(42) == b1.from(42))) - println("i2_f2 = " + (t2.from(42) == b2.from(42))) - println("i2_f3 = " + (t3.from(42) == b3.from(42))) - println("i2_t0 = " + (t0.to(195) == b0.to(195))) - println("i2_t1 = " + (t1.to(195) == b1.to(195))) - println("i2_t2 = " + (t2.to(195) == b2.to(195))) - println("i2_t3 = " + (t3.to(195) == b3.to(195))) + println("i2_f0 = " + (t0.rangeFrom(42) == b0.rangeFrom(42))) + println("i2_f1 = " + (t1.rangeFrom(42) == b1.rangeFrom(42))) + println("i2_f2 = " + (t2.rangeFrom(42) == b2.rangeFrom(42))) + println("i2_f3 = " + (t3.rangeFrom(42) == b3.rangeFrom(42))) + println("i2_t0 = " + (t0.rangeTo(195) == b0.rangeTo(195))) + println("i2_t1 = " + (t1.rangeTo(195) == b1.rangeTo(195))) + println("i2_t2 = " + (t2.rangeTo(195) == b2.rangeTo(195))) + println("i2_t3 = " + (t3.rangeTo(195) == b3.rangeTo(195))) println("i2_r0 = " + (t0.range(77,194) == b0.range(77,194))) println("i2_r1 = " + (t1.range(77,194) == b1.range(77,194))) println("i2_r2 = " + (t2.range(77,194) == b2.range(77,194))) @@ -206,6 +206,12 @@ object TestImmutable2 { println } +object TestImmutable3 { + import scala.collection.immutable.BitSet + BitSet(125).filter{ xi => println(xi); true } // scala/bug#11380 + println +} + object Test extends App { TestMutable TestMutable2 @@ -213,6 +219,7 @@ object Test extends App { // TestMutable4 TestImmutable TestImmutable2 + TestImmutable3 } //############################################################################ diff --git a/tests/run/breakout.check b/tests/run/breakout.check deleted file mode 100644 index 7971496d1f0c..000000000000 --- a/tests/run/breakout.check +++ /dev/null @@ -1 +0,0 @@ -2, 3, 4 diff --git a/tests/run/breakout.scala b/tests/run/breakout.scala deleted file mode 100644 index 8081405bd19b..000000000000 --- a/tests/run/breakout.scala +++ /dev/null @@ -1,9 +0,0 @@ -import scala.collection.generic._ -import scala.collection._ -import scala.collection.mutable._ - -object Test extends App { - val l = List(1, 2, 3) - val a: Array[Int] = l.map(_ + 1)(breakOut) - println(a.mkString(", ")) -} diff --git a/tests/run/capturing.scala b/tests/run/capturing.scala index 7f00a8322129..aaef60754639 100644 --- a/tests/run/capturing.scala +++ b/tests/run/capturing.scala @@ -4,6 +4,6 @@ class MT(sf: MT => String) { } object Test extends App { def printFields(obj: Any) = - println(obj.getClass.getDeclaredFields.map(_.toString).sorted.deep.mkString("\n")) + println(obj.getClass.getDeclaredFields.map(_.toString).sorted.toList.mkString("\n")) printFields(new MT(_ => "")) } diff --git a/tests/run/caseClassHash.check b/tests/run/caseClassHash.check index 332fd477d2ce..cf37fdb19f48 100644 --- a/tests/run/caseClassHash.check +++ b/tests/run/caseClassHash.check @@ -1,9 +1,9 @@ Foo(true,-1,-1,d,-5,-10,500.0,500.0,List(),5.0) Foo(true,-1,-1,d,-5,-10,500.0,500.0,List(),5) -205963949 -205963949 +930449446 +930449446 true -## method 1: 205963949 -## method 2: 205963949 - Murmur 1: 1383698062 - Murmur 2: 1383698062 +## method 1: 930449446 +## method 2: 930449446 + Murmur 1: 930449446 + Murmur 2: 930449446 diff --git a/tests/run/classmanifests_new_alias.scala b/tests/run/classmanifests_new_alias.scala index 777bd5dd6d45..5cb9b0ba748d 100644 --- a/tests/run/classmanifests_new_alias.scala +++ b/tests/run/classmanifests_new_alias.scala @@ -1,7 +1,7 @@ 
@deprecated("Suppress warnings", since="2.11") object Test extends App { - type CM[T] = ClassManifest[T] + type CM[T] = scala.reflect.ClassManifest[T] println(implicitly[CM[Int]]) println(implicitly[CM[Int]] eq Manifest.Int) } diff --git a/tests/run/classmanifests_new_core.scala b/tests/run/classmanifests_new_core.scala deleted file mode 100644 index 0a9c58e8e106..000000000000 --- a/tests/run/classmanifests_new_core.scala +++ /dev/null @@ -1,5 +0,0 @@ -@deprecated("Suppress warnings", since="2.11") -object Test extends App { - println(classManifest[Int]) - println(classManifest[Int] eq Manifest.Int) -} diff --git a/tests/run/collection-stacks.check b/tests/run/collection-stacks.check index aa25cd1fa66c..cd28d1a0f885 100644 --- a/tests/run/collection-stacks.check +++ b/tests/run/collection-stacks.check @@ -1,14 +1,9 @@ 3-2-1: true -3-2-1: true apply 3: true -3: true -1: true 1: true top 3: true -3: true pop -2-1: true 3: true 2-1: true diff --git a/tests/run/collection-stacks.scala b/tests/run/collection-stacks.scala index ed627640dde0..86058d982f5f 100644 --- a/tests/run/collection-stacks.scala +++ b/tests/run/collection-stacks.scala @@ -1,4 +1,4 @@ -import scala.collection.{ immutable, mutable } +import scala.collection.mutable object Test extends App { def mutableStack[T](xs: T*): mutable.Stack[T] = { @@ -7,32 +7,21 @@ object Test extends App { s } - def immutableStack[T](xs: T*): immutable.Stack[T] = { - immutable.Stack.empty[T] pushAll xs - } - def check[T](expected: T, got: T): Unit = { - println(got + ": " + (expected == got)) + println(s"$got: ${expected == got}") } // check #957 - check("3-2-1", immutableStack(1, 2, 3).iterator.mkString("-")) check("3-2-1", mutableStack(1, 2, 3).iterator.mkString("-")) println("apply") - check(3, immutableStack(1, 2, 3).apply(0)) check(3, mutableStack(1, 2, 3).apply(0)) - check(1, immutableStack(1, 2, 3).apply(2)) check(1, mutableStack(1, 2, 3).apply(2)) println("top") - check(3, immutableStack(1, 2, 3).top) check(3, mutableStack(1, 2, 3).top) println("pop") - check("2-1", immutableStack(1, 2, 3).pop.mkString("-")) check(3, mutableStack(1, 2, 3).pop()) check("2-1", { val s = mutableStack(1, 2, 3); s.pop(); s.toList.mkString("-") }) } - -// vim: set ts=2 sw=2 et: diff --git a/tests/run/collections.scala b/tests/run/collections.scala index 206a2b96bea1..516d3e038601 100644 --- a/tests/run/collections.scala +++ b/tests/run/collections.scala @@ -1,24 +1,23 @@ import scala.collection._ -import scala.compat.Platform.currentTime import scala.language.postfixOps object Test extends App { val printTime = false - def sum[A](xs: Iterable[Int]) = (0 /: xs)((x, y) => x + y) + def sum[A](xs: Iterable[Int]) = xs.foldLeft(0)((x, y) => x + y) def time(op: => Unit): Unit = { - val start = currentTime + val start = System.currentTimeMillis() op - if (printTime) println(" time = "+(currentTime - start)+"ms") + if (printTime) println(" time = "+(System.currentTimeMillis() - start)+"ms") } def test(msg: String, s0: collection.immutable.Set[Int], iters: Int) = { println("***** "+msg+":") var s = s0 s = s + 2 - s = s + (3, 4000, 10000) + s = s + 3 + 4000 + 10000 println("test1: "+sum(s)) time { s = s ++ (List.range(0, iters) map (2*)) @@ -35,8 +34,8 @@ object Test extends App { def test(msg: String, s0: collection.mutable.Set[Int], iters: Int) = { println("***** "+msg+":") var s = s0 - s = s + 2 - s = s + (3, 4000, 10000) + s = s.clone() += 2 + s = s.clone += (3, 4000, 10000) println("test1: "+sum(s)) time { s = s ++ (List.range(0, iters) map (2*)) @@ -54,7 +53,7 @@ 
object Test extends App { println("***** "+msg+":") var s = s0 s = s + (2 -> 2) - s = s + (3 -> 3, 4000 -> 4000, 10000 -> 10000) + s = s + (3 -> 3) + (4000 -> 4000) + (10000 -> 10000) println("test1: "+sum(s map (_._2))) time { s = s ++ (List.range(0, iters) map (x => x * 2 -> x * 2)) @@ -88,8 +87,8 @@ object Test extends App { def test(msg: String, s0: collection.mutable.Map[Int, Int], iters: Int) = { println("***** "+msg+":") var s = s0 - s = s + (2 -> 2) - s = s + (3 -> 3, 4000 -> 4000, 10000 -> 10000) + s = s.clone() += (2 -> 2) + s = s.clone() += (3 -> 3, 4000 -> 4000, 10000 -> 10000) println("test1: "+sum(s map (_._2))) time { s = s ++ (List.range(0, iters) map (x => x * 2 -> x * 2)) diff --git a/tests/run/colltest.check b/tests/run/colltest.check index e5bb013ed72e..256ed93a4ac8 100644 --- a/tests/run/colltest.check +++ b/tests/run/colltest.check @@ -2,7 +2,4 @@ true false true false -true -false -succeeded for 10 iterations. succeeded for 10 iterations. diff --git a/tests/run/colltest.scala b/tests/run/colltest.scala index 46e0c8478704..22404f0e9ed5 100644 --- a/tests/run/colltest.scala +++ b/tests/run/colltest.scala @@ -48,10 +48,6 @@ class TestSet(s0: Set[Int], s1: Set[Int]) { object Test extends App { def t3954: Unit = { import scala.collection.mutable - import scala.collection.immutable - val result = new mutable.ImmutableSetAdaptor(immutable.ListSet.empty[Int]) - println(result.add(1)) - println(result.add(1)) val result2 = new mutable.HashSet[Int] println(result2.add(1)) println(result2.add(1)) @@ -62,5 +58,4 @@ object Test extends App { t3954 new TestSet(HashSet.empty, new LinkedHashSet) - new TestSet(new ImmutableSetAdaptor(collection.immutable.Set.empty[Int]), new LinkedHashSet) } diff --git a/tests/run/colltest1.check b/tests/run/colltest1.check index 5ec6286d9ef2..ecaff56a0ce5 100644 --- a/tests/run/colltest1.check +++ b/tests/run/colltest1.check @@ -12,12 +12,12 @@ new test starting with Stream() 10: Stream(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) 9: Stream(2, 3, 4, 5, 6, 7, 8, 9, 10) 1 -Stream(1, ?) 
-new test starting with WrappedArray() -10: ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) -9: ArrayBuffer(2, 3, 4, 5, 6, 7, 8, 9, 10) +Stream(1, ) +new test starting with ArraySeq() +10: ArraySeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) +9: ArraySeq(2, 3, 4, 5, 6, 7, 8, 9, 10) 1 -ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) +ArraySeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) new test starting with ArrayBuffer() 10: ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) 9: ArrayBuffer(2, 3, 4, 5, 6, 7, 8, 9, 10) diff --git a/tests/run/colltest1.scala b/tests/run/colltest1.scala index c020253a60e4..4248e3d99c96 100644 --- a/tests/run/colltest1.scala +++ b/tests/run/colltest1.scala @@ -1,5 +1,5 @@ /* - * filter: inliner warnings; re-run with -Yinline-warnings for details + * filter: inliner warnings; re-run with */ import scala.collection._ import scala.language.postfixOps @@ -10,8 +10,8 @@ object Test extends App { println("new test starting with "+empty) assert(empty.isEmpty) val ten = empty ++ List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) - println(ten.size+": "+ten) - println(ten.tail.size+": "+ten.tail) + println(s"${ten.size}: $ten") + println(s"${ten.tail.size}: ${ten.tail}") assert(ten == empty ++ (1 to 10)) assert(ten.size == 10) assert(ten forall (_ <= 10)) @@ -43,7 +43,7 @@ object Test extends App { assert(ten.last == 10) assert(List(ten.head) ++ ten.tail == ten) assert(ten.init ++ List(ten.last) == ten, ten.init) - assert(vs1 == vs2, vs1+"!="+vs2) + assert(vs1 == vs2, s"$vs1!=$vs2") assert(vs1 == ten) assert((ten take 5) == firstFive) assert((ten drop 5) == secondFive) @@ -54,9 +54,9 @@ object Test extends App { assert((ten span (_ <= 5)) == (firstFive, secondFive)) assert((ten splitAt 5) == (firstFive, secondFive), ten splitAt 5) val buf = new mutable.ArrayBuffer[Int] - firstFive copyToBuffer buf - secondFive copyToBuffer buf - assert(buf.result() == ten, buf.result()) + buf ++= firstFive + buf ++= secondFive + assert(buf == ten, buf) assert(ten.toArray.size == 10) assert(ten.toArray.toSeq == ten, ten.toArray.toSeq) assert(ten.toIterable == ten) @@ -122,7 +122,7 @@ object Test extends App { assert(ten contains 1) assert(ten contains 10) assert(!(ten contains 0)) - assert((empty ++ (1 to 7) union empty ++ (3 to 10)) == List(1, 2, 3, 4, 5, 6, 7, 3, 4, 5, 6, 7, 8, 9, 10)) + assert((empty ++ (1 to 7) ++ empty ++ (3 to 10)) == List(1, 2, 3, 4, 5, 6, 7, 3, 4, 5, 6, 7, 8, 9, 10)) assert((ten diff ten).isEmpty) assert((ten diff List()) == ten) assert((ten diff (ten filter (_ % 2 == 0))) == (ten filterNot (_ % 2 == 0))) @@ -139,7 +139,7 @@ object Test extends App { def setTest(empty: => Set[String]): Unit = { var s = empty + "A" + "B" + "C" - s += ("D", "E", "F") + s ++= List("D", "E", "F") s ++= List("G", "H", "I") s ++= ('J' to 'Z') map (_.toString) assert(s forall (s contains)) @@ -147,7 +147,7 @@ object Test extends App { assert(!(s contains "0")) s = s + "0" assert(s contains "0") - s = s - "X" + s = s.diff(Set("X")) assert(!(s contains "X")) assert(empty.isEmpty) assert(!s.isEmpty) @@ -156,8 +156,8 @@ object Test extends App { assert(!s.isEmpty) val s1 = s intersect empty assert(s1 == empty, s1) - def abc = empty + ("a", "b", "c") - def bc = empty + ("b", "c") + def abc = empty ++ Set("a", "b", "c") + def bc = empty ++ Set("b", "c") assert(bc subsetOf abc) } @@ -173,7 +173,7 @@ object Test extends App { def mapTest(empty: => Map[String, String]) = { var m = empty + ("A" -> "A") + ("B" -> "B") + ("C" -> "C") - m += (("D" -> "D"), ("E" -> "E"), ("F" -> "F")) + m ++= List(("D" -> "D"), ("E" -> "E"), ("F" -> "F")) m ++= List(("G" -> 
"G"), ("H" -> "H"), ("I" -> "I")) m ++= ('J' to 'Z') map (x => (x.toString -> x.toString)) println(m.toList.sorted) @@ -184,10 +184,11 @@ object Test extends App { assert(m.getOrElse("7", "@") == "@") assert(m.keySet.size == 26) assert(m.size == 26) - assert(m.keySet == Set() ++ m.keysIterator) - assert(m.keySet == m.keysIterator.toList.toSet, m.keySet.toList+"!="+m.keysIterator.toList.toSet) + assert(m.keySet == Set() ++ m.keysIterator.to(LazyList)) + assert(m.keySet == m.keysIterator.toList.toSet, s"${m.keySet.toList}!=${m.keysIterator.toList.toSet}") val m1 = empty ++ m - val mm = m -- m.keySet.toList + val ks = m.keySet + val mm = m.view.filterKeys(k => !ks(k)) assert(mm.isEmpty, mm) def m3 = empty ++ m1 assert(m1 == m3) @@ -199,7 +200,7 @@ object Test extends App { def mutableMapTest(empty: => mutable.Map[String, String]) = { mapTest(empty) val m1 = empty ++ (('A' to 'Z') map (_.toString) map (x => (x, x))) - val m2 = m1 retain ((k, v) => k == "N") + val m2 = m1 filterInPlace ((k, v) => k == "N") assert(m2.size == 1, m2) } diff --git a/tests/run/concurrent-map-conversions.scala b/tests/run/concurrent-map-conversions.scala deleted file mode 100644 index 225efe1da76d..000000000000 --- a/tests/run/concurrent-map-conversions.scala +++ /dev/null @@ -1,36 +0,0 @@ - - - - - -object Test { - - def main(args: Array[String]): Unit = { - testConversions() - testConverters() - } - - def needPackageConcurrentMap(map: collection.concurrent.Map[Int, Int]): Unit = { - } - def needJavaConcurrent(map: java.util.concurrent.ConcurrentMap[Int, Int]): Unit = { - } - - def testConversions(): Unit = { - import collection.JavaConversions._ - val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int] - val ctrie = new collection.concurrent.TrieMap[Int, Int] - - needPackageConcurrentMap(skiplist) - needJavaConcurrent(ctrie) - } - - def testConverters(): Unit = { - import collection.JavaConverters._ - val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int] - val ctrie = new collection.concurrent.TrieMap[Int, Int] - - needPackageConcurrentMap(skiplist.asScala) - needJavaConcurrent(ctrie.asJava) - } - -} diff --git a/tests/run/correct-bind.check b/tests/run/correct-bind.check index eb5597af8bb4..f9d98ae0e37c 100644 --- a/tests/run/correct-bind.check +++ b/tests/run/correct-bind.check @@ -1 +1 @@ -Vector(second, third) +ArraySeq(second, third) diff --git a/tests/run/deeps.check b/tests/run/deeps.check deleted file mode 100644 index a68e474f62ab..000000000000 --- a/tests/run/deeps.check +++ /dev/null @@ -1,87 +0,0 @@ -testEquals1 -false -false -true - -testEquals2 -false -false -true - -testEquals3 -x=Array(1) -y=Array(1) -false -false -true - -x=Array(Array(1), Array(1)) -y=Array(Array(1), Array(1)) -false -false -true - -x=Array(Array(Array(1), Array(1)), Array(Array(1), Array(1))) -y=Array(Array(Array(1), Array(1)), Array(Array(1), Array(1))) -false -false -true - -testEquals4 -false -false -true -false -false -true -Array(true, false) -Array(true, false) -[true;false] -true;false - -Array(Array(true, false), Array(true, false)) -Array(Array(true, false), Array(true, false)) -[Array(true, false);Array(true, false)] -Array(true, false);Array(true, false) - -Array(Array(Array(true, false), Array(true, false)), Array(Array(true, false), Array(true, false))) -Array(Array(Array(true, false), Array(true, false)), Array(Array(true, false), Array(true, false))) -[Array(Array(true, false), Array(true, false));Array(Array(true, false), Array(true, false))] -Array(Array(true, false), Array(true, 
false));Array(Array(true, false), Array(true, false)) - -Array(1.0, 0.0) -Array(1.0, 0.0) -[1.0;0.0] -1.0;0.0 - -Array(Array(1.0, 0.0), Array(1.0, 0.0)) -Array(Array(1.0, 0.0), Array(1.0, 0.0)) -[Array(1.0, 0.0);Array(1.0, 0.0)] -Array(1.0, 0.0);Array(1.0, 0.0) - -Array(Array(Array(1.0, 0.0), Array(1.0, 0.0)), Array(Array(1.0, 0.0), Array(1.0, 0.0))) -Array(Array(Array(1.0, 0.0), Array(1.0, 0.0)), Array(Array(1.0, 0.0), Array(1.0, 0.0))) -[Array(Array(1.0, 0.0), Array(1.0, 0.0));Array(Array(1.0, 0.0), Array(1.0, 0.0))] -Array(Array(1.0, 0.0), Array(1.0, 0.0));Array(Array(1.0, 0.0), Array(1.0, 0.0)) - -Array(a, b) -Array(a, b) -[a;b] -a;b - -Array(Array(a, b), Array(a, b)) -Array(Array(a, b), Array(a, b)) -[Array(a, b);Array(a, b)] -Array(a, b);Array(a, b) - -Array(Array(Array(a, b), Array(a, b)), Array(Array(a, b), Array(a, b))) -Array(Array(Array(a, b), Array(a, b)), Array(Array(a, b), Array(a, b))) -[Array(Array(a, b), Array(a, b));Array(Array(a, b), Array(a, b))] -Array(Array(a, b), Array(a, b));Array(Array(a, b), Array(a, b)) - -[Array(true, false); Array(false)] -[Array(1, 2); Array(3)] -[Array(1, 2); Array(3)] - -Array(boo, and, foo) -Array(a) diff --git a/tests/run/deeps.scala b/tests/run/deeps.scala deleted file mode 100644 index d155030116f9..000000000000 --- a/tests/run/deeps.scala +++ /dev/null @@ -1,114 +0,0 @@ -//############################################################################ -// deepEquals / deep.toString -//############################################################################ - -//############################################################################ -// need to revisit array equqality -object Test { - - def testEquals1: Unit = { - println(Array(1) == Array(1)) - println(Array(1) equals Array(1)) - println(Array(1).deep == Array(1).deep) - println() - } - - def testEquals2: Unit = { - println(Array(Array(1), Array(2)) == Array(Array(1), Array(2))) - println(Array(Array(1), Array(2)) equals Array(Array(1), Array(2))) - println(Array(Array(1), Array(2)).deep equals Array(Array(1), Array(2)).deep) - println() - } - - def testEquals3: Unit = { - val a1 = Array(1) - val b1 = Array(1) - val a2 = Array(a1, b1) - val b2 = Array(a1, b1) - val a3 = Array(a2, b2) - val b3 = Array(a2, b2) - def test[T](x: Array[T], y: Array[T]): Unit = { - println("x=" + x.deep.toString) - println("y=" + y.deep.toString) - println(x == y) - println(x equals y) - println(x.deep == y.deep) - println() - } - test(a1, b1) - test(a2, b2) - test(a3, b3) - } - - def testEquals4: Unit = { - println("boo:and:foo".split(':') == "boo:and:foo".split(':')) - println("boo:and:foo".split(':') equals "boo:and:foo".split(':')) - println("boo:and:foo".split(':').deep == "boo:and:foo".split(':').deep) - - val xs = new java.util.ArrayList[String](); xs.add("a") - val ys = new java.util.ArrayList[String](); ys.add("a") - println(xs.toArray == ys.toArray) - println(xs.toArray equals ys.toArray) - println(xs.toArray.deep == ys.toArray.deep) - } - - def testToString1: Unit = { - def sweep(s: String) = ( - s.replaceAll("D@[0-9a-fA-F]+", "D@0000000") - .replaceAll("Z@[0-9a-fA-F]+", "Z@0000000") - .replaceAll(";@[0-9a-fA-F]+", ";@0000000") - ) - def test[T](a: Array[T]): Unit = { - println(sweep(a.deep.toString)) - println(a.deep.toString) - println(a.deep.mkString("[", ";", "]")) - println(a.deep.mkString(";")) - println() - } - - val ba1 = Array(true, false) - val ba2 = Array(ba1, ba1) - val ba3 = Array(ba2, ba2) - test(ba1) - test(ba2) - test(ba3) - - val da1 = Array(1.0d, 0.0d) - val da2 = Array(da1, 
da1) - val da3 = Array(da2, da2) - test(da1) - test(da2) - test(da3) - - val sa1 = Array("a", "b") - val sa2 = Array(sa1, sa1) - val sa3 = Array(sa2, sa2) - test(sa1) - test(sa2) - test(sa3) - } - - def testToString2: Unit = { - println(Array(Array(true, false), Array(false)).deep.mkString("[", "; ", "]")) - println(Array(Array('1', '2'), Array('3')).deep.mkString("[", "; ", "]")) - println(Array(Array(1, 2), Array(3)).deep.mkString("[", "; ", "]")) - println() - } - - def testToString3: Unit = { - println("boo:and:foo".split(':').deep.toString) - - val xs = new java.util.ArrayList[String](); xs.add("a") - println(xs.toArray.deep.toString) - } - - def main(args: Array[String]): Unit = { - println("testEquals1") ; testEquals1 - println("testEquals2") ; testEquals2 - println("testEquals3") ; testEquals3 - println("testEquals4") ; testEquals4 - testToString1 - testToString2 - testToString3 - } -} diff --git a/tests/run/enrich-gentraversable.check b/tests/run/enrich-gentraversable.check index 94c66e36921d..7a5611a13a08 100644 --- a/tests/run/enrich-gentraversable.check +++ b/tests/run/enrich-gentraversable.check @@ -1,8 +1,8 @@ List(2, 4) -Array(2, 4) -HW -Vector(72, 108, 108, 32, 114, 108, 100) List(2, 4) -Array(2, 4) +List(2, 4) HW -Vector(72, 108, 108, 32, 114, 108, 100) +ArraySeq(72, 108, 108, 32, 114, 108, 100) +Map(bar -> 2) +TreeMap(bar -> 2) +Map(bar -> 2) diff --git a/tests/run/enrich-gentraversable.scala b/tests/run/enrich-gentraversable.scala index 7d8b6bdb3b6d..43514e5a5136 100644 --- a/tests/run/enrich-gentraversable.scala +++ b/tests/run/enrich-gentraversable.scala @@ -2,69 +2,59 @@ import scala.language.implicitConversions import scala.language.postfixOps object Test extends App { - import scala.collection.{GenTraversableOnce,GenTraversableLike} - import scala.collection.generic._ + import scala.collection.generic.IsIterable + import scala.collection.{BuildFrom, Iterable, IterableOps, View} + import scala.collection.immutable.TreeMap def typed[T](t : => T): Unit = {} - def testTraversableLike = { - class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) /* extends AnyVal */ { - final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = - r.flatMap(f(_).toSeq) + def testIterableOps = { + class FilterMapImpl[A, Repr](r: Repr, it: IterableOps[A, Iterable, _]) { + final def filterMap[B, That](f: A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = + bf.fromSpecific(r)(it.flatMap(f(_))) } - implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableLike[Repr]): FilterMapImpl[fr.A,Repr] = - new FilterMapImpl[fr.A, Repr](fr.conversion(r)) + implicit def filterMap[Repr](r: Repr)(implicit fr: IsIterable[Repr]): FilterMapImpl[fr.A, Repr] = + new FilterMapImpl[fr.A, Repr](r, fr(r)) val l = List(1, 2, 3, 4, 5) val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None) typed[List[Int]](fml) println(fml) + val lv = l.view + val fmlv = lv.filterMap(i => if (i % 2 == 0) Some(i) else None) + typed[View[Int]](fmlv) + println(fmlv.toList) + val a = Array(1, 2, 3, 4, 5) val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None) typed[Array[Int]](fma) - println(fma.deep) + println(fma.toList) val s = "Hello World" val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None) typed[String](fms1) println(fms1) - val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None) + val fms2 = s.filterMap(c => if(c % 2 == 0) Some(c.toInt) else None) typed[IndexedSeq[Int]](fms2) println(fms2) - } - def testTraversableOnce = { 
- class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) /* extends AnyVal */ { - final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = { - val b = cbf() - for(e <- r.seq) f(e) foreach (b +=) - b.result - } - } - implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] = - new FilterMapImpl[fr.A, Repr](fr.conversion(r)) + val m = Map(1 -> "foo", 2 -> "bar") + val fmm = m.filterMap { case (k, v) => if (k % 2 == 0) Some(v -> k) else None } + typed[Map[String, Int]](fmm) + println(fmm) - val l = List(1, 2, 3, 4, 5) - val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None) - typed[List[Int]](fml) - println(fml) + val tm = TreeMap(1 -> "foo", 2 -> "bar") + val tmm = tm.filterMap { case (k, v) => if (k % 2 == 0) Some(v -> k) else None } + typed[TreeMap[String, Int]](tmm) + println(tmm) - val a = Array(1, 2, 3, 4, 5) - val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None) - typed[Array[Int]](fma) - println(fma.deep) - - val s = "Hello World" - val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None) - typed[String](fms1) - println(fms1) + val mv = m.view + val fmmv = mv.filterMap { case (k, v) => if (k % 2 == 0) Some(v -> k) else None } + typed[View[(String, Int)]](fmmv) + println(fmmv.toMap) - val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None) - typed[IndexedSeq[Int]](fms2) - println(fms2) } - testTraversableLike - testTraversableOnce + testIterableOps } diff --git a/tests/run/equality.scala b/tests/run/equality.scala index ff5989882196..2af73691d824 100644 --- a/tests/run/equality.scala +++ b/tests/run/equality.scala @@ -1,7 +1,7 @@ // a quickly assembled test of equality. Needs work. object Test { - import scala.runtime.ScalaRunTime.hash + def hash(x: Any): Int = x.## // forces upcast to Any def makeFromInt(x: Int) = List( x.toByte, x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x) diff --git a/tests/run/forvaleq.scala b/tests/run/forvaleq.scala index dac3234a68d6..f9c81caf5cc7 100644 --- a/tests/run/forvaleq.scala +++ b/tests/run/forvaleq.scala @@ -1,6 +1,5 @@ // test "foo = expr" clauses in for comprehensions -import scala.collection.immutable.Queue import scala.{List=>L} object Test { diff --git a/tests/run/generic/Serialization.scala b/tests/run/generic/Serialization.scala index a82d6bc7a100..211e801bd702 100644 --- a/tests/run/generic/Serialization.scala +++ b/tests/run/generic/Serialization.scala @@ -1,7 +1,7 @@ package generic import java.io.{DataInputStream,DataOutputStream} -import scala.collection.generic.GenericCompanion +import scala.collection.IterableFactory import scala.collection.mutable.ArrayBuffer import Shapes._ @@ -98,7 +98,7 @@ object Serialization { } implicit def IterableSerializable[I[X] <: Iterable[X], Elem](implicit - ev1: GenericCompanion[I], + ev1: IterableFactory[I], ev2: Serializable[Elem] ): Serializable[I[Elem]] = new Serializable[I[Elem]] { diff --git a/tests/run/hashset.scala b/tests/run/hashset.scala deleted file mode 100644 index 2c18ade7b493..000000000000 --- a/tests/run/hashset.scala +++ /dev/null @@ -1,48 +0,0 @@ -import scala.collection.generic.{Growable, Shrinkable} -import scala.collection.GenSet -import scala.collection.mutable.FlatHashTable -import scala.collection.mutable.HashSet -import scala.collection.parallel.mutable.ParHashSet - -object Test extends App { - test(new Creator{ - def create[A] = new HashSet[A] - def hashSetType = "HashSet" - }) - - test(new Creator{ - def create[A] 
= new ParHashSet[A]
-    def hashSetType = "ParHashSet"
-  })
-
-
-  def test(creator : Creator): Unit = {
-    println("*** " + creator.hashSetType + " primitives")
-    val h1 = creator.create[Int]
-    for (i <- 0 until 20) h1 += i
-    println((for (i <- 0 until 20) yield i + " " + (h1 contains i)).toList.sorted mkString(","))
-    println((for (i <- 20 until 40) yield i + " " + (h1 contains i)).toList.sorted mkString(","))
-    println(h1.toList.sorted mkString ",")
-    println()
-
-    println("*** " + creator.hashSetType + " Strings with null")
-    val h2 = creator.create[String]
-    h2 += null
-    for (i <- 0 until 20) h2 += "" + i
-    println("null " + (h2 contains null))
-    println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
-    println((for (i <- 20 until 40) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
-    println((h2.toList map {x => "" + x}).sorted mkString ",")
-
-    h2 -= null
-    h2 -= "" + 0
-    println("null " + (h2 contains null))
-    println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
-    println()
-  }
-
-  trait Creator {
-    def create[A] : GenSet[A] with Cloneable with FlatHashTable[A] with Growable[A] with Shrinkable[A]
-    def hashSetType : String
-  }
-}
diff --git a/tests/run/hashsetremove.check b/tests/run/hashsetremove.check
deleted file mode 100644
index 8de9826895ce..000000000000
--- a/tests/run/hashsetremove.check
+++ /dev/null
@@ -1,6 +0,0 @@
-remove 0 should be false, was false
-contains 1 should be true, was true
-remove 1 should be true, was true
-contains 1 should be false, was false
-remove 1 should be false, was false
-contains 1 should be false, was false
diff --git a/tests/run/hashsetremove.scala b/tests/run/hashsetremove.scala
index 1cfb5ed63fbb..193cdd7718d6 100644
--- a/tests/run/hashsetremove.scala
+++ b/tests/run/hashsetremove.scala
@@ -4,10 +4,10 @@ import scala.collection.mutable.HashSet
 object Test extends App {
   val h = new HashSet[Int]
   h += 1
-  println(s"remove 0 should be false, was ${h remove 0}")
-  println(s"contains 1 should be true, was ${h contains 1}")
-  println(s"remove 1 should be true, was ${h remove 1}")
-  println(s"contains 1 should be false, was ${h contains 1}")
-  println(s"remove 1 should be false, was ${h remove 1}")
-  println(s"contains 1 should be false, was ${h contains 1}")
+  assert(!h.remove(0))
+  assert(h(1))
+  assert(h.remove(1))
+  assert(!h(1))
+  assert(!h.remove(1))
+  assert(!h(1))
 }
diff --git a/tests/run/i1284.scala b/tests/run/i1284.scala
index f8b9de0de087..a2b3244869b3 100644
--- a/tests/run/i1284.scala
+++ b/tests/run/i1284.scala
@@ -3,6 +3,6 @@ case object B
 object Test {
   def main(args: Array[String]): Unit = {
-    assert(Array(A, B).deep.toString == "Array(A, B)")
+    assert(Array(A, B).toList.toString == "List(A, B)")
   }
 }
diff --git a/tests/run/i2883.scala b/tests/run/i2883.scala
index d42f715d7f0a..1a82d3344aed 100644
--- a/tests/run/i2883.scala
+++ b/tests/run/i2883.scala
@@ -11,6 +11,6 @@ object Test extends App {
   def foo(wrapper: Wrapper): Foo = new Foo(wrapper.value) {}
   def printFields(obj: Any) =
-    println(obj.getClass.getDeclaredFields.map(_.toString).sorted.deep.mkString("\n"))
+    println(obj.getClass.getDeclaredFields.map(_.toString).sorted.toList.mkString("\n"))
   printFields(foo(new Wrapper(1)))
 }
diff --git a/tests/run/i3207.check b/tests/run/i3207.check
index 9cd18c01426a..19455e1b3b0f 100644
--- a/tests/run/i3207.check
+++ b/tests/run/i3207.check
@@ -1,2 +1,2 @@
-WrappedArray()
-WrappedArray(A, B)
+ArraySeq()
+ArraySeq(A, B)
diff --git a/tests/run/i4523.check b/tests/run/i4523.check
index 9710c14ba5bc..9cfb7a201e33 100644
--- a/tests/run/i4523.check
+++ b/tests/run/i4523.check
@@ -1,4 +1,4 @@
 private static final int C.foo$$anonfun$1(int)
 private static final void C.notfive$1(int)
 private static java.lang.Object C.$deserializeLambda$(java.lang.invoke.SerializedLambda)
-public scala.collection.Seq C.foo()
+public scala.collection.immutable.Seq C.foo()
diff --git a/tests/run/i768.scala b/tests/run/i768.scala
index 0697b476bb48..08e2200efca8 100644
--- a/tests/run/i768.scala
+++ b/tests/run/i768.scala
@@ -3,8 +3,9 @@ case class A(a: String*){
 }
 object Test {
-  def main(args: Array[String]) =
-    assert(A("a", "bc").s == "WrappedArray(a, bc)")
+  def main(args: Array[String]) = {
+    assert(A("a", "bc").s == "ArraySeq(a, bc)")
+  }
 }
 }
diff --git a/tests/run/infiniteloop.check b/tests/run/infiniteloop.check
index 6f8cf6e4d9cf..18d89b22bd93 100644
--- a/tests/run/infiniteloop.check
+++ b/tests/run/infiniteloop.check
@@ -1 +1,2 @@
-Stream(512, 256, 128, 64, 32, 16, 8, 4, 2, 1)
+LazyList()
+List(512, 256, 128, 64, 32, 16, 8, 4, 2, 1)
diff --git a/tests/run/infiniteloop.scala b/tests/run/infiniteloop.scala
index 57cfa8d37fc8..258ad01c6ac2 100644
--- a/tests/run/infiniteloop.scala
+++ b/tests/run/infiniteloop.scala
@@ -2,11 +2,12 @@ object Test extends App {
   def foo: Unit = {
-    val s3 = Stream.range(1, 1000) //100000 (ticket #153: Stackoverflow)
+    val s3 = LazyList.range(1, 1000) //100000 (ticket #153: Stackoverflow)
     // ticket #153
     def powers(x: Int) = if ((x&(x-1)) == 0) Some(x) else None
     println(s3.flatMap(powers).reverse)
+    println(s3.flatMap(powers).reverse.toList)
   }
   foo
diff --git a/tests/run/interop_classtags_are_classmanifests.scala b/tests/run/interop_classtags_are_classmanifests.scala
deleted file mode 100644
index 62d85c3ce3b2..000000000000
--- a/tests/run/interop_classtags_are_classmanifests.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.reflect.ClassTag
-
-@deprecated("Suppress warnings", since="2.11")
-object Test extends App {
-  def classTagIsClassManifest[T: ClassTag] = {
-    println(classManifest[T])
-  }
-
-  classTagIsClassManifest[Int]
-  classTagIsClassManifest[String]
-  classTagIsClassManifest[Array[Int]]
-}
diff --git a/tests/run/iterables.check b/tests/run/iterables.check
index aac90b70a909..b7b0b61a85c8 100644
--- a/tests/run/iterables.check
+++ b/tests/run/iterables.check
@@ -1,5 +1,7 @@
 false
 0,1,2,3,4,5,6,7,8,9
 5,6,7,8,9
+0,2,4,6,8
+
 0,2,4,6,8
 1,3,5,7,9
diff --git a/tests/run/iterables.scala b/tests/run/iterables.scala
index ad30f4731673..40710a737f7a 100644
--- a/tests/run/iterables.scala
+++ b/tests/run/iterables.scala
@@ -1,3 +1,4 @@
+import scala.collection.{mutable, StrictOptimizedIterableOps}
 object Test extends App {
   class Test(n: Int) extends Iterable[Int] {
     private var i = 0
@@ -8,6 +9,9 @@ object Test extends App {
       else throw new IndexOutOfBoundsException("empty iterator")
     }
   }
+
+  class TestStrict(n: Int) extends Test(n) with StrictOptimizedIterableOps[Int, Iterable, Iterable[Int]]
+
   {
     val x = new Test(10)
     println(x.isEmpty)
@@ -20,7 +24,13 @@ object Test extends App {
   {
     val x = new Test(10)
     val y = x.partition(_ % 2 == 0)
-    println(y._1.mkString(","))
-    println(y._2.mkString(","))
+    println(y._1.mkString(",")) // evens
+    println(y._2.mkString(",")) // empty, creates two iterators
+  }
+  {
+    val x = new TestStrict(10)
+    val y = x.partition(_ % 2 == 0)
+    println(y._1.mkString(",")) // evens
+    println(y._2.mkString(",")) // odds
   }
 }
diff --git a/tests/run/iterator-from.scala
b/tests/run/iterator-from.scala index 63b68d3feb00..ca2910d13005 100644 --- a/tests/run/iterator-from.scala +++ b/tests/run/iterator-from.scala @@ -1,5 +1,5 @@ /* This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps - * filter: inliner warnings; re-run with -Yinline-warnings for details + * filter: inliner warnings */ import scala.util.{Random => R} @@ -11,22 +11,19 @@ object Test extends App { val maxKey = 50 val maxValue = 50 - implicit def convertIfView[A](x: A)(implicit view: A => Ordered[A]): Ordered[A] = view(x) - - def testSet[A: Ordering](set: SortedSet[A], list: List[A]): Unit = { + def testSet[A](set: SortedSet[A], list: List[A])(implicit o: Ordering[A]): Unit = { val distinctSorted = list.distinct.sorted assertEquals("Set size wasn't the same as list sze", set.size, distinctSorted.size) for(key <- distinctSorted) { val clazz = set.getClass val iteratorFrom = (set iteratorFrom key).toList - check(clazz, list, s"set iteratorFrom $key", s"(set from $key).iterator", iteratorFrom, (set from key).iterator.toList) - check(clazz, list, s"set.iteratorFrom $key", s"distinctSorted dropWhile (_ < $key)", iteratorFrom, distinctSorted dropWhile (_ < key)) - check(clazz, list, s"set iteratorFrom $key", s"set keysIterator from $key", iteratorFrom, (set keysIteratorFrom key).toList) + check(clazz, list, s"set iteratorFrom $key", s"(set from $key).iterator", iteratorFrom, (set rangeFrom key).iterator.toList) + check(clazz, list, s"set.iteratorFrom $key", s"distinctSorted dropWhile (_ < $key)", iteratorFrom, distinctSorted dropWhile (o.lt(_, key))) } } - def testMap[A: Ordering, B](map: SortedMap[A, B], list: List[(A, B)]): Unit = { + def testMap[A, B](map: SortedMap[A, B], list: List[(A, B)])(implicit o: Ordering[A]): Unit = { val distinctSorted = distinctByKey(list).sortBy(_._1) assertEquals("Map size wasn't the same as list sze", map.size, distinctSorted.size) @@ -34,8 +31,8 @@ object Test extends App { val key = keyValue._1 val clazz = map.getClass val iteratorFrom = (map iteratorFrom key).toList - check(clazz, list, s"map iteratorFrom $key", s"(map from $key).iterator", iteratorFrom, (map from key).iterator.toList) - check(clazz, list, s"map iteratorFrom $key", s"distinctSorted dropWhile (_._1 < $key)", iteratorFrom, distinctSorted dropWhile (_._1 < key)) + check(clazz, list, s"map iteratorFrom $key", s"(map from $key).iterator", iteratorFrom, (map rangeFrom key).iterator.toList) + check(clazz, list, s"map iteratorFrom $key", s"distinctSorted dropWhile (_._1 < $key)", iteratorFrom, distinctSorted dropWhile (x => o.lt(x._1, key))) check(clazz, list, s"map iteratorFrom $key map (_._1)", s"map keysIteratorFrom $key", iteratorFrom map (_._1), (map keysIteratorFrom key).toList) check(clazz, list, s"map iteratorFrom $key map (_._2)", s"map valuesIteratorFrom $key", iteratorFrom map (_._2), (map valuesIteratorFrom key).toList) } @@ -63,12 +60,11 @@ object Test extends App { testSet(immutable.TreeSet(keys:_*), keys) testSet(mutable.TreeSet(keys:_*), keys) val days = keys map {n => Weekday(n % Weekday.values.size)} - - testSet(Weekday.ValueSet(days:_*), days) // Note: produces divergent search in scalac + testSet(Weekday.ValueSet(days:_*), days) val treeMap = immutable.TreeMap(keyValues:_*) testMap(treeMap, keyValues) - testMap(treeMap.filterKeys(_ % 2 == 0), keyValues filter (_._1 % 2 == 0)) - testMap(treeMap mapValues (_ + 1), keyValues map {case (k,v) => (k, v + 1)}) + testMap(treeMap.view.filterKeys(_ % 2 == 0).to(SortedMap), keyValues filter (_._1 
% 2 == 0)) + testMap(treeMap.view.mapValues(_ + 1).to(SortedMap), keyValues map {case (k,v) => (k, v + 1)}) } } diff --git a/tests/run/kmpSliceSearch.scala b/tests/run/kmpSliceSearch.scala index 02f14df4dada..d5c1fc370023 100644 --- a/tests/run/kmpSliceSearch.scala +++ b/tests/run/kmpSliceSearch.scala @@ -17,21 +17,21 @@ object Test { for (h <- Array(2,5,1000)) { for (i <- 0 to 100) { for (j <- 0 to 10) { - val xs = (0 to j).map(_ => (rng.nextInt() & 0x7FFFFFFF) % h) + val xs = (0 to j).map(_ => (rng.nextInt & 0x7FFFFFFF) % h) val xsa = xs.toArray val xsv = Vector() ++ xs val xsl = xs.toList - val xss = Vector[Seq[Int]](xs,xsa,xsv,xsl) + val xss = Vector[Seq[Int]](xs,xsa.toIndexedSeq,xsv,xsl) for (k <- 0 to 5) { - val ys = (0 to k).map(_ => (rng.nextInt() & 0x7FFFFFFF) % h) + val ys = (0 to k).map(_ => (rng.nextInt & 0x7FFFFFFF) % h) val ysa = ys.toArray val ysv = Vector() ++ ys val ysl = ys.toList - val yss = Vector[Seq[Int]](ys,ysa,ysv,ysl) + val yss = Vector[Seq[Int]](ys,ysa.toIndexedSeq,ysv,ysl) val fwd_slow = slowSearch(xs,ys) val bkw_slow = bkwSlowSearch(xs,ys) - val fwd_fast = xss.flatMap(xs => yss.map(ys => SeqLike.indexOf(xs,0,xs.length,ys,0,ys.length,0))) - val bkw_fast = xss.flatMap(xs => yss.map(ys => SeqLike.lastIndexOf(xs,0,xs.length,ys,0,ys.length,xs.length))) + val fwd_fast = xss.flatMap(xs => yss.map(ys => xs.indexOfSlice(ys))) + val bkw_fast = xss.flatMap(xs => yss.map(ys => xs.lastIndexOfSlice(ys))) assert(fwd_fast.forall(_ == fwd_slow)) assert(bkw_fast.forall(_ == bkw_slow)) } @@ -41,20 +41,20 @@ object Test { // Check performance^Wcorrectness of common small test cases val haystacks = List[Seq[Int]]( - Array(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15), + Array(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15).toIndexedSeq, Vector(99,2,99,99,2,99,99,99,2,99,99,99,99,2), List(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1), 1 to 15 ) val needles = List[Seq[Int]]( - Array(7,8,9,10), + Array(7,8,9,10).toIndexedSeq, Vector(99,99,99), List(1,1,1,1,1,2), 5 to 9 ) (haystacks zip needles) foreach { case (hay, nee) => - println(hay.indexOfSlice(nee,2) + " " + hay.lastIndexOfSlice(nee,13)) + println(s"${hay.indexOfSlice(nee,2)} ${hay.lastIndexOfSlice(nee,13)}") } } } diff --git a/tests/run/lists-run.scala b/tests/run/lists-run.scala index 713b19659687..e0e2a328e796 100644 --- a/tests/run/lists-run.scala +++ b/tests/run/lists-run.scala @@ -6,7 +6,7 @@ import scala.language.postfixOps object Test { def main(args: Array[String]): Unit = { - Test_multiset.run() // multiset operations: union, intersect, diff + Test_multiset.run() // multiset operations: :::, intersect, diff Test1.run() //count, exists, filter, .. 
Test2.run() //#468 Test3.run() //#1691 @@ -21,8 +21,8 @@ object Test_multiset { thiz forall (that contains _) val xs = List(1, 1, 2) val ys = List(1, 2, 2, 3) - assert(List(1, 1, 2, 1, 2, 2, 3) == (xs union ys), "xs_union_ys") - assert(List(1, 2, 2, 3, 1, 1, 2) == (ys union xs), "ys_union_xs") + assert(List(1, 1, 2, 1, 2, 2, 3) == (xs ::: ys), "xs_:::_ys") + assert(List(1, 2, 2, 3, 1, 1, 2) == (ys ::: xs), "ys_:::_xs") assert(List(1, 2) == (xs intersect ys), "xs_intersect_ys") assert(List(1, 2) == (ys intersect xs), "ys_intersect_xs") assert(List(1) == (xs diff ys), "xs_diff_ys") @@ -30,8 +30,8 @@ object Test_multiset { assert(isSubListOf(xs filterNot (ys contains), xs diff ys), "xs_subset_ys") val zs = List(0, 1, 1, 2, 2, 2) - assert(List(0, 1, 1, 2, 2, 2, 1, 2, 2, 3) == (zs union ys), "zs_union_ys") - assert(List(1, 2, 2, 3, 0, 1, 1, 2, 2, 2) == (ys union zs), "ys_union_zs") + assert(List(0, 1, 1, 2, 2, 2, 1, 2, 2, 3) == (zs ::: ys), "zs_:::_ys") + assert(List(1, 2, 2, 3, 0, 1, 1, 2, 2, 2) == (ys ::: zs), "ys_:::_zs") assert(List(1, 2, 2) == (zs intersect ys), "zs_intersect_ys") assert(List(1, 2, 2) == (ys intersect zs), "ys_intersect_zs") assert(List(0, 1, 2) == (zs diff ys), "zs_diff_ys") @@ -39,8 +39,8 @@ object Test_multiset { assert(isSubListOf(zs filterNot (ys contains), zs diff ys), "xs_subset_ys") val ws = List(2) - assert(List(2, 1, 2, 2, 3) == (ws union ys), "ws_union_ys") - assert(List(1, 2, 2, 3, 2) == (ys union ws), "ys_union_ws") + assert(List(2, 1, 2, 2, 3) == (ws ::: ys), "ws_:::_ys") + assert(List(1, 2, 2, 3, 2) == (ys ::: ws), "ys_:::_ws") assert(List(2) == (ws intersect ys), "ws_intersect_ys") assert(List(2) == (ys intersect ws), "ys_intersect_ws") assert(List() == (ws diff ys), "ws_diff_ys") @@ -48,8 +48,8 @@ object Test_multiset { assert(isSubListOf(ws filterNot (ys contains), ws diff ys), "ws_subset_ys") val vs = List(3, 2, 2, 1) - assert(List(1, 1, 2, 3, 2, 2, 1) == (xs union vs), "xs_union_vs") - assert(List(3, 2, 2, 1, 1, 1, 2) == (vs union xs), "vs_union_xs") + assert(List(1, 1, 2, 3, 2, 2, 1) == (xs ::: vs), "xs_:::_vs") + assert(List(3, 2, 2, 1, 1, 1, 2) == (vs ::: xs), "vs_:::_xs") assert(List(1, 2) == (xs intersect vs), "xs_intersect_vs") assert(List(2, 1) == (vs intersect xs), "vs_intersect_xs") assert(List(1) == (xs diff vs), "xs_diff_vs") diff --git a/tests/run/mapValues.scala b/tests/run/mapValues.scala index d3266bd18fef..ff6f93346b49 100644 --- a/tests/run/mapValues.scala +++ b/tests/run/mapValues.scala @@ -1,6 +1,6 @@ object Test { val m = Map(1 -> 1, 2 -> 2) - val mv = (m mapValues identity) - 1 + val mv = (m.view mapValues identity).toMap - 1 def main(args: Array[String]): Unit = { assert(mv.size == 1) diff --git a/tests/run/map_java_conversions.scala b/tests/run/map_java_conversions.scala deleted file mode 100644 index 5c484ee35e3f..000000000000 --- a/tests/run/map_java_conversions.scala +++ /dev/null @@ -1,60 +0,0 @@ - - - - - -object Test { - - def main(args: Array[String]): Unit = { - import collection.JavaConversions._ - - test(new java.util.HashMap[String, String]) - test(new java.util.Properties) - testConcMap - } - - def testConcMap: Unit = { - import collection.JavaConversions._ - - val concMap = new java.util.concurrent.ConcurrentHashMap[String, String] - - test(concMap) - val cmap = mapAsScalaConcurrentMap(concMap) - cmap.putIfAbsent("absentKey", "absentValue") - cmap.put("somekey", "somevalue") - assert(cmap.remove("somekey", "somevalue") == true) - assert(cmap.replace("absentKey", "newAbsentValue") == Some("absentValue")) - 
assert(cmap.replace("absentKey", "newAbsentValue", ".......") == true) - } - - def test(m: collection.mutable.Map[String, String]): Unit = { - m.clear() - assert(m.size == 0) - - m.put("key", "value") - assert(m.size == 1) - - assert(m.put("key", "anotherValue") == Some("value")) - assert(m.put("key2", "value2") == None) - assert(m.size == 2) - - m += (("key3", "value3")) - assert(m.size == 3) - - m -= "key2" - assert(m.size == 2) - assert(m.nonEmpty) - assert(m.remove("key") == Some("anotherValue")) - - m.clear() - for (i <- 0 until 10) m += (("key" + i, "value" + i)) - for ((k, v) <- m) assert(k.startsWith("key")) - } - -} - - - - - - diff --git a/tests/run/matcharraytail.check b/tests/run/matcharraytail.check index f2844d41a994..c824a31684c0 100644 --- a/tests/run/matcharraytail.check +++ b/tests/run/matcharraytail.check @@ -1,2 +1,2 @@ -Array(foo, bar, baz) -Vector(bar, baz) +List(foo, bar, baz) +ArraySeq(bar, baz) diff --git a/tests/run/matcharraytail.scala b/tests/run/matcharraytail.scala index e82bcf6f02fe..b65a3bcb537f 100644 --- a/tests/run/matcharraytail.scala +++ b/tests/run/matcharraytail.scala @@ -1,6 +1,6 @@ object Test extends App{ Array("foo", "bar", "baz") match { - case x@Array("foo", bar :_*) => println(x.deep.toString); println(bar.toString); + case x@Array("foo", bar @_*) => println(x.toList.toString); println(bar.toString); case Array(x, y, z) => println("shouldn't have fallen through"); case _ => println("default case?!"); } diff --git a/tests/run/matchemptyarray.check b/tests/run/matchemptyarray.check index 815225fc64d6..375518e921cd 100644 --- a/tests/run/matchemptyarray.check +++ b/tests/run/matchemptyarray.check @@ -1 +1 @@ -Array() +List() diff --git a/tests/run/matchemptyarray.scala b/tests/run/matchemptyarray.scala index 1fa53b48297a..9d0fdb76c116 100644 --- a/tests/run/matchemptyarray.scala +++ b/tests/run/matchemptyarray.scala @@ -1,5 +1,5 @@ object Test extends App{ Array[String]() match { - case x@Array() => println(x.deep.toString()); + case x@Array() => println(x.toList.toString()); } } diff --git a/tests/run/matchonstream.scala b/tests/run/matchonstream.scala deleted file mode 100644 index 0ccc7bc47b6d..000000000000 --- a/tests/run/matchonstream.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test extends App{ - Stream.from(1) match { case Stream(1, 2, x :_*) => println("It worked!") } -} diff --git a/tests/run/multi-array.check b/tests/run/multi-array.check deleted file mode 100644 index f163dae13d9e..000000000000 --- a/tests/run/multi-array.check +++ /dev/null @@ -1,4 +0,0 @@ -Array(1, 2, 3) -null -Array(Array(0, 0, 0), Array(0, 0, 0), Array(0, 0, 0)) -Array(Array(0, 1, 2), Array(1, 2, 3), Array(2, 3, 4)) diff --git a/tests/run/multi-array.scala b/tests/run/multi-array.scala deleted file mode 100644 index 36e21ae539de..000000000000 --- a/tests/run/multi-array.scala +++ /dev/null @@ -1,14 +0,0 @@ -object Test extends App { - val a = Array(1, 2, 3) - println(a.deep.toString) - - val aaiIncomplete = new Array[Array[Array[Int]]](3) - println(aaiIncomplete(0)) - - val aaiComplete: Array[Array[Int]] = Array.ofDim[Int](3, 3) // new Array[Array[Int]](3, 3) - println(aaiComplete.deep) - for (i <- 0 until 3; j <- 0 until 3) - aaiComplete(i)(j) = i + j - println(aaiComplete.deep.toString) - assert(aaiComplete.last.last == 4) -} diff --git a/tests/run/null-and-intersect.scala b/tests/run/null-and-intersect.scala index 7266dabe6df4..04552635551d 100644 --- a/tests/run/null-and-intersect.scala +++ b/tests/run/null-and-intersect.scala @@ -1,9 +1,10 @@ object Test { + 
trait Immutable trait Immortal class Bippy extends Immutable with Immortal class Boppy extends Immutable - def f[T](x: Traversable[T]) = x match { + def f[T](x: Iterable[T]) = x match { case _: Map[_, _] => 3 case _: Seq[_] => 2 case _: Iterable[_] => 1 diff --git a/tests/run/paramForwarding.scala b/tests/run/paramForwarding.scala index 6fe80a230c20..2fe1b9c4a06b 100644 --- a/tests/run/paramForwarding.scala +++ b/tests/run/paramForwarding.scala @@ -43,7 +43,7 @@ class Y(override val theValue: Int) extends NonVal(theValue) object Test { def printFields(obj: Any) = - println(obj.getClass.getDeclaredFields.map(_.toString).sorted.deep.mkString("\n")) + println(obj.getClass.getDeclaredFields.map(_.toString).sorted.toList.mkString("\n")) def main(args: Array[String]): Unit = { val b10 = new B(10) diff --git a/tests/run/paramForwarding_separate/B_2.scala b/tests/run/paramForwarding_separate/B_2.scala index 7c13018b7019..6432d5217b11 100644 --- a/tests/run/paramForwarding_separate/B_2.scala +++ b/tests/run/paramForwarding_separate/B_2.scala @@ -4,7 +4,7 @@ class B(member: Int) extends SubA(member) { object Test { def printFields(cls: Class[_]) = - println(cls.getDeclaredFields.map(_.toString).sorted.deep.mkString("\n")) + println(cls.getDeclaredFields.map(_.toString).sorted.toList.mkString("\n")) def main(args: Array[String]): Unit = { val a = new A(10) diff --git a/tests/run/paramForwarding_together.scala b/tests/run/paramForwarding_together.scala index 670411e58049..fc98f8f888a2 100644 --- a/tests/run/paramForwarding_together.scala +++ b/tests/run/paramForwarding_together.scala @@ -12,7 +12,7 @@ class B(member: Int) extends SubA(member) { object Test { def printFields(cls: Class[_]) = - println(cls.getDeclaredFields.map(_.toString).sorted.deep.mkString("\n")) + println(cls.getDeclaredFields.map(_.toString).sorted.toList.mkString("\n")) def main(args: Array[String]): Unit = { val a = new A(10) diff --git a/tests/run/paramForwarding_together_b.scala b/tests/run/paramForwarding_together_b.scala index cb61cd11fb39..558892b6262c 100644 --- a/tests/run/paramForwarding_together_b.scala +++ b/tests/run/paramForwarding_together_b.scala @@ -13,7 +13,7 @@ class A(val member: Int) { object Test { def printFields(cls: Class[_]) = - println(cls.getDeclaredFields.map(_.toString).sorted.deep.mkString("\n")) + println(cls.getDeclaredFields.map(_.toString).sorted.toList.mkString("\n")) def main(args: Array[String]): Unit = { val a = new A(10) diff --git a/tests/run/parmap-ops.scala b/tests/run/parmap-ops.scala deleted file mode 100644 index 3c0d8ee4b667..000000000000 --- a/tests/run/parmap-ops.scala +++ /dev/null @@ -1,48 +0,0 @@ -import collection._ - -object Test { - - def main(args: Array[String]): Unit = { - val gm: GenMap[Int, Int] = GenMap(0 -> 0, 1 -> 1).par - - // ops - assert(gm.isDefinedAt(1)) - assert(gm.contains(1)) - assert(gm.getOrElse(1, 2) == 1) - assert(gm.getOrElse(2, 3) == 3) - assert(gm.keysIterator.toSet == Set(0, 1)) - assert(gm.valuesIterator.toSet == Set(0, 1)) - assert(gm.keySet == Set(0, 1)) - assert(gm.keys.toSet == Set(0, 1)) - assert(gm.values.toSet == Set(0, 1)) - try { - gm.default(-1) - assert(false) - } catch { - case e: NoSuchElementException => // ok - } - - assert(gm.filterKeys(_ % 2 == 0)(0) == 0) - assert(gm.filterKeys(_ % 2 == 0).get(1) == None) - assert(gm.mapValues(_ + 1)(0) == 1) - - // with defaults - val pm = parallel.mutable.ParMap(0 -> 0, 1 -> 1) - val dm = pm.withDefault(x => -x) - assert(dm(0) == 0) - assert(dm(1) == 1) - assert(dm(2) == -2) - assert(dm.updated(2, 
2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2)) - dm.put(3, 3) - assert(dm(3) == 3) - assert(pm(3) == 3) - assert(dm(4) == -4) - - val imdm = parallel.immutable.ParMap(0 -> 0, 1 -> 1).withDefault(x => -x) - assert(imdm(0) == 0) - assert(imdm(1) == 1) - assert(imdm(2) == -2) - assert(imdm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2)) - } - -} diff --git a/tests/run/pc-conversions.scala b/tests/run/pc-conversions.scala deleted file mode 100644 index effac118bc81..000000000000 --- a/tests/run/pc-conversions.scala +++ /dev/null @@ -1,94 +0,0 @@ -/* - * filter: inliner warning; re-run with -Yinline-warnings for details - */ - -import collection._ - - -// test conversions between collections -object Test { - - def main(args: Array[String]): Unit = { - testConversions - } - - def testConversions: Unit = { - // seq conversions - assertSeq(parallel.mutable.ParArray(1, 2, 3)) - assertSeq(parallel.mutable.ParHashMap(1 -> 2, 2 -> 3)) - assertSeq(parallel.mutable.ParHashSet(1, 2, 3)) - assertSeq(parallel.immutable.ParRange(1, 50, 1, false)) - assertSeq(parallel.immutable.ParHashMap(1 -> 2, 2 -> 4)) - assertSeq(parallel.immutable.ParHashSet(1, 2, 3)) - - // par conversions - assertPar(Array(1, 2, 3)) - assertPar(mutable.ArrayBuffer(1, 2, 3)) - assertPar(mutable.ArraySeq(1, 2, 3)) - assertPar(mutable.WrappedArray.make[Int](Array(1, 2, 3))) - assertPar(mutable.HashMap(1 -> 1, 2 -> 2)) - assertPar(mutable.HashSet(1, 2, 3)) - assertPar(immutable.Range(1, 50, 1)) - assertPar(immutable.HashMap(1 -> 1, 2 -> 2)) - assertPar(immutable.HashSet(1, 2, 3)) - - // par.to* and to*.par tests - assertToPar(List(1 -> 1, 2 -> 2, 3 -> 3)) - assertToPar(Stream(1 -> 1, 2 -> 2)) - assertToPar(Array(1 -> 1, 2 -> 2)) - assertToPar(mutable.PriorityQueue(1 -> 1, 2 -> 2, 3 -> 3)) - assertToPar(mutable.ArrayBuffer(1 -> 1, 2 -> 2)) - assertToPar(mutable.ArraySeq(1 -> 3)) - assertToPar(mutable.WrappedArray.make[(Int, Int)](Array(1 -> 3))) - assertToPar(mutable.HashMap(1 -> 3)) - assertToPar(mutable.HashSet(1 -> 3)) - assertToPar(immutable.HashMap(1 -> 3)) - assertToPar(immutable.HashSet(1 -> 3)) - assertToPar(parallel.mutable.ParArray(1 -> 1, 2 -> 2, 3 -> 3)) - assertToPar(parallel.mutable.ParHashMap(1 -> 2)) - assertToPar(parallel.mutable.ParHashSet(1 -> 2)) - assertToPar(parallel.immutable.ParHashMap(1 -> 2)) - assertToPar(parallel.immutable.ParHashSet(1 -> 3)) - - assertToParWoMap(immutable.Range(1, 10, 2)) - - // seq and par again conversions) - assertSeqPar(parallel.mutable.ParArray(1, 2, 3)) - } - - def assertSeqPar[T](pc: parallel.ParIterable[T]) = pc.seq.par == pc - - def assertSeq[T](pc: parallel.ParIterable[T]) = assert(pc.seq == pc) - - def assertPar[T, P <: Parallel](xs: GenIterable[T]) = assert(xs == xs.par) - - def assertToPar[K, V](xs: GenTraversable[(K, V)]): Unit = { - xs match { - case _: Seq[_] => - assert(xs.toIterable.par == xs) - assert(xs.par.toIterable == xs) - case _ => - } - - assert(xs.toSeq.par == xs.toSeq) - assert(xs.par.toSeq == xs.toSeq) - - assert(xs.toSet.par == xs.toSet) - assert(xs.par.toSet == xs.toSet) - - assert(xs.toMap.par == xs.toMap) - assert(xs.par.toMap == xs.toMap) - } - - def assertToParWoMap[T](xs: GenSeq[T]): Unit = { - assert(xs.toIterable.par == xs.toIterable) - assert(xs.par.toIterable == xs.toIterable) - - assert(xs.toSeq.par == xs.toSeq) - assert(xs.par.toSeq == xs.toSeq) - - assert(xs.toSet.par == xs.toSet) - assert(xs.par.toSet == xs.toSet) - } - -} diff --git a/tests/run/productElementName.check b/tests/run/productElementName.check index def205de0d2d..a7499b7c1e5d 
100644 --- a/tests/run/productElementName.check +++ b/tests/run/productElementName.check @@ -1,5 +1,5 @@ User(name=Susan, age=42) -ユーザー(名前=Susan, 年齢=42) +ユーザ(名前=Susan, 年齢=42) U$er(na$me=Susan, a$ge=42) type(for=Susan, if=42) contains spaces(first param=Susan, second param=42) @@ -7,5 +7,8 @@ Symbols(::=Susan, ||=42) MultipleParamLists(a=Susan, b=42) AuxiliaryConstructor(a=Susan, b=42) OverloadedApply(a=Susan, b=123) +DefinesProductElementName(foo=Susan, foo=42) +InheritsProductElementName(a=Susan, b=42) +InheritsProductElementName_Override(overriden=Susan, overriden=42) +InheritsProductElementName_Override_SelfType(a=Susan, b=42) PrivateMembers(a=10, b=20, c=30, d=40, e=50, f=60) -NoParams() diff --git a/tests/run/productElementName.scala b/tests/run/productElementName.scala index 4451c9fad465..b98ac96cdfa9 100644 --- a/tests/run/productElementName.scala +++ b/tests/run/productElementName.scala @@ -1,78 +1,59 @@ -// These methods are not yet on Product.scala (added in 2.13.x) -trait Product2_13 extends Product { - def productElementName(n: Int): String - /** An iterator over the names of all the elements of this product. - */ - def productElementNames: Iterator[String] = new scala.collection.AbstractIterator[String] { - private[this] var c: Int = 0 - private[this] val cmax = productArity +case class User(name: String, age: Int) - def hasNext = c < cmax +case class ユーザ(名前: String, 年齢: Int) - def next() = { - val result = productElementName(c); c += 1; result - } - } -} - -case class User(name: String, age: Int) extends Product2_13 - -case class ユーザー(名前: String, 年齢: Int) extends Product2_13 +case class U$er(na$me: String, a$ge: Int) -case class U$er(na$me: String, a$ge: Int) extends Product2_13 +case class `type`(`for`: String, `if`: Int) -case class `type`(`for`: String, `if`: Int) extends Product2_13 +case class `contains spaces`(`first param`: String, `second param`: Int) -case class `contains spaces`(`first param`: String, `second param`: Int) extends Product2_13 +case class Symbols(:: : String, || : Int) -case class Symbols(:: : String, || : Int) extends Product2_13 +case class MultipleParamLists(a: String, b: Int)(c: Boolean) -case class MultipleParamLists(a: String, b: Int)(c: Boolean) extends Product2_13 - -case class AuxiliaryConstructor(a: String, b: Int) extends Product2_13 { +case class AuxiliaryConstructor(a: String, b: Int) { def this(x: String) = { this(x, 123) } } -case class OverloadedApply(a: String, b: Int) extends Product2_13 +case class OverloadedApply(a: String, b: Int) object OverloadedApply { def apply(x: String): OverloadedApply = new OverloadedApply(x, 123) } -case class NoParams() extends Product2_13 - -//case class DefinesProductElementName(a: String, b: Int) extends Product2_13 { -// override def productElementName(n: Int): String = "foo" -//} - -//trait A { -// override def productElementName(n: Int): String = "overriden" -//} -//case class InheritsProductElementName(a: String, b: Int) extends A -// -//trait B extends Product2_13 { -// override def productElementName(n: Int): String = "overriden" -//} -//case class InheritsProductElementName_Override(a: String, b: Int) extends B -// -//trait C { self: Product => -// override def productElementName(n: Int): String = "overriden" -//} -//case class InheritsProductElementName_Override_SelfType(a: String, b: Int) extends C - -case class PrivateMembers(a: Int, private val b: Int, c: Int, private val d: Int, e: Int, private val f: Int) extends Product2_13 +case class DefinesProductElementName(a: String, b: Int) { + 
override def productElementName(n: Int): String = "foo" +} + +trait A { + def productElementName(n: Int): String = "overriden" +} +case class InheritsProductElementName(a: String, b: Int) extends A + +trait B extends Product { + override def productElementName(n: Int): String = "overriden" +} +case class InheritsProductElementName_Override(a: String, b: Int) extends B + +trait C { self: Product => + override def productElementName(n: Int): String = "overriden" +} +case class InheritsProductElementName_Override_SelfType(a: String, b: Int) extends C + +case class PrivateMembers(a: Int, private val b: Int, c: Int, private val d: Int, e: Int, private val f: Int) object Test extends App { - def pretty(p: Product2_13): String = + def pretty(p: Product): String = p.productElementNames.zip(p.productIterator) - .map { case (name, value) => s"$name=$value" } - .mkString(p.productPrefix + "(", ", ", ")") + .map { case (name, value) => s"$name=$value" } + .mkString(p.productPrefix + "(", ", ", ")") println(pretty(User("Susan", 42))) - println(pretty(ユーザー("Susan", 42))) + println(pretty(ユーザ("Susan", 42))) println(pretty(U$er("Susan", 42))) println(pretty(`type`("Susan", 42))) println(pretty(`contains spaces`("Susan", 42))) @@ -80,17 +61,17 @@ object Test extends App { println(pretty(MultipleParamLists("Susan", 42)(true))) println(pretty(AuxiliaryConstructor("Susan", 42))) println(pretty(OverloadedApply("Susan"))) -// println(pretty(DefinesProductElementName("Susan", 42))) + println(pretty(DefinesProductElementName("Susan", 42))) + + // uses the synthetic, not the one defined in the trait + println(pretty(InheritsProductElementName("Susan", 42))) -// // uses the synthetic, not the one defined in the trait -// println(pretty(InheritsProductElementName("Susan", 42))) -// -// // uses the override defined in the trait -// println(pretty(InheritsProductElementName_Override("Susan", 42))) -// -// // uses the synthetic, not the one defined in the trait -// println(pretty(InheritsProductElementName_Override_SelfType("Susan", 42))) + // uses the override defined in the trait + println(pretty(InheritsProductElementName_Override("Susan", 42))) + + // uses the synthetic, not the one defined in the trait + println(pretty(InheritsProductElementName_Override_SelfType("Susan", 42))) println(pretty(PrivateMembers(10, 20, 30, 40, 50, 60))) - println(pretty(NoParams())) } + diff --git a/tests/run/range.scala b/tests/run/range.scala index ee934f6279a6..ed4aeacffc09 100644 --- a/tests/run/range.scala +++ b/tests/run/range.scala @@ -4,7 +4,7 @@ object Test { def rangeForeach(range : Range) = { val buffer = new scala.collection.mutable.ListBuffer[Int]; range.foreach(buffer += _); - assert(buffer.toList == range.iterator.toList, buffer.toList+"/"+range.iterator.toList) + assert(buffer.toList == range.iterator.toList, buffer.toList.toString + "/" + range.iterator.toList) } def boundaryTests() = { @@ -36,28 +36,30 @@ object Test { def gr1 = NumericRange(x, x, x) def gr2 = NumericRange.inclusive(x, x, x) - def gr3 = NumericRange(x, x * fromInt(10), x) - def gr4 = NumericRange.inclusive(x, x * fromInt(10), x) - def gr5 = gr3.toList ::: negated.gr3.toList + def gr3 = NumericRange(x, x * fromInt(4), x * fromInt(2)) // scala/bug#9348 + def gr4 = NumericRange(x, x * fromInt(-2), x * fromInt(-2)) + def gr5 = NumericRange(x, x * fromInt(10), x) + def gr6 = NumericRange.inclusive(x, x * fromInt(10), x) + def gr7 = gr3.toList ::: negated.gr3.toList def check = { assert(gr1.isEmpty && !gr2.isEmpty) - assert(gr3.size == 9 && gr4.size == 10) 
- assert(gr5.sum == num.zero, gr5.toString) - assert(!(gr3 contains (x * fromInt(10)))) - assert((gr4 contains (x * fromInt(10)))) + assert(gr3.size == 2 && gr4.size == 2) + assert(gr5.size == 9 && gr6.size == 10) + assert(gr7.sum == num.zero, gr7.toString) + assert(!(gr5 contains (x * fromInt(10)))) + assert(gr6 contains (x * fromInt(10))) } } def main(args: Array[String]): Unit = { - implicit val imp1: Numeric.BigDecimalAsIfIntegral.type = Numeric.BigDecimalAsIfIntegral - implicit val imp2: Numeric.DoubleAsIfIntegral.type = Numeric.DoubleAsIfIntegral + implicit val imp1: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral val _grs = List[GR[_]]( GR(BigDecimal(5.0)), + GR(BigDecimal(0.25)), // scala/bug#9348 GR(BigInt(5)), GR(5L), - GR(5.0d), GR(2.toByte) ) val grs = _grs ::: (_grs map (_.negated)) diff --git a/tests/run/sammy_repeated.check b/tests/run/sammy_repeated.check index 1cff0f067c46..9d495850f927 100644 --- a/tests/run/sammy_repeated.check +++ b/tests/run/sammy_repeated.check @@ -1 +1 @@ -WrappedArray(1) +ArraySeq(1) diff --git a/tests/run/scala2mixins.scala b/tests/run/scala2mixins.scala deleted file mode 100644 index 059e156f3582..000000000000 --- a/tests/run/scala2mixins.scala +++ /dev/null @@ -1,23 +0,0 @@ -import scala.collection.IndexedSeqOptimized -import scala.collection.mutable.Builder - -object Test { - class Name extends Seq[Int] - with IndexedSeqOptimized[Int, Name] { - val underlying = 0 to 10 - def length: Int = underlying.length - - def apply(idx: Int): Int = underlying(idx) - - override protected[this] def newBuilder: Builder[Int, Name] = ??? - - override def seq = toCollection(this) - - } - def main(args: Array[String]): Unit = { - val n = new Name - // need to make sure that super accessors were emitted - // ends with calls super.endsWith if argument is not an IndexedSeq - assert(n.endsWith(10 :: Nil)) - } -} diff --git a/tests/run/scala2trait-lazyval.scala b/tests/run/scala2trait-lazyval.scala deleted file mode 100644 index b4c492166b52..000000000000 --- a/tests/run/scala2trait-lazyval.scala +++ /dev/null @@ -1,17 +0,0 @@ -class Foo extends scala.collection.SeqView[Int, List[Int]] { - def iterator: Iterator[Int] = null - def apply(idx: Int): Int = idx - def length: Int = 0 - protected def underlying = null -} - -object Test { - def main(args: Array[String]): Unit = { - val f: scala.collection.TraversableViewLike[Int, List[Int], _] = new Foo - new f.Transformed[Int] { - def foreach[U](f: Int => U): Unit = () - // underlying is a lazy val - assert(underlying == null) - } - } -} diff --git a/tests/run/scan.scala b/tests/run/scan.scala index a43da3387b3e..d6708d325fd2 100644 --- a/tests/run/scan.scala +++ b/tests/run/scan.scala @@ -1,7 +1,3 @@ - - - - object Test { def main(args: Array[String]): Unit = { @@ -14,10 +10,9 @@ object Test { assert(emp.scanLeft(0)(_ + _) == List(0)) assert(emp.scanRight(0)(_ + _) == List(0)) - val stream = Stream(1, 2, 3, 4, 5) - assert(stream.scanLeft(0)(_ + _) == Stream(0, 1, 3, 6, 10, 15)) + val stream = LazyList(1, 2, 3, 4, 5) + assert(stream.scanLeft(0)(_ + _) == LazyList(0, 1, 3, 6, 10, 15)) - assert(Stream.from(1).scanLeft(0)(_ + _).take(5) == Stream(0, 1, 3, 6, 10)) + assert(LazyList.from(1).scanLeft(0)(_ + _).take(5) == LazyList(0, 1, 3, 6, 10)) } - } diff --git a/tests/run/search.scala b/tests/run/search.scala index ed7fed54a707..329261bfac17 100644 --- a/tests/run/search.scala +++ b/tests/run/search.scala @@ -2,12 +2,12 @@ object Test extends App { import scala.collection.{LinearSeq, IndexedSeq} import 
scala.collection.Searching.search - val ls = LinearSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) + val ls = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) println(ls.search(3)) println(ls.search(5, 3, 8)) println(ls.search(12)) - val is = IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) + val is = Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) println(is.search(3)) println(is.search(5, 3, 8)) println(is.search(12)) diff --git a/tests/run/serialization-new.check b/tests/run/serialization-new.check index 40646e993a0b..697b2c3bf4d8 100644 --- a/tests/run/serialization-new.check +++ b/tests/run/serialization-new.check @@ -72,12 +72,12 @@ x = BitSet(2, 3) y = BitSet(2, 3) x equals y: true, y equals x: true -x = Map(1 -> A, 2 -> B, 3 -> C) -y = Map(1 -> A, 2 -> B, 3 -> C) +x = HashMap(1 -> A, 2 -> B, 3 -> C) +y = HashMap(1 -> A, 2 -> B, 3 -> C) x equals y: true, y equals x: true -x = Set(1, 2) -y = Set(1, 2) +x = HashSet(1, 2) +y = HashSet(1, 2) x equals y: true, y equals x: true x = List((buffers,20), (layers,2), (title,3)) @@ -104,24 +104,20 @@ x = NumericRange 0 until 10 y = NumericRange 0 until 10 x equals y: true, y equals x: true -x = Map(1 -> A, 2 -> B, 3 -> C) -y = Map(1 -> A, 2 -> B, 3 -> C) +x = TreeMap(1 -> A, 2 -> B, 3 -> C) +y = TreeMap(1 -> A, 2 -> B, 3 -> C) x equals y: true, y equals x: true x = TreeSet(1, 2, 3) y = TreeSet(1, 2, 3) x equals y: true, y equals x: true -x = Stack(c, b, a) -y = Stack(c, b, a) +x = LazyList(<not computed>) +y = LazyList(<not computed>) x equals y: true, y equals x: true -x = Stream(0, ?) -y = Stream(0, ?) -x equals y: true, y equals x: true - -x = Map(42 -> FortyTwo) -y = Map(42 -> FortyTwo) +x = TreeMap(42 -> FortyTwo) +y = TreeMap(42 -> FortyTwo) x equals y: true, y equals x: true x = TreeSet(0, 2) @@ -144,51 +140,39 @@ x = ArrayBuilder.ofFloat y = ArrayBuilder.ofFloat x equals y: true, y equals x: true -x = ArraySeq(1, 2, 3) -y = ArraySeq(1, 2, 3) -x equals y: true, y equals x: true - -x = ArrayStack(3, 2, 20) -y = ArrayStack(3, 2, 20) -x equals y: true, y equals x: true - x = BitSet(0, 8, 9) y = BitSet(0, 8, 9) x equals y: true, y equals x: true -x = Map(A -> 1, C -> 3, B -> 2) -y = Map(A -> 1, C -> 3, B -> 2) +x = HashMap(A -> 1, B -> 2, C -> 3) +y = HashMap(A -> 1, B -> 2, C -> 3) x equals y: true, y equals x: true -x = Set(buffers, title, layers) -y = Set(buffers, title, layers) +x = HashSet(buffers, layers, title) +y = HashSet(buffers, layers, title) x equals y: true, y equals x: true -x = History() -y = History() +x = LinkedHashMap(Linked -> 1, Hash -> 2, Map -> 3) +y = LinkedHashMap(Linked -> 1, Hash -> 2, Map -> 3) x equals y: true, y equals x: true -x = Map(Linked -> 1, Hash -> 2, Map -> 3) -y = Map(Linked -> 1, Hash -> 2, Map -> 3) -x equals y: true, y equals x: true - -x = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) -y = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) +x = List((Linked,1), (Hash,2), (Map,3)) +y = List((Linked,1), (Hash,2), (Map,3)) x equals y: true, y equals x: true -x = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) +x = List((Linked,1), (Hash,2), (Map,3)) y = List((Linked,1), (Hash,2), (Map,3)) x equals y: true, y equals x: true -x = Set(layers, buffers, title) -y = Set(layers, buffers, title) +x = LinkedHashSet(layers, buffers, title) +y = LinkedHashSet(layers, buffers, title) x equals y: true, y equals x: true -x = ArrayBuffer(layers, buffers, title) -y = ArrayBuffer(layers, buffers, title) +x = List(layers, buffers, title) +y = List(layers, buffers, title) x equals y: true, y equals x: true -x = ArrayBuffer(layers, buffers, title) y =
List(layers, buffers, title) x equals y: true, y equals x: true @@ -208,8 +192,8 @@ x = abc y = abc x equals y: true, y equals x: true -x = WrappedArray(1, 2, 3) -y = WrappedArray(1, 2, 3) +x = ArraySeq(1, 2, 3) +y = ArraySeq(1, 2, 3) x equals y: true, y equals x: true x = TreeSet(1, 2, 3) @@ -244,40 +228,3 @@ x equals y: true, y equals x: true 2 1 2 - -x = UnrolledBuffer(one, two) -y = UnrolledBuffer(one, two) -x equals y: true, y equals x: true - -x = ParArray(abc, def, etc) -y = ParArray(abc, def, etc) -x equals y: true, y equals x: true - -x = ParHashMap(2 -> 4, 1 -> 2) -y = ParHashMap(2 -> 4, 1 -> 2) -x equals y: true, y equals x: true - -x = ParTrieMap(1 -> 2, 2 -> 4) -y = ParTrieMap(1 -> 2, 2 -> 4) -x equals y: true, y equals x: true - -x = ParHashSet(1, 2, 3) -y = ParHashSet(1, 2, 3) -x equals y: true, y equals x: true - -x = ParRange 0 to 4 -y = ParRange 0 to 4 -x equals y: true, y equals x: true - -x = ParRange 0 until 4 -y = ParRange 0 until 4 -x equals y: true, y equals x: true - -x = ParMap(5 -> 1, 10 -> 2) -y = ParMap(5 -> 1, 10 -> 2) -x equals y: true, y equals x: true - -x = ParSet(two, one) -y = ParSet(two, one) -x equals y: true, y equals x: true - diff --git a/tests/run/serialization-new.scala b/tests/run/serialization-new.scala index 8da9d4b3db30..842b065fe5ad 100644 --- a/tests/run/serialization-new.scala +++ b/tests/run/serialization-new.scala @@ -180,7 +180,7 @@ object Test1_scala { object Test2_immutable { import scala.collection.immutable.{ BitSet, HashMap, HashSet, ListMap, ListSet, Queue, Range, SortedMap, - SortedSet, Stack, Stream, TreeMap, TreeSet, Vector} + SortedSet, LazyList, TreeMap, TreeSet, Vector} // in alphabetic order try { @@ -198,12 +198,12 @@ object Test2_immutable { check(bs2, _bs2) // HashMap - val hm1 = new HashMap[Int, String] + (1 -> "A", 2 -> "B", 3 -> "C") + val hm1 = HashMap.empty[Int, String] ++ List(1 -> "A", 2 -> "B", 3 -> "C") val _hm1: HashMap[Int, String] = read(write(hm1)) check(hm1, _hm1) // HashSet - val hs1 = new HashSet[Int] + 1 + 2 + val hs1 = HashSet.empty[Int] + 1 + 2 val _hs1: HashSet[Int] = read(write(hs1)) check(hs1, _hs1) @@ -213,7 +213,7 @@ object Test2_immutable { check(xs1, _xs1) // ListMap - val lm1 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3) + val lm1 = new ListMap[String, Int] ++ List("buffers" -> 20, "layers" -> 2, "title" -> 3) val _lm1: ListMap[String, Int] = read(write(lm1)) check(lm1, _lm1) @@ -237,7 +237,7 @@ object Test2_immutable { check(r2, _r2) // SortedMap - val sm1 = SortedMap.empty[Int, String] + (2 -> "B", 3 -> "C", 1 -> "A") + val sm1 = SortedMap.empty[Int, String] ++ List(2 -> "B", 3 -> "C", 1 -> "A") val _sm1: SortedMap[Int, String] = read(write(sm1)) check(sm1, _sm1) @@ -246,14 +246,9 @@ object Test2_immutable { val _ss1: SortedSet[Int] = read(write(ss1)) check(ss1, _ss1) - // Stack - val s1 = new Stack().push("a", "b", "c") - val _s1: Stack[String] = read(write(s1)) - check(s1, _s1) - - // Stream - val st1 = Stream.range(0, 10) - val _st1: Stream[Int] = read(write(st1)) + // LazyList + val st1 = LazyList.range(0, 10) + val _st1: LazyList[Int] = read(write(st1)) check(st1, _st1) // TreeMap @@ -284,9 +279,9 @@ object Test2_immutable { object Test3_mutable { import scala.reflect.ClassTag import scala.collection.mutable.{ - ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList, - HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer, - Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet} + ArrayBuffer, ArrayBuilder, 
BitSet, + HashMap, HashSet, LinkedHashMap, LinkedHashSet, ListBuffer, + Queue, Stack, StringBuilder, ArraySeq, TreeSet} import scala.collection.concurrent.TrieMap // in alphabetic order @@ -306,17 +301,6 @@ object Test3_mutable { val _abu2: ArrayBuilder[ClassTag[Float]] = read(write(abu2)) check(abu2, _abu2) - // ArraySeq - val aq1 = ArraySeq(1, 2, 3) - val _aq1: ArraySeq[Int] = read(write(aq1)) - check(aq1, _aq1) - - // ArrayStack - val as1 = new ArrayStack[Int] - as1 ++= List(20, 2, 3).iterator - val _as1: ArrayStack[Int] = read(write(as1)) - check(as1, _as1) - // BitSet val bs1 = new BitSet() bs1 += 0 @@ -324,13 +308,7 @@ object Test3_mutable { bs1 += 9 val _bs1: BitSet = read(write(bs1)) check(bs1, _bs1) -/* - // DoubleLinkedList - val dl1 = new DoubleLinkedList[Int](2, null) - dl1.append(new DoubleLinkedList(3, null)) - val _dl1: DoubleLinkedList[Int] = read(write(dl1)) - check(dl1, _dl1) -*/ + // HashMap val hm1 = new HashMap[String, Int] hm1 ++= List(("A", 1), ("B", 2), ("C", 3)).iterator @@ -343,10 +321,6 @@ object Test3_mutable { val _hs1: HashSet[String] = read(write(hs1)) check(hs1, _hs1) - val h1 = new History[String, Int] - val _h1: History[String, Int] = read(write(h1)) - check(h1, _h1) - // LinkedHashMap { val lhm1 = new LinkedHashMap[String, Int] val list = List(("Linked", 1), ("Hash", 2), ("Map", 3)) @@ -366,13 +340,7 @@ object Test3_mutable { check(lhs1.toSeq, _lhs1.toSeq) // check elements order check(lhs1.toSeq, list) // check elements order } -/* - // LinkedList - val ll1 = new LinkedList[Int](2, null) - ll1.append(new LinkedList(3, null)) - val _ll1: LinkedList[Int] = read(write(ll1)) - check(ll1, _ll1) -*/ + // ListBuffer val lb1 = new ListBuffer[String] lb1 ++= List("white", "black") @@ -397,9 +365,9 @@ object Test3_mutable { val _sb1: StringBuilder = read(write(sb1)) check(sb1, _sb1) - // WrappedArray - val wa1 = WrappedArray.make(Array(1, 2, 3)) - val _wa1: WrappedArray[Int] = read(write(wa1)) + // ArraySeq + val wa1 = ArraySeq.make(Array(1, 2, 3)) + val _wa1: ArraySeq[Int] = read(write(wa1)) check(wa1, _wa1) // TreeSet @@ -492,7 +460,7 @@ class Outer extends Serializable { object Test7 { val x = new Outer - val _ = x.Inner // initialize + x.Inner // initialize val y:Outer = read(write(x)) if (y.Inner == null) println("Inner object is null") @@ -538,85 +506,5 @@ object Test { Test6 Test7 Test8 - Test9_parallel - Test10_util - } -} - -//############################################################################ - - -//############################################################################ -// Test classes in package "scala.collection.parallel" and subpackages -object Test9_parallel { - import scala.collection.parallel._ - - try { - println() - - // UnrolledBuffer - val ub = new collection.mutable.UnrolledBuffer[String] - ub ++= List("one", "two") - val _ub: collection.mutable.UnrolledBuffer[String] = read(write(ub)) - check(ub, _ub) - - // mutable.ParArray - val pa = mutable.ParArray("abc", "def", "etc") - val _pa: mutable.ParArray[String] = read(write(pa)) - check(pa, _pa) - - // mutable.ParHashMap - val mpm = mutable.ParHashMap(1 -> 2, 2 -> 4) - val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm)) - check(mpm, _mpm) - - // mutable.ParTrieMap - val mpc = mutable.ParTrieMap(1 -> 2, 2 -> 4) - val _mpc: mutable.ParTrieMap[Int, Int] = read(write(mpc)) - check(mpc, _mpc) - - // mutable.ParHashSet - val mps = mutable.ParHashSet(1, 2, 3) - val _mps: mutable.ParHashSet[Int] = read(write(mps)) - check(mps, _mps) - - // immutable.ParRange - val pr1 = 
immutable.ParRange(0, 4, 1, true) - val _pr1: immutable.ParRange = read(write(pr1)) - check(pr1, _pr1) - - val pr2 = immutable.ParRange(0, 4, 1, false) - val _pr2: immutable.ParRange = read(write(pr2)) - check(pr2, _pr2) - - // immutable.ParHashMap - val ipm = immutable.ParHashMap(5 -> 1, 10 -> 2) - val _ipm: immutable.ParHashMap[Int, Int] = read(write(ipm)) - check(ipm, _ipm) - - // immutable.ParHashSet - val ips = immutable.ParHashSet("one", "two") - val _ips: immutable.ParHashSet[String] = read(write(ips)) - check(ips, _ips) - - } catch { - case e: Exception => - println("Error in Test5_parallel: " + e) - throw e - } -} - -//############################################################################ -// Test classes in package scala.util - -object Test10_util { - import scala.util.Random - def rep[A](n: Int)(f: => A): Unit = { if (n > 0) { f; rep(n-1)(f) } } - - { - val random = new Random(345) - val random2: Random = read(write(random)) - rep(5) { assert(random.nextInt == random2.nextInt) } } } - diff --git a/tests/run/serialize-stream.check b/tests/run/serialize-stream.check index e2a9f57aa776..04ec4005b710 100644 --- a/tests/run/serialize-stream.check +++ b/tests/run/serialize-stream.check @@ -1,6 +1,12 @@ -Stream(1, ?) +Stream(1, <not computed>) List(1, 2, 3) -Stream(1, ?) +Stream(1, <not computed>) List(1) Stream() List() +LazyList(<not computed>) +List(1, 2, 3) +LazyList(<not computed>) +List(1) +LazyList() +List() diff --git a/tests/run/serialize-stream.scala b/tests/run/serialize-stream.scala index fc84d610af6b..a8fb008f0788 100644 --- a/tests/run/serialize-stream.scala +++ b/tests/run/serialize-stream.scala @@ -1,7 +1,7 @@ object Test { - def ser[T](s: Stream[T]): Unit = { + def ser[T](s: Seq[T]): Unit = { val bos = new java.io.ByteArrayOutputStream() val oos = new java.io.ObjectOutputStream(bos) oos.writeObject(s) @@ -16,5 +16,9 @@ object Test { ser(Stream(1, 2, 3)) ser(Stream(1)) ser(Stream()) + + ser(LazyList(1, 2, 3)) + ser(LazyList(1)) + ser(LazyList()) } } diff --git a/tests/run/slices.check b/tests/run/slices.check index c7af9db46677..8413efe36678 100644 --- a/tests/run/slices.check +++ b/tests/run/slices.check @@ -16,19 +16,19 @@ List() List() List() -Array(2) -Array() -Array(1) -Array() -Array(1, 2) +List(2) +List() +List(1) +List() +List(1, 2) -Array(1, 2, 3) -Array(1, 2, 3) -Array(1, 2) -Array() +List(1, 2, 3) +List(1, 2, 3) +List(1, 2) +List() -Array(4) -Array() -Array() -Array() +List(4) +List() +List() +List() diff --git a/tests/run/slices.scala b/tests/run/slices.scala index 9a02dbc2d4d0..20d0789cba92 100644 --- a/tests/run/slices.scala +++ b/tests/run/slices.scala @@ -1,4 +1,3 @@ - import scala.language.postfixOps object Test extends App { @@ -26,24 +25,24 @@ object Test extends App { println() // arrays - println(Array(1, 2, 3, 4).slice(1, 2).deep) - println(Array(1, 2, 3, 4).slice(2, 1).deep) - println(Array(1, 2, 3, 4).slice(-1, 1).deep) - println(Array(1, 2, 3, 4).slice(1, -1).deep) - println(Array(1, 2, 3, 4).slice(-2, 2).deep) + println(Array(1, 2, 3, 4).slice(1, 2).toList) + println(Array(1, 2, 3, 4).slice(2, 1).toList) + println(Array(1, 2, 3, 4).slice(-1, 1).toList) + println(Array(1, 2, 3, 4).slice(1, -1).toList) + println(Array(1, 2, 3, 4).slice(-2, 2).toList) println() - println(Array(1, 2, 3, 4) take 3 deep) - println(Array(1, 2, 3) take 3 deep) - println(Array(1, 2) take 3 deep) - println((Array(): Array[Int]) take 3 deep) + println(Array(1, 2, 3, 4) take 3 toList) + println(Array(1, 2, 3) take 3 toList) + println(Array(1, 2) take 3 toList) + println((Array(): Array[Int]) take 3 toList) //
println(Array[Nothing]() take 3) // contrib #757 println() - println(Array(1, 2, 3, 4) drop 3 deep) - println(Array(1, 2, 3) drop 3 deep) - println(Array(1, 2) drop 3 deep) - println((Array(): Array[Int]) drop 3 deep) + println(Array(1, 2, 3, 4) drop 3 toList) + println(Array(1, 2, 3) drop 3 toList) + println(Array(1, 2) drop 3 toList) + println((Array(): Array[Int]) drop 3 toList) // println(Array[Nothing]() drop 3) println() } diff --git a/tests/run/stream-stack-overflow-filter-map.scala b/tests/run/stream-stack-overflow-filter-map.scala index bde7aea61058..c93ab4b8e470 100644 --- a/tests/run/stream-stack-overflow-filter-map.scala +++ b/tests/run/stream-stack-overflow-filter-map.scala @@ -1,29 +1,26 @@ -import collection.generic.{FilterMonadic, CanBuildFrom} +import collection.{IterableOps, BuildFrom} object Test extends App { - def mapSucc[Repr, That](s: FilterMonadic[Int, Repr])(implicit cbf: CanBuildFrom[Repr, Int, That]) = s map (_ + 1) - def flatMapId[T, Repr, That](s: FilterMonadic[T, Repr])(implicit cbf: CanBuildFrom[Repr, T, That]) = s flatMap (Seq(_)) - - def testStreamPred(s: Stream[Int])(p: Int => Boolean): Unit = { + def testStreamPred(s: LazyList[Int])(p: Int => Boolean): Unit = { val res1 = s withFilter p val res2 = s filter p val expected = s.toSeq filter p - val fMapped1 = flatMapId(res1) - val fMapped2 = flatMapId(res2) + val fMapped1 = res1.flatMap(Seq(_)) + val fMapped2 = res2.flatMap(Seq(_)) assert(fMapped1 == fMapped2) assert(fMapped1.toSeq == expected) - val mapped1 = mapSucc(res1) - val mapped2 = mapSucc(res2) + val mapped1 = res1 map (_ + 1) + val mapped2 = res2 map (_ + 1) assert(mapped1 == mapped2) assert(mapped1.toSeq == (expected map (_ + 1))) assert((res1 map identity).toSeq == res2.toSeq) } - def testStream(s: Stream[Int]): Unit = { + def testStream(s: LazyList[Int]): Unit = { testStreamPred(s)(_ => false) testStreamPred(s)(_ => true) testStreamPred(s)(_ % 2 == 0) @@ -32,12 +29,12 @@ object Test extends App { //Reduced version of the test case - either invocation used to cause a stack //overflow before commit 80b3f433e5536d086806fa108ccdfacf10719cc2. - val resFMap = (1 to 10000).toStream withFilter (_ => false) flatMap (Seq(_)) - val resMap = (1 to 10000).toStream withFilter (_ => false) map (_ + 1) + val resFMap = (1 to 10000).to(LazyList) withFilter (_ => false) flatMap (Seq(_)) + val resMap = (1 to 10000).to(LazyList) withFilter (_ => false) map (_ + 1) //Complete test case for withFilter + map/flatMap, as requested by @axel22. for (j <- (0 to 3) :+ 10000) { - val stream = (1 to j).toStream + val stream = (1 to j).to(LazyList) assert(stream.toSeq == (1 to j).toSeq) testStream(stream) } diff --git a/tests/run/streamWithFilter.scala b/tests/run/streamWithFilter.scala index cb919d4f5568..806e65d950c4 100644 --- a/tests/run/streamWithFilter.scala +++ b/tests/run/streamWithFilter.scala @@ -1,5 +1,5 @@ object Test { - val nums = Stream.from(1) + val nums = LazyList.from(1) def isFizz(x: Int) = x % 3 == 0 def isBuzz(x: Int) = x % 5 == 0 // next line will run forever if withFilter isn't doing its thing. 
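Not part of the patch: a minimal, self-contained Scala sketch of the Stream-to-LazyList idioms the updated tests above rely on. The object name and sample values are illustrative only, and it assumes the Scala 2.13 standard library these tests now compile against.

// Illustrative sketch only; not applied by this diff.
object LazyListMigrationSketch {
  // lazyAppendedAll replaces Stream#append in self-referential definitions.
  lazy val odds: LazyList[Int] = LazyList(1).lazyAppendedAll(odds.map(_ + 2))

  def main(args: Array[String]): Unit = {
    // LazyList.from(1) replaces Stream.from(1); LazyList is lazy in both head and tail.
    val nums = LazyList.from(1)

    // withFilter/map stay lazy: only the prefix up to the first match is forced,
    // which is what keeps the streamWithFilter test above from running forever.
    val firstFizzBuzz = nums.withFilter(n => n % 3 == 0 && n % 5 == 0).map(identity).head
    assert(firstFizzBuzz == 15)

    assert(odds.take(3).toList == List(1, 3, 5))

    // An unevaluated LazyList renders as "<not computed>" where Stream printed "?".
    println(LazyList.from(1)) // LazyList(<not computed>)
  }
}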
diff --git a/tests/run/stream_flatmap_odds.check b/tests/run/stream_flatmap_odds.check index 2b945e7c6492..be33ef6e8f44 100644 --- a/tests/run/stream_flatmap_odds.check +++ b/tests/run/stream_flatmap_odds.check @@ -1 +1 @@ -Stream(1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83) +LazyList(1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83) diff --git a/tests/run/stream_flatmap_odds.scala b/tests/run/stream_flatmap_odds.scala index 1935253595c5..80ba1a749d4b 100644 --- a/tests/run/stream_flatmap_odds.scala +++ b/tests/run/stream_flatmap_odds.scala @@ -1,4 +1,4 @@ object Test extends App { - lazy val odds: Stream[Int] = Stream(1) append ( odds flatMap {x => Stream(x + 2)} ) + lazy val odds: LazyList[Int] = LazyList(1) lazyAppendedAll ( odds flatMap {x => LazyList(x + 2)} ) Console println (odds take 42).force } diff --git a/tests/run/t0017.check b/tests/run/t0017.check index 3a72142467e5..db31ab64da05 100644 --- a/tests/run/t0017.check +++ b/tests/run/t0017.check @@ -1 +1 @@ -Array(ArraySeq(1, 3), ArraySeq(2, 4)) +List(ArraySeq(1, 3), ArraySeq(2, 4)) diff --git a/tests/run/t0017.scala b/tests/run/t0017.scala index 245cbb7e42e4..2676b98930b5 100644 --- a/tests/run/t0017.scala +++ b/tests/run/t0017.scala @@ -1,17 +1,17 @@ -object Test extends App { - -def transpose[A](arr: Array[Array[A]]) = { - for (i <- Array.range(0, arr(0).length)) yield - for (row <- arr) yield row(i) -} +import scala.collection.mutable -var my_arr = Array(Array(1,2),Array(3,4)) +object Test extends App { + def transpose[A](arr: Array[Array[A]]) = { + for (i <- Array.range(0, arr(0).length)) yield + for (row <- arr.toIndexedSeq) yield row(i) + } -for (i <- Array.range(0, my_arr(0).length)) yield - for (row <- my_arr) yield row(i) + var my_arr = Array(Array(1, 2), Array(3, 4)) -val transposed = transpose(my_arr) + for (i <- Array.range(0, my_arr(0).length)) yield + for (row <- my_arr) yield row(i) -println(transposed.deep.toString) + val transposed = transpose(my_arr) + println(transposed.toList.toString) } diff --git a/tests/run/t0677-old.scala b/tests/run/t0677-old.scala index 8d4c3ee06056..d51d7d7ce6fc 100644 --- a/tests/run/t0677-old.scala +++ b/tests/run/t0677-old.scala @@ -2,7 +2,7 @@ @deprecated("Suppress warnings", since="2.11") object Test extends App { - class X[T: ClassManifest] { + class X[T: scala.reflect.ClassManifest] { val a = Array.ofDim[T](3, 4) } val x = new X[String] diff --git a/tests/run/t1074.check b/tests/run/t1074.check index ccf1cb1551cc..9e12a97e44d0 100644 --- a/tests/run/t1074.check +++ b/tests/run/t1074.check @@ -1,3 +1,3 @@ -q0 = Set(kl, jk, cd, fg, ef, gh, a, de, hj, b, lm, mn) -q1 = Set() 0 -q2 = Set() 0 +q0 = List(a, b, cd, de, ef, fg, gh, hj, jk, kl, lm, mn) +q1 = HashSet() 0 +q2 = HashSet() 0 diff --git a/tests/run/t1074.scala b/tests/run/t1074.scala index a95f9eedbc9c..21fc45df8bdd 100644 --- a/tests/run/t1074.scala +++ b/tests/run/t1074.scala @@ -3,11 +3,10 @@ object Test { def main(args : Array[String]) : Unit = { var words = "a" :: "b" :: "cd" :: "de" :: "fg" :: "ef" :: "gh" :: "jk" :: "hj" :: "kl" :: "lm" :: "mn" :: Nil - val q0:Set[String] = - new HashSet[String]() ++ words + val q0:Set[String] = HashSet[String]() ++ words val q1 = q0.filter(w => false) val q2 = q1.filter(w => false) - Console.println("q0 = " + q0) + Console.println("q0 = " + q0.toList.sorted) 
Console.println("q1 = " + q1+" "+q1.size) Console.println("q2 = " + q2+" "+q2.size) } diff --git a/tests/run/t1192.check b/tests/run/t1192.check index 57234e1d8a7e..33ccccda84c8 100644 --- a/tests/run/t1192.check +++ b/tests/run/t1192.check @@ -1,2 +1,2 @@ -Array(1, 2) -Array(3, 4) +List(1, 2) +List(3, 4) diff --git a/tests/run/t1192.scala b/tests/run/t1192.scala index cdff7ef72d0f..00519909207f 100644 --- a/tests/run/t1192.scala +++ b/tests/run/t1192.scala @@ -1,7 +1,7 @@ object Test extends App { val v1: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4)) def f[T](w: Array[Array[T]]): Unit = { - for (r <- w) println(r.deep.toString) + for (r <- w) println(r.toList.toString) } f(v1) } diff --git a/tests/run/t1309.scala b/tests/run/t1309.scala index 84963549a27e..ca1429aed13d 100644 --- a/tests/run/t1309.scala +++ b/tests/run/t1309.scala @@ -2,6 +2,6 @@ object Test { def f(ras: => IndexedSeq[Byte]): IndexedSeq[Byte] = ras def main(args: Array[String]): Unit = { - f(new Array[Byte](0)) + f(new Array[Byte](0).toIndexedSeq) } } diff --git a/tests/run/t1323.scala b/tests/run/t1323.scala index 94b51bd2a4aa..17a4c467789a 100644 --- a/tests/run/t1323.scala +++ b/tests/run/t1323.scala @@ -17,7 +17,7 @@ object Test extends App { println("16:" + List().indexOfSlice(List(1,2,3,4))) // -1 // Do some testing with infinite sequences - def from(n: Int): Stream[Int] = Stream.cons(n, from(n + 1)) + def from(n: Int): LazyList[Int] = LazyList.cons(n, from(n + 1)) println("17:" + List(1,2,3,4).indexOfSlice(from(1))) // -1 println("18:" + from(1).indexOfSlice(List(4,5,6))) // 3 diff --git a/tests/run/t153.check b/tests/run/t153.check index 648a6de7c307..05767e43555a 100644 --- a/tests/run/t153.check +++ b/tests/run/t153.check @@ -1 +1 @@ -Stream(262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1) +LazyList(262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1) diff --git a/tests/run/t153.scala b/tests/run/t153.scala index 3fdb4238650a..d51a30afdce7 100644 --- a/tests/run/t153.scala +++ b/tests/run/t153.scala @@ -1,5 +1,5 @@ object Test extends App { def powers(x: Int) = if ((x&(x-1))==0) Some(x) else None - val res = (Stream.range(1, 500000) flatMap powers).reverse + val res = (LazyList.range(1, 500000) flatMap powers).reverse println((res take 42).force) } diff --git a/tests/run/t2027.scala b/tests/run/t2027.scala index 96012d90fa3b..c0afb8d3415e 100644 --- a/tests/run/t2027.scala +++ b/tests/run/t2027.scala @@ -1,6 +1,6 @@ object Test { def main(args: Array[String]): Unit = { - def fibs: Stream[Int] = Stream.cons(0, Stream.cons(1, fibs.zip(fibs.tail).map(p => p._1 + p._2))) + def fibs: LazyList[Int] = LazyList.cons(0, LazyList.cons(1, fibs.zip(fibs.tail).map(p => p._1 + p._2))) println(fibs(2)) // stack overflow } } diff --git a/tests/run/t2074_2.check b/tests/run/t2074_2.check deleted file mode 100644 index 0876ef7d034b..000000000000 --- a/tests/run/t2074_2.check +++ /dev/null @@ -1,3 +0,0 @@ -SeqView(...) -SeqView(...) -SeqViewZ(...) 
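Not part of the patch: a small illustrative sketch of the pattern these test updates use to replace the removed Array.deep and to cope with scala.Seq meaning immutable.Seq in 2.13. Object and value names are hypothetical; it assumes the Scala 2.13 standard library.

// Illustrative sketch only; not applied by this diff.
object ArrayPrintingSketch {
  def main(args: Array[String]): Unit = {
    val a = Array(1, 2, 3)

    // 2.12 tests printed a.deep; `deep` is gone in 2.13, so the updated tests
    // convert to an immutable collection with a stable toString instead.
    println(a.toList) // List(1, 2, 3)

    // For nested arrays, mapping each row replaces deep's recursive rendering.
    val grid = Array.ofDim[Int](2, 2)
    println(grid.map(_.toList).toList) // List(List(0, 0), List(0, 0))

    // scala.Seq and scala.IndexedSeq are the immutable variants in 2.13; passing a raw
    // Array there now relies on a deprecated copying conversion, hence the explicit
    // .toIndexedSeq calls in the updated tests.
    def firstOf(xs: Seq[Int]): Int = xs.head
    println(firstOf(a.toIndexedSeq)) // 1
  }
}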
diff --git a/tests/run/t2074_2.scala b/tests/run/t2074_2.scala deleted file mode 100644 index 4624170f8961..000000000000 --- a/tests/run/t2074_2.scala +++ /dev/null @@ -1,22 +0,0 @@ -// replaced all occurrences of 'Vector' with 'IndexedSeq' -import scala.collection.immutable.IndexedSeq -import scala.collection.SeqView - -object Test { - val funWithCCE = List.range(1,11).view.patch(5, List(100,101), 2) - - val v = new SeqView[Int, IndexedSeq[Int]] { - def underlying = IndexedSeq(1,2,3) - def apply(idx: Int) = underlying(idx) - def length = underlying.length - def iterator = underlying.iterator - } - val w = IndexedSeq(1, 2, 3).view - - def main(args: Array[String]): Unit = { - println(v) - println(w) - println(go) - } - def go = v zip v -} diff --git a/tests/run/t2075.scala b/tests/run/t2075.scala index e3a68e4a8359..d1b04178d00b 100644 --- a/tests/run/t2075.scala +++ b/tests/run/t2075.scala @@ -1,7 +1,7 @@ object Test extends App { var tm = new scala.collection.immutable.TreeMap[Int,Int] for (i <- 0 to 100) - tm = tm.insert(i, i) + tm = tm.updated(i, i) tm.keySet.filter(_ < 40) } diff --git a/tests/run/t2147.scala b/tests/run/t2147.scala index 1a1dfddc7155..e9145a680453 100644 --- a/tests/run/t2147.scala +++ b/tests/run/t2147.scala @@ -1,8 +1,8 @@ object Test extends App { - val s: Seq[Int] = Stream.from(1) + val s: Seq[Int] = LazyList.from(1) val res0 = s.map(a => 2).head - val res1 = Stream.from(1).flatMap(a => List(1)).head + val res1 = LazyList.from(1).flatMap(a => List(1)).head - println((for{a <- Stream.from(1); b <- 1 to 5; if a > 10} yield a).head) - println((for{a <- Stream.from(1); b <- 1 to a; if a > 10} yield a).head) + println((for{a <- LazyList.from(1); b <- 1 to 5; if a > 10} yield a).head) + println((for{a <- LazyList.from(1); b <- 1 to a; if a > 10} yield a).head) } diff --git a/tests/run/t2176.check b/tests/run/t2176.check index 2298e8b6e914..0c51f2c3bdba 100644 --- a/tests/run/t2176.check +++ b/tests/run/t2176.check @@ -1 +1 @@ -Stream(1) +LazyList(1) diff --git a/tests/run/t2176.scala b/tests/run/t2176.scala index 2a46dcf2c422..826651f94723 100644 --- a/tests/run/t2176.scala +++ b/tests/run/t2176.scala @@ -1,4 +1,4 @@ object Test extends App { - val res0 = Stream.cons(1, Stream.cons( { println("ouch"); 2 }, Stream.empty)) + val res0 = LazyList.cons(1, LazyList.cons( { println("ouch"); 2 }, LazyList.empty)) println(res0.take(1).force) } diff --git a/tests/run/t2177.scala b/tests/run/t2177.scala index 5e344c654d0d..e87a15b04428 100644 --- a/tests/run/t2177.scala +++ b/tests/run/t2177.scala @@ -1,3 +1,3 @@ object Test extends App { - println(Stream.from(1).take(5).mkString) + println(LazyList.from(1).take(5).mkString) } diff --git a/tests/run/t2212.check b/tests/run/t2212.check deleted file mode 100644 index 302bd0b6a852..000000000000 --- a/tests/run/t2212.check +++ /dev/null @@ -1,3 +0,0 @@ -LinkedList(1) -LinkedList(1) -true diff --git a/tests/run/t2212.scala b/tests/run/t2212.scala deleted file mode 100644 index 78fbfbe25590..000000000000 --- a/tests/run/t2212.scala +++ /dev/null @@ -1,10 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - import collection.mutable._ - val x4 = LinkedList[Int](1) - println(x4) - val y4 = LinkedList[Int](1) - println(y4) - println(x4 equals y4) // or (y4 equals x4) - } -} diff --git a/tests/run/t2250.scala b/tests/run/t2250.scala deleted file mode 100644 index 1ed333792ae9..000000000000 --- a/tests/run/t2250.scala +++ /dev/null @@ -1,12 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - val a: Array[String] 
= "goobledy bing, goobledy bling, wikka wokka wup.".split("") - val b = java.util.Arrays.asList(a: _*) - java.util.Collections.shuffle(b) - - // we'll say rather unlikely a.sameElements(b) unless - // they are pointing to the same array - import scala.collection.JavaConversions._ - assert(a sameElements b) - } -} diff --git a/tests/run/t2255.scala b/tests/run/t2255.scala index 0b34115ed231..5b0d6445f50d 100644 --- a/tests/run/t2255.scala +++ b/tests/run/t2255.scala @@ -1,3 +1,3 @@ object Test extends App { - println(Stream.continually(Stream(1, 2, 3)).flatten.take(6).toList) + println(LazyList.continually(LazyList(1, 2, 3)).flatten.take(6).toList) } diff --git a/tests/run/t2524.scala b/tests/run/t2524.scala deleted file mode 100644 index e806b8c3518b..000000000000 --- a/tests/run/t2524.scala +++ /dev/null @@ -1,10 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - val m = new collection.mutable.HashMap[String, String] { - override def initialSize = 0 - } - m.toString - m("key") = "value" - assert(m("key") == "value") - } -} diff --git a/tests/run/t2526.scala b/tests/run/t2526.scala index 4d567bbbf44a..f0e24f844833 100644 --- a/tests/run/t2526.scala +++ b/tests/run/t2526.scala @@ -27,7 +27,7 @@ object Test { } /* Checks foreach of `actual` goes over all the elements in `expected` */ - private def assertForeach[E](expected: Traversable[E], actual: Iterator[E]): Unit = { + private def assertForeach[E](expected: Iterable[E], actual: Iterator[E]): Unit = { val notYetFound = new mutable.ArrayBuffer[E]() ++= expected actual.foreach { e => assert(notYetFound.contains(e)) @@ -38,11 +38,11 @@ object Test { /* * Checks foreach of `actual` goes over all the elements in `expected` - * We duplicate the method above because there is no common inteface between Traversable and + * We duplicate the method above because there is no common interface between Iterable and * Iterator and we want to avoid converting between collections to ensure that we test what * we mean to test. 
*/ - private def assertForeach[E](expected: Traversable[E], actual: Traversable[E]): Unit = { + private def assertForeach[E](expected: Iterable[E], actual: Iterable[E]): Unit = { val notYetFound = new mutable.ArrayBuffer[E]() ++= expected actual.foreach { e => assert(notYetFound.contains(e)) diff --git a/tests/run/t2813.2.scala b/tests/run/t2813.2.scala deleted file mode 100644 index 768653f1def3..000000000000 --- a/tests/run/t2813.2.scala +++ /dev/null @@ -1,39 +0,0 @@ -import java.util.LinkedList -import collection.JavaConversions._ - -object Test extends App { - def assertListEquals[A](expected: List[A], actual: Seq[A]): Unit = { - assert(expected.sameElements(actual), - "Expected list to contain " + expected.mkString("[", ", ", "]") + - ", but was " + actual.mkString("[", ", ", "]")) - } - - def addAllOfNonCollectionWrapperAtZeroOnEmptyLinkedList(): Unit = { - val l = new LinkedList[Int] - l.addAll(0, List(1, 2)) - assertListEquals(List(1, 2), l) - } - - def addAllOfNonCollectionWrapperAtZeroOnLinkedList(): Unit = { - val l = new LinkedList[Int] += 1 += 2 - l.addAll(0, List(10, 11)) - assertListEquals((List(10, 11, 1, 2)), l) - } - - def addAllOfCollectionWrapperAtZeroOnLinkedList(): Unit = { - val l = new LinkedList[Int] += 1 += 2 - l.addAll(0, new LinkedList[Int] += 10 += 11) - assertListEquals((List(10, 11, 1, 2)), l) - } - - def addAllOfCollectionWrapperAtZeroOnEmptyLinkedList(): Unit = { - val l = new LinkedList[Int] - l.addAll(0, new LinkedList[Int] += 10 += 11) - assertListEquals((List(10, 11)), l) - } - - addAllOfNonCollectionWrapperAtZeroOnEmptyLinkedList() - addAllOfNonCollectionWrapperAtZeroOnLinkedList() - addAllOfCollectionWrapperAtZeroOnEmptyLinkedList() - addAllOfCollectionWrapperAtZeroOnLinkedList() -} diff --git a/tests/run/t2818.scala b/tests/run/t2818.scala index 746cdfb04923..5f86eceabd5f 100644 --- a/tests/run/t2818.scala +++ b/tests/run/t2818.scala @@ -1,6 +1,6 @@ object Test extends App { - println((List.range(1L, 15L) :\ 0L) (_ + _)) - println((List.range(1L, 1000000L) :\ 0L) (_ + _)) - println((List.fill(5)(1) :\ 1) (_ - _)) - println((List.fill(1000000)(1) :\ 1) (_ - _)) + println(List.range(1L, 15L).foldRight(0L)(_ + _)) + println(List.range(1L, 1000000L).foldRight(0L)(_ + _)) + println(List.fill(5)(1).foldRight(1)(_ - _)) + println(List.fill(1000000)(1).foldRight(1)(_ - _)) } diff --git a/tests/run/t3241.scala b/tests/run/t3241.scala index 40097a046f42..a9b39d67440b 100644 --- a/tests/run/t3241.scala +++ b/tests/run/t3241.scala @@ -15,7 +15,7 @@ object Test { def recurse(set: collection.immutable.Set[Int]): Unit = { if (!set.isEmpty) { - val x = set.toStream.head + val x = set.to(LazyList).head recurse(set - x) } } diff --git a/tests/run/t3273.scala b/tests/run/t3273.scala index 379a8a29c11b..585dffd22c13 100644 --- a/tests/run/t3273.scala +++ b/tests/run/t3273.scala @@ -1,6 +1,6 @@ object Test { - val num1: Stream[Int] = 1 #:: num1.map(_ + 1) - val num2: Stream[Int] = 1 #:: num2.iterator.map(_ + 1).toStream + val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) + val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) def main(args: Array[String]): Unit = { val x1 = (num1 take 10).toList diff --git a/tests/run/t3326.check b/tests/run/t3326.check index d0e11cebf770..8c3e93aa45dc 100644 --- a/tests/run/t3326.check +++ b/tests/run/t3326.check @@ -1,8 +1,8 @@ -Map(2 -> Hello, 1 -> World) -Map(5 -> Foo, 4 -> Bar) -Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World) -Map(3 -> ?, 2 -> Hello, 1 -> World) -Map(2 -> Hello, 1 -> World) -Map(5 -> Foo, 4 -> Bar) 
-Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World) -Map(3 -> ?, 2 -> Hello, 1 -> World) \ No newline at end of file +TreeMap(2 -> Hello, 1 -> World) +TreeMap(5 -> Foo, 4 -> Bar) +TreeMap(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World) +TreeMap(3 -> ?, 2 -> Hello, 1 -> World) +TreeMap(2 -> Hello, 1 -> World) +TreeMap(5 -> Foo, 4 -> Bar) +TreeMap(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World) +TreeMap(3 -> ?, 2 -> Hello, 1 -> World) diff --git a/tests/run/t3326.scala b/tests/run/t3326.scala index 5e403794dfe8..0e419bebd4f4 100644 --- a/tests/run/t3326.scala +++ b/tests/run/t3326.scala @@ -1,10 +1,5 @@ - - - import scala.math.Ordering - - /** The heart of the problem - we want to retain the ordering when * using `++` on sorted maps. * @@ -19,7 +14,7 @@ import scala.math.Ordering * This is why `collection.SortedMap` used to resort to the generic * `TraversableLike.++` which knows nothing about the ordering. * - * To avoid `collection.SortedMap`s resort to the more generic `TraverableLike.++`, + * To avoid `collection.SortedMap`s resort to the more generic `TraversableLike.++`, * we override the `MapLike.++` overload in `collection.SortedMap` to return * the proper type `SortedMap`. */ @@ -36,11 +31,11 @@ object Test { var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order) var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order) - m1 += (1 -> "World") - m1 += (2 -> "Hello") + m1 ++= List(1 -> "World") + m1 ++= List(2 -> "Hello") - m2 += (4 -> "Bar") - m2 += (5 -> "Foo") + m2 ++= List(4 -> "Bar") + m2 ++= List(5 -> "Foo") val m3: SortedMap[Int, String] = m1 ++ m2 @@ -48,7 +43,7 @@ object Test { println(m2) println(m3) - println(m1 + (3 -> "?")) + println(m1 ++ List(3 -> "?")) } def testImmutableSorted(): Unit = { diff --git a/tests/run/t3361.scala b/tests/run/t3361.scala deleted file mode 100644 index 25e095476f2c..000000000000 --- a/tests/run/t3361.scala +++ /dev/null @@ -1,98 +0,0 @@ -object Test extends App { - import scala.collection.mutable.DoubleLinkedList - - empty - builder_1 - builder_2 - chaining_1 - chaining_2 - insert_1 - insert_2 - append_1 - append_2 - - def empty: Unit = { - val none = DoubleLinkedList() - require(none.size == 0) - none.foreach( _ => require(false)) - } - - def builder_1: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - require(10 == ten.size) - } - - def builder_2: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - require((ten.size*(ten.size+1))/2 == ten.reduceLeft(_ + _)) - } - - def chaining_1: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - require(ten.reverse == DoubleLinkedList((1 to 10).reverse: _*)) - } - - def chaining_2: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - require(ten == ten.reverse.reverse) - } - - def insert_1: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - ten.append(DoubleLinkedList(11)) - - // Post-insert size test - require(11 == ten.size) - // Post-insert data test - require((ten.size*(ten.size+1))/2 == ten.reduceLeft(_ + _)) - // Post-insert chaining test - require(ten == ten.reverse.reverse) - // Post-insert position test - require(ten.last == 11) - } - - def insert_2: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - try { - DoubleLinkedList().insert(ten) - } catch { - case _: IllegalArgumentException => require(true) - case _: Throwable => require(false) - } - val zero = DoubleLinkedList(0) - zero.insert(ten) - require(zero.size == 11) - require(zero.head == 0) - require(zero.last == 10) - } - - def append_1: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - val eleven = 
ten.append(DoubleLinkedList(11)) - // Post-append equality test - require(ten == eleven) - // Post-append size test - require(11 == ten.size) - // Post-append data test - require((ten.size*(ten.size+1))/2 == ten.reduceLeft(_ + _)) - // Post-append chaining test - require(ten == ten.reverse.reverse) - // Post-append position test - require(ten.last == 11) - } - - def append_2: Unit = { - val ten = DoubleLinkedList(1 to 10: _*) - try { - DoubleLinkedList().append(ten) - } catch { - case _: IllegalArgumentException => require(true) - case _: Throwable => require(false) - } - val zero = DoubleLinkedList(0) - zero.append(ten) - require(zero.size == 11) - require(zero.head == 0) - require(zero.last == 10) - } -} diff --git a/tests/run/t3496.scala b/tests/run/t3496.scala index 35bc5db24032..80b945e55798 100644 --- a/tests/run/t3496.scala +++ b/tests/run/t3496.scala @@ -1,12 +1,8 @@ - - - - // ticket #3496 object Test { def main(args: Array[String]): Unit = { - val s = Stream.from(1) + val s = LazyList.from(1) s.take(5) s.drop(5) s.splitAt(5) diff --git a/tests/run/t3502.scala b/tests/run/t3502.scala index 2f432e4861b8..25f43bc8ae01 100644 --- a/tests/run/t3502.scala +++ b/tests/run/t3502.scala @@ -1,15 +1,10 @@ - - - - - // ticket #3502 object Test { object GeneratePrimeFactorsLazy extends (Int => List[Int]) { override def apply(n:Int) = { - val s = Stream.range(2, n / 2).filter(n % _ == 0) - //val s = for (i <- Stream.range(2, n / 2); if n % i == 0) yield i + val s = LazyList.range(2, n / 2).filter(n % _ == 0) + //val s = for (i <- LazyList.range(2, n / 2); if n % i == 0) yield i s.headOption.map(x => x :: apply(n / x)).getOrElse(List(n)) } } diff --git a/tests/run/t3508.scala b/tests/run/t3508.scala index 80ef89a61ba3..8278775d13ea 100644 --- a/tests/run/t3508.scala +++ b/tests/run/t3508.scala @@ -1,11 +1,8 @@ - - import collection.immutable._ - // ticket #3508 object Test { def main(args: Array[String]): Unit = { - assert(Stream.tabulate(123)(_ + 1).toList == List.tabulate(123)(_ + 1)) + assert(LazyList.tabulate(123)(_ + 1).toList == List.tabulate(123)(_ + 1)) } } diff --git a/tests/run/t3511.scala b/tests/run/t3511.scala deleted file mode 100644 index 0df0321adb4d..000000000000 --- a/tests/run/t3511.scala +++ /dev/null @@ -1,36 +0,0 @@ - - - -import scala.collection.immutable._ - - -// ticket #3511 -object Test { - - def main(args: Array[String]): Unit = { - assert(Stream.from(0).view.force.take(5) == List(0, 1, 2, 3, 4)) - - val s = Stream.from(0) - val smap = s.view.map(_ * 2).force.take(5) - assert(smap == List(0, 2, 4, 6, 8)) - - val sfilter = s.view.filter(_ % 2 == 0).force.take(5) - assert(sfilter == List(0, 2, 4, 6, 8)) - - val sflatmap = s.view.flatMap(n => List(n, n * 2)).force.take(6) - assert(sflatmap == List(0, 0, 1, 2, 2, 4)) - - val stakewhile = s.view.takeWhile(_ < 10).force - assert(stakewhile == List.range(0, 10)) - - val szip = s.view.zip(s.map(_ / 2)).force.take(5) - assert(szip == List((0, 0), (1, 0), (2, 1), (3, 1), (4, 2))) - - val szipall = s.view.zipAll(List(0, 1, 2), 0, 0).force.take(5) - assert(szipall == List((0, 0), (1, 1), (2, 2), (3, 0), (4, 0))) - - val spatch = s.view.patch(1, List(5, 5, 5), 5).force.take(5) - assert(spatch == List(0, 5, 5, 5, 6)) - } - -} diff --git a/tests/run/t3518.scala b/tests/run/t3518.scala deleted file mode 100644 index 033cc19548fb..000000000000 --- a/tests/run/t3518.scala +++ /dev/null @@ -1,16 +0,0 @@ -object Test { - val r1 = 1.0 to 10.0 by 0.5 - val r2 = 1.0 to 1.0 by 1.0 - val r3 = 10.0 to 1.0 by -0.5 - val r4 = 1.0 until 1.0 by 
1.0 - val r5 = 1 to 100 by 2 - - def main(args: Array[String]): Unit = { - assert(r3 forall (r1 contains _)) - assert(r1 forall (r3 contains _)) - assert(r2.size == 1) - assert(r4.isEmpty) - assert(List(1,3,5,97,99) forall (r5 contains _)) - assert(List(2,4,6,98,100) forall (x => !r5.contains(x))) - } -} diff --git a/tests/run/t3529.scala b/tests/run/t3529.scala index a5977d0a6c55..dfd234633a71 100644 --- a/tests/run/t3529.scala +++ b/tests/run/t3529.scala @@ -10,6 +10,6 @@ object Test { assert((1 to 10 drop 9) == Seq(10)) assert((1 until 10 drop 9) == Nil) - assert(Stream(1 to 10).flatten.toList == Stream(1 until 11).flatten.toList) + assert(LazyList(1 to 10).flatten.toList == LazyList(1 until 11).flatten.toList) } } diff --git a/tests/run/t3540.scala b/tests/run/t3540.scala index 5ffacb5dff3f..905193ee4045 100644 --- a/tests/run/t3540.scala +++ b/tests/run/t3540.scala @@ -1,7 +1,7 @@ object Test { def main(args: Array[String]): Unit = { assert(List.iterate(List(1,2,3), 4)(_.tail).last.isEmpty) - assert(Stream.iterate(Stream(1,2,3), 4)(_.tail).last.isEmpty) + assert(LazyList.iterate(LazyList(1,2,3), 4)(_.tail).last.isEmpty) assert(Array.iterate(Array(1,2,3), 4)(_.tail).last.isEmpty) } } diff --git a/tests/run/t3580.scala b/tests/run/t3580.scala deleted file mode 100644 index f91d5a24f8d7..000000000000 --- a/tests/run/t3580.scala +++ /dev/null @@ -1,17 +0,0 @@ - - - - - -object Test { - - class Empty extends Traversable[Nothing] { - def foreach[U](f: Nothing => U): Unit = {} - } - - def main(args: Array[String]): Unit = { - val t = new Empty - t.toStream - } - -} diff --git a/tests/run/t3603.scala b/tests/run/t3603.scala index 25ca49b54143..ac5314b06448 100644 --- a/tests/run/t3603.scala +++ b/tests/run/t3603.scala @@ -1,8 +1,4 @@ - - - object Test { - def main(args: Array[String]): Unit = { import collection.immutable._ @@ -14,5 +10,4 @@ object Test { val longres = longmap.map { case (a, b) => (a, b.toString) } assert(longres.isInstanceOf[LongMap[_]]) } - } diff --git a/tests/run/t3645.scala b/tests/run/t3645.scala index cada14cd1488..e354f18f8798 100644 --- a/tests/run/t3645.scala +++ b/tests/run/t3645.scala @@ -1,6 +1,6 @@ object Test { def main(args: Array[String]): Unit = { - val s = Stream.tabulate(5)(x => x+2) + val s = LazyList.tabulate(5)(x => x+2) assert( s.toList == List(2,3,4,5,6) ) } } diff --git a/tests/run/t3647.scala b/tests/run/t3647.scala deleted file mode 100644 index 8202c30e7fb4..000000000000 --- a/tests/run/t3647.scala +++ /dev/null @@ -1,23 +0,0 @@ - - - -import collection.immutable._ - - -object Test { - def main(args: Array[String]): Unit = { - val ps = PagedSeq.fromLines(List( - "line1", - "line2", - "line3", - "line4" - ).iterator) - assert(ps.filter(_ == '\n').size == 3) - - val ps1 = PagedSeq.fromLines(List("Ok").iterator) - assert(ps1.filter(_ == '\n').size == 0) - - val eps = PagedSeq.fromLines(List().iterator) - assert(eps.filter(_ == '\n').size == 0) - } -} diff --git a/tests/run/t3760.scala b/tests/run/t3760.scala index b78406824e80..9f9d982b91b4 100644 --- a/tests/run/t3760.scala +++ b/tests/run/t3760.scala @@ -9,7 +9,7 @@ object Test { { val it = Iterator(1, 2) - val xs = it.toStream.toList + val xs = it.to(LazyList).toList assert(it.isEmpty) } diff --git a/tests/run/t3923.scala b/tests/run/t3923.scala index 484095a6077f..7c40a474b29f 100644 --- a/tests/run/t3923.scala +++ b/tests/run/t3923.scala @@ -1,8 +1,8 @@ object Test { def main(args: Array[String]): Unit = { - assert(collection.mutable.ArraySeq() == Nil) - assert(collection.mutable.ArraySeq() == 
Seq()) - assert(Seq() == collection.mutable.ArraySeq()) - assert(Nil == collection.mutable.ArraySeq()) + assert(collection.mutable.ArraySeq[Int]() == Nil) + assert(collection.mutable.ArraySeq[Int]() == Seq()) + assert(Seq[Int]() == collection.mutable.ArraySeq[Int]()) + assert(Nil == collection.mutable.ArraySeq[Int]()) } } diff --git a/tests/run/t3970.scala b/tests/run/t3970.scala deleted file mode 100644 index 4a1b7fd00a95..000000000000 --- a/tests/run/t3970.scala +++ /dev/null @@ -1,21 +0,0 @@ - - - -import collection.mutable._ - - - -object Test { - def main(args: Array[String]): Unit = { - val dl = DoubleLinkedList[Int]() - dl.remove() - - val dl2 = DoubleLinkedList[Int](1, 2, 3) - dl2.next.remove() - assert(dl2 == DoubleLinkedList(1, 3)) - - val dl3 = DoubleLinkedList[Int](1, 2, 3) - assert(dl3.drop(1) == DoubleLinkedList(2, 3)) - assert(dl3.drop(1).prev == null) - } -} diff --git a/tests/run/t3984.scala b/tests/run/t3984.scala index 15e50b10fde5..fd4ffbc1adb0 100644 --- a/tests/run/t3984.scala +++ b/tests/run/t3984.scala @@ -11,12 +11,12 @@ object SetBug { var ms = MutSet.empty[IH] for (ih <- List(IH(2,0),IH(0,0),IH(4,4),IH(6,4),IH(-8,1520786080))) { is = is + ih - ms = ms + ih + ms = ms ++ List(ih) } assert(is == ms) val x = IH(6,4) is = is - x - ms = ms - x + ms = ms.filter(e => e != x) assert(is == ms) } } @@ -34,12 +34,12 @@ object MapBug { var mm = MutMap.empty[IH,IH] for (ih <- List(IH(2,0),IH(0,0),IH(4,4),IH(6,4),IH(-8,1520786080))) { im = im + ((ih,ih)) - mm = mm + ((ih,ih)) + mm = mm ++ List((ih,ih)) } assert(im == mm) val x = IH(6,4) im = im - x - mm = mm - x + mm = mm.view.filterKeys(k => k != x).to(MutMap) assert(im == mm) } } diff --git a/tests/run/t3996.check b/tests/run/t3996.check deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/tests/run/t3996.scala b/tests/run/t3996.scala deleted file mode 100644 index b40b3ec111f6..000000000000 --- a/tests/run/t3996.scala +++ /dev/null @@ -1,13 +0,0 @@ - - - - - -// should not result in a stack overflow -object Test { - def main(args: Array[String]): Unit = { - import collection.mutable.LinkedList - val l = new LinkedList[Int]() ++ (0 until 10000) - assert(l.length == 10000) - } -} diff --git a/tests/run/t4027.check b/tests/run/t4027.check index bdacfc1c065b..a3b77109726c 100644 --- a/tests/run/t4027.check +++ b/tests/run/t4027.check @@ -9,4 +9,4 @@ Map(1 -> false!, 2 -> true!, 3 -> false!, 4 -> true!) Map(2 -> 4, 4 -> 4) Map(1 -> 6, 2 -> 5, 3 -> 6, 4 -> 5) Map() -Map(1 -> false!) \ No newline at end of file +Map(1 -> false!) diff --git a/tests/run/t4027.scala b/tests/run/t4027.scala index 585c5c8b3775..0cbd1cbbb838 100644 --- a/tests/run/t4027.scala +++ b/tests/run/t4027.scala @@ -2,26 +2,22 @@ import collection._ - -/** Sorted maps should have `filterKeys` and `mapValues` which return sorted maps. - * Mapping, filtering, etc. on these views should return sorted maps again. - */ +// Sorted maps have `filterKeys` and `mapValues` which return MapView. +// Calling a transformation (map/filter) returns a View. 
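// Editor's note (hedged sketch, not part of the patch): the two comment lines added
// above summarise the 2.13 behaviour this test now exercises. `view.filterKeys` and
// `view.mapValues` on a sorted map yield a MapView, and mapping or filtering that view
// yields a plain View; to get a strict sorted map back you convert explicitly, for
// example with `.to(SortedMap)`. The object and method names below are invented for
// illustration only.
object SortedMapViewSketch {
  import scala.collection.immutable.SortedMap
  def demo(): Unit = {
    val sm = SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
    val evens = sm.view.filterKeys(_ % 2 == 0)                 // MapView[Int, Boolean]
    val strict: SortedMap[Int, Boolean] = evens.to(SortedMap)  // strict sorted map again
    assert(strict == SortedMap(2 -> true, 4 -> true))
  }
}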
object Test extends App { - val sortedmap = SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true) - println(sortedmap.filterKeys(_ % 2 == 0): SortedMap[Int, Boolean]) - println(sortedmap.mapValues(_ + "!"): SortedMap[Int, String]) - println(sortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int]) - println(sortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int]) - println(sortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): SortedMap[Int, Boolean]) - println(sortedmap.mapValues(_ + "!").filter(t => t._1 < 2): SortedMap[Int, String]) + println((sortedmap.view.filterKeys(_ % 2 == 0): MapView[Int, Boolean]).toMap) + println((sortedmap.view.mapValues(_.toString + "!"): MapView[Int, String]).toMap) + println((sortedmap.view.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): View[(Int, Int)]).toMap) + println((sortedmap.view.mapValues(_.toString + "!").map(t => (t._1, t._2.toString.length)): View[(Int, Int)]).toMap) + println((sortedmap.view.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): View[(Int, Boolean)]).toMap) + println((sortedmap.view.mapValues(_.toString + "!").filter(t => t._1 < 2): View[(Int, String)]).toMap) val immsortedmap = immutable.SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true) - println(immsortedmap.filterKeys(_ % 2 == 0): immutable.SortedMap[Int, Boolean]) - println(immsortedmap.mapValues(_ + "!"): immutable.SortedMap[Int, String]) - println(immsortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int]) - println(immsortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int]) - println(immsortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): immutable.SortedMap[Int, Boolean]) - println(immsortedmap.mapValues(_ + "!").filter(t => t._1 < 2): immutable.SortedMap[Int, String]) - + println((immsortedmap.view.filterKeys(_ % 2 == 0): MapView[Int, Boolean]).toMap) + println((immsortedmap.view.mapValues(_.toString + "!"): MapView[Int, String]).toMap) + println((immsortedmap.view.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): View[(Int, Int)]).toMap) + println((immsortedmap.view.mapValues(_.toString + "!").map(t => (t._1, t._2.toString.length)): View[(Int, Int)]).toMap) + println((immsortedmap.view.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): View[(Int, Boolean)]).toMap) + println((immsortedmap.view.mapValues(_.toString + "!").filter(t => t._1 < 2): View[(Int, String)]).toMap) } diff --git a/tests/run/t408.scala b/tests/run/t408.scala index 9e51e881edb0..bb05d1e9edeb 100644 --- a/tests/run/t408.scala +++ b/tests/run/t408.scala @@ -4,9 +4,9 @@ object Test val b = scala.collection.immutable.Set.empty ++ (0 to 100000) def main(args: Array[String]): Unit = { - a -- b - a -- b - a -- b - a -- b + a diff b + a diff b + a diff b + a diff b } } diff --git a/tests/run/t4080.check b/tests/run/t4080.check deleted file mode 100644 index 66ce31bb43cb..000000000000 --- a/tests/run/t4080.check +++ /dev/null @@ -1 +0,0 @@ -LinkedList(1, 0, 2, 3) diff --git a/tests/run/t4080.scala b/tests/run/t4080.scala deleted file mode 100644 index 551738018767..000000000000 --- a/tests/run/t4080.scala +++ /dev/null @@ -1,13 +0,0 @@ -import scala.collection.mutable.LinkedList -import java.util.NoSuchElementException - -object Test { - def main(args: Array[String]): Unit = { - val ll = LinkedList(1, 2, 3) - ll.insert(LinkedList(0)) - println(ll) - val ll2 = LinkedList[Int]() - try println("Empty head? 
" + ll2.head) - catch { case _: NoSuchElementException => () } - } -} diff --git a/tests/run/t4122.scala b/tests/run/t4122.scala index 5ff570c00966..d19130427269 100644 --- a/tests/run/t4122.scala +++ b/tests/run/t4122.scala @@ -1,14 +1,14 @@ object Test { val sw: Seq[Char] = "ab" - val sw2: Seq[Char] = Array('a', 'b') + val sw2: Seq[Char] = Array('a', 'b').toIndexedSeq val sw3 = Seq('a', 'b') val sw4 = "ab".toList val all = List(sw, sw2, sw3, sw4) def main(args: Array[String]): Unit = { for (s1 <- all ; s2 <- all) { - assert(s1 == s2, s1 + " != " + s2) - assert(s1.## == s2.##, s1 + ".## != " + s2 + ".##") + assert(s1 == s2, s"$s1 != $s2") + assert(s1.## == s2.##, s"$s1.## != $s2.##") } } } diff --git a/tests/run/t4201.scala b/tests/run/t4201.scala deleted file mode 100644 index f6c0acaf945b..000000000000 --- a/tests/run/t4201.scala +++ /dev/null @@ -1,7 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - val f = 0.0 to 1.0 by 1.0 / 3.0 - assert(f.size == 4) - } -} - diff --git a/tests/run/t4288.scala b/tests/run/t4288.scala index 23319d1c275b..5b6fcbe3dd12 100644 --- a/tests/run/t4288.scala +++ b/tests/run/t4288.scala @@ -2,12 +2,12 @@ object Test { def f1 = scala.collection.mutable.ListBuffer(1 to 9: _*).slice(-5, -1) def f2 = List(1 to 9: _*).slice(-5, -1) def f3 = Vector(1 to 9: _*).slice(-5, -1) - def f4 = Traversable(1 to 9: _*).slice(-5, -1) + def f4 = Iterable(1 to 9: _*).slice(-5, -1) def f5 = (1 to 9).toArray.slice(-5, -1) - def f6 = (1 to 9).toStream.slice(-5, -1) + def f6 = LazyList.from(1 to 9).slice(-5, -1) def f7 = (1 to 9).slice(-5, -1) def main(args: Array[String]): Unit = { - List[Traversable[Int]](f1, f2, f3, f4, f5, f6, f7) foreach (x => assert(x.isEmpty, x)) + List[Iterable[Int]](f1, f2, f3, f4, f5, f6, f7) foreach (x => assert(x.isEmpty, x)) } } diff --git a/tests/run/t4297.scala b/tests/run/t4297.scala index 7497dfe4c236..72f5b0069162 100644 --- a/tests/run/t4297.scala +++ b/tests/run/t4297.scala @@ -1,7 +1,7 @@ object Test { def main(args: Array[String]): Unit = { def f = List(1,2,3).view - assert(f.toString == "SeqView(...)") + assert(f.toString == "SeqView()") assert(f.mkString == "123") } } diff --git a/tests/run/t4332b.scala b/tests/run/t4332b.scala index 8ee069ca2d1d..a62c3b1b4904 100644 --- a/tests/run/t4332b.scala +++ b/tests/run/t4332b.scala @@ -13,7 +13,7 @@ object Test extends App { check(ls.sliding(N, 2).toList, ls.view.sliding(N, 2).toList.map(_.toList), s"sliding($N, 2)") } for (b <- List(true, false)) - check(ls.filterNot(x => true), ls.view.filterNot(x => true), s"filterNot($b)") + check(ls.filterNot(x => true), ls.view.filterNot(x => true).toList, s"filterNot($b)") check(ls.inits.toList, ls.view.inits.toList.map(_.toList), "inits") check(ls.tails.toList, ls.view.tails.toList.map(_.toList), "tails") @@ -31,5 +31,5 @@ object Test extends App { import collection.mutable.Buffer check(Buffer(1, 2, 3).tail, Buffer(1, 2, 3).view.tail.toList, "Buffer#tail") - check(Buffer(1, 2, 3).tail.length, Buffer(1, 2, 3).view.tail.length, "Buffer#tail#length") + check(Buffer(1, 2, 3).tail.length, Buffer(1, 2, 3).view.tail.size, "Buffer#tail#length") } diff --git a/tests/run/t4535.check b/tests/run/t4535.check index 9d4ce0d5352e..944163e0c760 100644 --- a/tests/run/t4535.check +++ b/tests/run/t4535.check @@ -1,3 +1,3 @@ -ArrayStack(1, 2, 3) -ArrayStack(1, 2, 3, 4, 5, 6) -ArrayStack(6, 5, 4, 3, 2, 1) \ No newline at end of file +Stack(1, 2, 3) +Stack(1, 2, 3, 4, 5, 6) +Stack(6, 5, 4, 3, 2, 1) diff --git a/tests/run/t4535.scala b/tests/run/t4535.scala index 
37aacb0adcf4..f1cb7c4df7aa 100644 --- a/tests/run/t4535.scala +++ b/tests/run/t4535.scala @@ -1,8 +1,5 @@ - - import collection._ - // #4535 object Test { @@ -26,5 +23,4 @@ object Test { assert(as == as.reverse.reverse) } } - } diff --git a/tests/run/t4608.scala b/tests/run/t4608.scala deleted file mode 100644 index 34586e2d0dec..000000000000 --- a/tests/run/t4608.scala +++ /dev/null @@ -1,8 +0,0 @@ -// #4608 -object Test { - - def main(args: Array[String]): Unit = { - ((1 to 100) sliding 10).toList.par.map{_.map{i => i * i}}.flatten - } - -} diff --git a/tests/run/t4660.scala b/tests/run/t4660.scala index 9aac10ddfd36..ae06aa87fb9d 100644 --- a/tests/run/t4660.scala +++ b/tests/run/t4660.scala @@ -1,7 +1,7 @@ object Test { def main(args: Array[String]): Unit = { val traversable = 1 to 20 map (_.toString) - def normalize(m: Map[Char, Traversable[String]]) = m.map { case (k,v) => (k, v.toList) } + def normalize(m: Map[Char, Iterable[String]]) = m.map { case (k,v) => (k, v.toList) } val groupedFromView = traversable.view.groupBy(_(0)) val groupedFromStrict = traversable.groupBy(_(0)) diff --git a/tests/run/t4697.scala b/tests/run/t4697.scala index 95592172e074..8308a19d24e0 100644 --- a/tests/run/t4697.scala +++ b/tests/run/t4697.scala @@ -1,5 +1,5 @@ object Test { - var st = Stream(0) + var st = LazyList(0) for (i <- 1 to 10000) st = i +: st def main(args: Array[String]): Unit = { diff --git a/tests/run/t4709.scala b/tests/run/t4709.scala index 29d0dac613ab..2d9119e33076 100644 --- a/tests/run/t4709.scala +++ b/tests/run/t4709.scala @@ -1,10 +1,5 @@ - - -import collection.GenSeq - - object Test { def main(args: Array[String]): Unit = { - val Seq(1, 2) = Stream(1, 2) + val Seq(1, 2) = LazyList(1, 2) } } diff --git a/tests/run/t4723.scala b/tests/run/t4723.scala deleted file mode 100644 index 462d0be43b15..000000000000 --- a/tests/run/t4723.scala +++ /dev/null @@ -1,9 +0,0 @@ - - - -object Test { - def main(args: Array[String]): Unit = { - assert(Nil == collection.parallel.ParSeq()) - assert(collection.parallel.ParSeq() == Nil) - } -} diff --git a/tests/run/t4761.scala b/tests/run/t4761.scala deleted file mode 100644 index c9d8576ab244..000000000000 --- a/tests/run/t4761.scala +++ /dev/null @@ -1,11 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - val gs = for (x <- (1 to 5)) yield { if (x % 2 == 0) List(1).seq else List(1).par } - println(gs.flatten) - println(gs.transpose) - - val s = Stream(Vector(1).par, Vector(2).par) - println(s.flatten.toList) - println(s.transpose.map(_.toList).toList) - } -} diff --git a/tests/run/t4813.check b/tests/run/t4813.check deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/tests/run/t4813.scala b/tests/run/t4813.scala index 6d48ca87588e..e6e65ffa41d5 100644 --- a/tests/run/t4813.scala +++ b/tests/run/t4813.scala @@ -1,25 +1,20 @@ import collection.mutable._ import reflect._ - object Test extends App { def runTest[T, U](col: T)(clone: T => U)(mod: T => Unit)(implicit ct: ClassTag[T]): Unit = { - val cloned = clone(col) - assert(cloned == col, s"cloned should be equal to original. $cloned != $col") - mod(col) - assert(cloned != col, s"cloned should not modify when original does: $ct") + val cloned = clone(col) + assert(cloned == col, s"cloned should be equal to original. 
$cloned != $col") + mod(col) + assert(cloned != col, s"cloned should not modify when original does: $ct") } // Seqs runTest(ArrayBuffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) } runTest(ArraySeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) } runTest(Buffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) } - runTest(DoubleLinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) } runTest(IndexedSeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) } - runTest(LinearSeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) } - runTest(LinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) } runTest(ListBuffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) } - runTest(MutableList(1,2,3))(_.clone) { buf => buf transform (_ + 1) } runTest(Queue(1,2,3))(_.clone) { buf => buf transform (_ + 1) } runTest(Stack(1,2,3))(_.clone) { buf => buf transform (_ + 1) } @@ -34,4 +29,3 @@ object Test extends App { runTest(HashMap(1->1,2->2,3->3))(_.clone) { buf => buf put (4,4) } runTest(WeakHashMap(1->1,2->2,3->3))(_.clone) { buf => buf put (4,4) } } - diff --git a/tests/run/t4835.scala b/tests/run/t4835.scala index 26275c0ab4fc..dc5a889baebe 100644 --- a/tests/run/t4835.scala +++ b/tests/run/t4835.scala @@ -1,5 +1,5 @@ /* - * Test case for SI-4835. This tests confirm that the fix + * Test case for scala/bug#4835. This tests confirm that the fix * doesn't break laziness. To test memory consumption, * I need to confirm that OutOfMemoryError doesn't occur. * I could create such tests. However, such tests consume @@ -7,7 +7,7 @@ */ object Test { private final val INFINITE = -1 - def testStreamIterator(num: Int, stream: Stream[Int]): Unit = { + def testLazyListIterator(num: Int, stream: LazyList[Int]): Unit = { val iter = stream.iterator print(num) // if num == -1, then steram is infinite sequence @@ -24,15 +24,15 @@ object Test { } def main(args: Array[String]): Unit = { - import Stream.{from, cons, empty} - testStreamIterator(INFINITE, from(0)) - testStreamIterator(INFINITE, from(0).filter(_ % 2 == 1)) - testStreamIterator(1, Stream(1)) - testStreamIterator(2, Stream(1, 2)) - //Stream with side effect - testStreamIterator(2, cons(1, cons({ print(" A"); 2}, empty))) - testStreamIterator(3, Stream(1, 2, 3)) - //Stream with side effect - testStreamIterator(3, cons(1, cons({ print(" A"); 2}, cons({ print(" B"); 3}, Stream.empty)))) + import LazyList.{from, cons, empty} + testLazyListIterator(INFINITE, from(0)) + testLazyListIterator(INFINITE, from(0).filter(_ % 2 == 1)) + testLazyListIterator(1, LazyList(1)) + testLazyListIterator(2, LazyList(1, 2)) + //LazyList with side effect + testLazyListIterator(2, cons(1, cons({ print(" A"); 2}, empty))) + testLazyListIterator(3, LazyList(1, 2, 3)) + //LazyList with side effect + testLazyListIterator(3, cons(1, cons({ print(" A"); 2}, cons({ print(" B"); 3}, LazyList.empty)))) } } diff --git a/tests/run/t4894.scala b/tests/run/t4894.scala deleted file mode 100644 index b2d915fdad13..000000000000 --- a/tests/run/t4894.scala +++ /dev/null @@ -1,27 +0,0 @@ - - - - - -object Test { - - def main(args: Array[String]): Unit = { - import collection._ - val hs = mutable.HashSet[Int]() - hs ++= 1 to 10 - hs --= 1 to 10 - - val phs = parallel.mutable.ParHashSet[Int]() - phs ++= 1 to 10 - for (i <- 1 to 10) assert(phs(i)) - phs --= 1 to 10 - assert(phs.isEmpty) - - val phm = parallel.mutable.ParHashMap[Int, Int]() - phm ++= ((1 to 10) zip (1 to 10)) - for (i <- 1 to 10) assert(phm(i) == i) - phm --= 1 to 10 - assert(phm.isEmpty) - } - -} diff --git a/tests/run/t4895.scala 
b/tests/run/t4895.scala deleted file mode 100644 index fdd091511ab7..000000000000 --- a/tests/run/t4895.scala +++ /dev/null @@ -1,16 +0,0 @@ -object Test { - - def checkPar(sz: Int): Unit = { - import collection._ - val hs = mutable.HashSet[Int]() ++ (1 to sz) - assert(hs.par.map(_ + 1).seq.toSeq.sorted == (2 to (sz + 1))) - } - - def main(args: Array[String]): Unit = { - for (i <- 0 until 100) checkPar(i) - for (i <- 100 until 1000 by 50) checkPar(i) - for (i <- 1000 until 10000 by 500) checkPar(i) - for (i <- 10000 until 100000 by 5000) checkPar(i) - } - -} diff --git a/tests/run/t4930.scala b/tests/run/t4930.scala index 775f62794845..46705729a1d2 100644 --- a/tests/run/t4930.scala +++ b/tests/run/t4930.scala @@ -1,11 +1,12 @@ import collection.immutable.SortedMap +import scala.math.Ordering.Implicits._ object Test { - implicit val ord: Ordering[Array[Byte]] = Ordering.by((_: Array[Byte]).toIterable) + implicit val ord: Ordering[Array[Byte]] = Ordering.by(x => x.toIterable: collection.Seq[Byte]) def main(args: Array[String]): Unit = { val m = SortedMap(Array[Byte](1) -> 0) - println(m.to(Array[Byte](1)) flatMap (_._1.mkString)) - println(m.from(Array[Byte](1)) flatMap (_._1.mkString)) + println(m.rangeTo(Array[Byte](1)).toSeq flatMap (_._1.mkString)) + println(m.rangeFrom(Array[Byte](1)).toSeq flatMap (_._1.mkString)) } } diff --git a/tests/run/t4954.scala b/tests/run/t4954.scala index 655a90f749cb..1b3c00a62601 100644 --- a/tests/run/t4954.scala +++ b/tests/run/t4954.scala @@ -1,8 +1,5 @@ - - import collection._ - object Test { def main(args: Array[String]): Unit = { @@ -26,20 +23,20 @@ object Test { assert(m.values.drop(5).iterator.toList == expvals.drop(5)) val pred = (x: String) => x.length < 6 - val filtered = m.filterKeys(pred) - assert(filtered.drop(0).keys.toList == expected.filter(pred)) - assert(filtered.drop(1).keys.toList == expected.filter(pred).drop(1)) - assert(filtered.drop(2).keys.toList == expected.filter(pred).drop(2)) - assert(filtered.drop(3).keys.toList == expected.filter(pred).drop(3)) - assert(filtered.drop(4).keys.toList == expected.filter(pred).drop(4)) - - val mapped = m.mapValues(-_) - assert(mapped.drop(0).keys.toList == expected) - assert(mapped.drop(1).keys.toList == expected.drop(1)) - assert(mapped.drop(2).keys.toList == expected.drop(2)) - assert(mapped.drop(3).keys.toList == expected.drop(3)) - assert(mapped.drop(4).keys.toList == expected.drop(4)) - assert(mapped.drop(5).keys.toList == expected.drop(5)) + val filtered = m.view.filterKeys(pred) + assert(filtered.drop(0).map(_._1).toList == expected.filter(pred)) + assert(filtered.drop(1).map(_._1).toList == expected.filter(pred).drop(1)) + assert(filtered.drop(2).map(_._1).toList == expected.filter(pred).drop(2)) + assert(filtered.drop(3).map(_._1).toList == expected.filter(pred).drop(3)) + assert(filtered.drop(4).map(_._1).toList == expected.filter(pred).drop(4)) + + val mapped = m.view.mapValues(-_) + assert(mapped.drop(0).map(_._1).toList == expected) + assert(mapped.drop(1).map(_._1).toList == expected.drop(1)) + assert(mapped.drop(2).map(_._1).toList == expected.drop(2)) + assert(mapped.drop(3).map(_._1).toList == expected.drop(3)) + assert(mapped.drop(4).map(_._1).toList == expected.drop(4)) + assert(mapped.drop(5).map(_._1).toList == expected.drop(5)) } } diff --git a/tests/run/t498.check b/tests/run/t498.check index b1ce75e80bc7..62a1fb8cfad7 100644 --- a/tests/run/t498.check +++ b/tests/run/t498.check @@ -1 +1 @@ -Stream(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1) +LazyList(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1) diff --git a/tests/run/t498.scala b/tests/run/t498.scala index 5c10e6630f76..26af41185706 100644 --- a/tests/run/t498.scala +++ b/tests/run/t498.scala @@ -3,6 +3,6 @@ import scala.language.postfixOps object Test extends App { // the function passed to flatMap produces lots of empty streams, but this should not overflow the stack - val res = Stream.from(1).flatMap(i => if (i < 3000) Stream.empty else List(1)) + val res = LazyList.from(1).flatMap(i => if (i < 3000) LazyList.empty else List(1)) println(res take 42 force) } diff --git a/tests/run/t5053.check b/tests/run/t5053.check index 5ec39bbdeb5c..1140ff52e2ba 100644 --- a/tests/run/t5053.check +++ b/tests/run/t5053.check @@ -2,5 +2,3 @@ true true true true -true -true diff --git a/tests/run/t5053.scala b/tests/run/t5053.scala index 50057ce66cab..6cd56788ad82 100644 --- a/tests/run/t5053.scala +++ b/tests/run/t5053.scala @@ -1,23 +1,17 @@ - import scala.language.{ existentials } +import collection.View object Test extends App { { val (left, right) = Seq((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip - println(left.isInstanceOf[scala.collection.SeqViewLike[_,_,_]]) + println(left.isInstanceOf[View[_]]) val (l, m, r) = Seq((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3 - println(l.isInstanceOf[scala.collection.SeqViewLike[_,_,_]]) + println(l.isInstanceOf[View[_]]) } { val (left, right) = Iterable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip - println(left.isInstanceOf[scala.collection.IterableViewLike[_,_,_]]) + println(left.isInstanceOf[View[_]]) val (l, m, r) = Iterable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3 - println(l.isInstanceOf[scala.collection.IterableViewLike[_,_,_]]) - } - { - val (left, right) = Traversable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip - println(left.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]]) - val (l, m, r) = Traversable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3 - println(l.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]]) + println(l.isInstanceOf[View[_]]) } } diff --git a/tests/run/t5201.scala b/tests/run/t5201.scala index 48aa7ba54cae..5c7855f46307 100644 --- a/tests/run/t5201.scala +++ b/tests/run/t5201.scala @@ -1,8 +1,5 @@ object Test extends App { - // First make sure specific types are preserved - val tmp: Vector[Int] = Vector(Vector(1,2), Vector(3,4)).view.flatten.force - // Now make sure we really get a view val seq = Seq(Seq(1, 2), Seq(3, 4)).view.flatten - Console.println(seq.isInstanceOf[collection.SeqView[_,_]]) + Console.println(seq.isInstanceOf[collection.View[_]]) } diff --git a/tests/run/t5328.check b/tests/run/t5328.check index 77a43968c56d..98026afd2f5b 100644 --- a/tests/run/t5328.check +++ b/tests/run/t5328.check @@ -1,3 +1,6 @@ 2 1,2,8 1,8,3 +2 +1,2,8 +1,8,3 diff --git a/tests/run/t5328.scala b/tests/run/t5328.scala index 12adf45b84fc..8072921decbb 100644 --- a/tests/run/t5328.scala +++ b/tests/run/t5328.scala @@ -2,4 +2,7 @@ object Test extends App { println(Vector(1).view.updated(0,2).toList mkString ",") println(Seq(1,2,3).view.updated(2,8).toList mkString ",") println(List(1,2,3).view.updated(1,8).toList mkString ",") + println(Vector(1).view.patch(0,List(2), 1).toList mkString ",") + println(Seq(1,2,3).view.patch(2,List(8), 1).toList mkString ",") + 
println(List(1,2,3).view.patch(1,List(8), 1).toList mkString ",") } diff --git a/tests/run/t5377.scala b/tests/run/t5377.scala index 2e8fb1a6af05..38532ff23d6b 100644 --- a/tests/run/t5377.scala +++ b/tests/run/t5377.scala @@ -1,5 +1,5 @@ object Test { - def testPermutations1(num: Int, stream: Stream[Int]): Unit = { + def testPermutations1(num: Int, stream: LazyList[Int]): Unit = { val perm = stream.permutations print(num) while(perm.hasNext) { @@ -17,31 +17,31 @@ object Test { } def main(args: Array[String]): Unit = { - testPermutations1(1, Stream(1)) + testPermutations1(1, LazyList(1)) testPermutations2(1, List(1)) - testPermutations1(2, Stream(1, 2)) + testPermutations1(2, LazyList(1, 2)) testPermutations2(2, List(1, 2)) - testPermutations1(2, Stream(2, 1)) + testPermutations1(2, LazyList(2, 1)) testPermutations2(2, List(2, 1)) - testPermutations1(3, Stream(1, 2, 3)) + testPermutations1(3, LazyList(1, 2, 3)) testPermutations2(3, List(1, 2, 3)) - testPermutations1(3, Stream(1, 3, 2)) + testPermutations1(3, LazyList(1, 3, 2)) testPermutations2(3, List(1, 3, 2)) - testPermutations1(3, Stream(2, 1, 3)) + testPermutations1(3, LazyList(2, 1, 3)) testPermutations2(3, List(2, 1, 3)) - testPermutations1(3, Stream(2, 3, 1)) + testPermutations1(3, LazyList(2, 3, 1)) testPermutations2(3, List(2, 3, 1)) - testPermutations1(3, Stream(3, 1, 2)) + testPermutations1(3, LazyList(3, 1, 2)) testPermutations2(3, List(3, 1, 2)) - testPermutations1(3, Stream(3, 2, 1)) + testPermutations1(3, LazyList(3, 2, 1)) testPermutations2(3, List(3, 2, 1)) } } diff --git a/tests/run/t5428.check b/tests/run/t5428.check deleted file mode 100644 index e3b08eef2012..000000000000 --- a/tests/run/t5428.check +++ /dev/null @@ -1 +0,0 @@ -Stack(8, 7, 6, 5, 4, 3) diff --git a/tests/run/t5428.scala b/tests/run/t5428.scala deleted file mode 100644 index 697dcbf5c9dc..000000000000 --- a/tests/run/t5428.scala +++ /dev/null @@ -1,29 +0,0 @@ - - - -import collection.mutable.{Stack, StackProxy} - - - -class A extends StackProxy[Int] { - val self = Stack[Int]() -} - - -object Test { - - def main(args: Array[String]): Unit = { - val a = new A - - a push 3 - a push 4 - a push 5 - - a.push(6, 7, 8) - - println(a) - - a.pop - } - -} diff --git a/tests/run/t5590.check b/tests/run/t5590.check index ad4a2eee6496..7006e5bcd2fa 100644 --- a/tests/run/t5590.check +++ b/tests/run/t5590.check @@ -1,4 +1,4 @@ -Map(a -> a, b -> b, c -> c) -Map(a -> a, b -> b, c -> c) -Set(a, b, c, d, e) -Set(a, b, c, d, e) \ No newline at end of file +LinkedHashMap(a -> a, b -> b, c -> c) +LinkedHashMap(a -> a, b -> b, c -> c) +LinkedHashSet(a, b, c, d, e) +LinkedHashSet(a, b, c, d, e) diff --git a/tests/run/t5656.scala b/tests/run/t5656.scala index 2c97d374d069..3cec1b47765e 100644 --- a/tests/run/t5656.scala +++ b/tests/run/t5656.scala @@ -1,11 +1,5 @@ - - - - object Test { - def main(args: Array[String]): Unit = { - println(Seq(List('1', '2', '3'), List('a', 'b', 'c')).view.addString(new StringBuilder, "_")) + println(Seq(List('1', '2', '3'), List('a', 'b', 'c')).view.addString(new StringBuilder,"_")) } - } diff --git a/tests/run/t5804.check b/tests/run/t5804.check deleted file mode 100644 index 3ccc1c24d3da..000000000000 --- a/tests/run/t5804.check +++ /dev/null @@ -1,4 +0,0 @@ -128 -16 -128 -32 \ No newline at end of file diff --git a/tests/run/t5804.scala b/tests/run/t5804.scala deleted file mode 100644 index 93cfa69ff70f..000000000000 --- a/tests/run/t5804.scala +++ /dev/null @@ -1,32 +0,0 @@ - - -import collection.mutable._ - - -object Test { - - def 
main(args: Array[String]): Unit = { - class CustomHashMap extends HashMap[Int, Int] { - override def initialSize = 65 - - println(table.length) - } - - new CustomHashMap - new HashMap { - println(table.length) - } - - class CustomHashSet extends HashSet[Int] { - override def initialSize = 96 - - println(table.length) - } - - new CustomHashSet - new HashSet { - println(table.length) - } - } - -} diff --git a/tests/run/t5857.scala b/tests/run/t5857.scala index fe67a75465a4..f3260a9a7736 100644 --- a/tests/run/t5857.scala +++ b/tests/run/t5857.scala @@ -21,14 +21,6 @@ object Test { val descending = sz to 1 by -1 check { assert(descending.min == 1) } check { assert(descending.max == sz) } - - val numeric = 1.0 to sz.toDouble by 1 - check { assert(numeric.min == 1.0) } - check { assert(numeric.max == sz.toDouble) } - - val numdesc = sz.toDouble to 1.0 by -1 - check { assert(numdesc.min == 1.0) } - check { assert(numdesc.max == sz.toDouble) } } def check[U](b: =>U): Unit = { diff --git a/tests/run/t5879.check b/tests/run/t5879.check index 4bdf3f5fcff0..5b14c9ea3ada 100644 --- a/tests/run/t5879.check +++ b/tests/run/t5879.check @@ -1,8 +1,8 @@ -Map(1 -> 1) +HashMap(1 -> 1) 1 (1,1) -Map(1 -> 1) +HashMap(1 -> 1) 1 (1,2) -Map(1 -> 2) +HashMap(1 -> 2) 2 diff --git a/tests/run/t5937.scala b/tests/run/t5937.scala deleted file mode 100644 index 9ec4ff12d36c..000000000000 --- a/tests/run/t5937.scala +++ /dev/null @@ -1,12 +0,0 @@ - - - -import collection._ - - - -object Test extends App { - - val list: List[Int] = (immutable.Vector(1, 2, 3) :+ 4)(breakOut) - -} diff --git a/tests/run/t5986.scala b/tests/run/t5986.scala index b05d488f206d..4b56bfd69b33 100644 --- a/tests/run/t5986.scala +++ b/tests/run/t5986.scala @@ -20,7 +20,7 @@ object Test { def check[S <: Set[Foo]](set: S): Unit = { def output(s: Set[Foo]) = println(s.toList.sorted.mkString(",")) - output(set + new Foo("bar", 2)) + output(set ++ List(new Foo("bar", 2))) output(set ++ List(new Foo("bar", 2), new Foo("bar", 3), new Foo("bar", 4))) output(set union Set(new Foo("bar", 2), new Foo("baz", 3), new Foo("bazz", 4))) } diff --git a/tests/run/t6052.scala b/tests/run/t6052.scala deleted file mode 100644 index fc70d396247e..000000000000 --- a/tests/run/t6052.scala +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - -object Test extends App { - def seqarr(i: Int) = Array[Int]() ++ (0 until i) - def pararr(i: Int) = seqarr(i).par - - def check[T](i: Int, f: Int => T): Unit = { - val gseq = seqarr(i).toSeq.groupBy(f) - val gpar = pararr(i).groupBy(f) - assert(gseq == gpar, (gseq, gpar)) - } - - for (i <- 0 until 20) check(i, _ > 0) - for (i <- 0 until 20) check(i, _ % 2) - for (i <- 0 until 20) check(i, _ % 4) -} diff --git a/tests/run/t6150.scala b/tests/run/t6150.scala deleted file mode 100644 index f3e83e15497a..000000000000 --- a/tests/run/t6150.scala +++ /dev/null @@ -1,36 +0,0 @@ -object Test { - import collection.{ immutable, mutable, generic } - def TheOneTrueCBF = collection.IndexedSeq.ReusableCBF - - val cbf1 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, collection.IndexedSeq[Int]]] - val cbf2 = implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, collection.IndexedSeq[Int]]] - val cbf3 = implicitly[generic.CanBuildFrom[collection.IndexedSeq[Int], Int, collection.IndexedSeq[Int]]] - - val cbf4 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, immutable.IndexedSeq[Int]]] - val cbf5 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, immutable.Vector[Int]]] - val cbf6 = 
implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, immutable.IndexedSeq[Int]]] - - def check[C](v: C) = { - assert(v == Vector(1, 2, 3, 4)) - assert(v.isInstanceOf[Vector[_]]) - } - def checkRealMccoy(x: AnyRef) = { - assert(x eq TheOneTrueCBF, cbf1) - } - - val v = immutable.Vector(1, 2, 3) - val iiv: immutable.IndexedSeq[Int] = immutable.Vector(1, 2, 3) - val iv: IndexedSeq[Int] = immutable.Vector(1, 2, 3) - - def main(args: Array[String]): Unit = { - List(cbf1, cbf2, cbf3, cbf4, cbf5, cbf6) foreach checkRealMccoy - check(v.:+(4)(cbf1)) - check(v.:+(4)(cbf2)) - check(v.:+(4)(cbf3)) - - check(iiv.:+(4)(cbf2)) - check(iiv.:+(4)(cbf3)) - - check(iv.:+(4)(cbf3)) - } -} diff --git a/tests/run/t6196.scala b/tests/run/t6196.scala deleted file mode 100644 index 19ac41193cb2..000000000000 --- a/tests/run/t6196.scala +++ /dev/null @@ -1,68 +0,0 @@ -import scala.collection.immutable.HashSet - -object Test extends App { - - case class Collision(value: Int) extends Ordered[Collision] { - def compare(that:Collision) = value compare that.value - - override def hashCode = value / 5 - } - - def testCorrectness[T : Ordering](n: Int, mkKey: Int => T): Unit = { - val o = implicitly[Ordering[T]] - val s = HashSet.empty[T] ++ (0 until n).map(mkKey) - for (i <- 0 until n) { - val ki = mkKey(i) - val a = s.filter(o.lt(_,ki)) - val b = s.filterNot(o.lt(_,ki)) - require(a.size == i && (0 until i).forall(i => a.contains(mkKey(i)))) - require(b.size == n - i && (i until n).forall(i => b.contains(mkKey(i)))) - } - } - - // this tests the structural sharing of the new filter - // I could not come up with a simple test that tests structural sharing when only parts are reused, but - // at least this fails with the old and passes with the new implementation - def testSharing(): Unit = { - val s = HashSet.empty[Int] ++ (0 until 100) - require(s.filter(_ => true) eq s) - require(s.filterNot(_ => false) eq s) - } - - // this tests that neither hashCode nor equals are called during filter - def testNoHashing(): Unit = { - var hashCount = 0 - var equalsCount = 0 - case class HashCounter(value:Int) extends Ordered[HashCounter] { - def compare(that:HashCounter) = value compare that.value - - override def hashCode = { - hashCount += 1 - value - } - - override def equals(that:Any) = { - equalsCount += 1 - that match { - case HashCounter(value) => this.value == value - case _ => false - } - } - } - - val s = HashSet.empty[HashCounter] ++ (0 until 100).map(HashCounter) - val hashCount0 = hashCount - val equalsCount0 = equalsCount - val t = s.filter(_1 with collisions should use HashSetCollision") - - // remove the collision again by removing all but one element - val y = x - Collision(0) - if(y.getClass.getSimpleName != "HashSet1") - println("HashSet of size 1 should use HashSet1" + y.getClass) -} diff --git a/tests/run/t6200.scala b/tests/run/t6200.scala deleted file mode 100644 index fd96b3ab5f14..000000000000 --- a/tests/run/t6200.scala +++ /dev/null @@ -1,68 +0,0 @@ -import scala.collection.immutable.HashMap - -object Test extends App { - - case class Collision(value: Int) extends Ordered[Collision] { - def compare(that: Collision) = value compare that.value - - override def hashCode = value / 5 - } - - def testCorrectness[T: Ordering](n: Int, mkKey: Int => T): Unit = { - val o = implicitly[Ordering[T]] - val s = HashMap.empty[T, Unit] ++ (0 until n).map(x => mkKey(x) -> (())) - for (i <- 0 until n) { - val ki = mkKey(i) - val a = s.filter(kv => o.lt(kv._1, ki)) - val b = s.filterNot(kv => o.lt(kv._1, ki)) - 
require(a.size == i && (0 until i).forall(i => a.contains(mkKey(i)))) - require(b.size == n - i && (i until n).forall(i => b.contains(mkKey(i)))) - } - } - - // this tests the structural sharing of the new filter - // I could not come up with a simple test that tests structural sharing when only parts are reused, but - // at least this fails with the old and passes with the new implementation - def testSharing(): Unit = { - val s = HashMap.empty[Int, Unit] ++ (0 until 100).map(_ -> (())) - require(s.filter(_ => true) eq s) - require(s.filterNot(_ => false) eq s) - } - - // this tests that neither hashCode nor equals are called during filter - def testNoHashing(): Unit = { - var hashCount = 0 - var equalsCount = 0 - case class HashCounter(value: Int) extends Ordered[HashCounter] { - def compare(that: HashCounter) = value compare that.value - - override def hashCode = { - hashCount += 1 - value - } - - override def equals(that: Any) = { - equalsCount += 1 - that match { - case HashCounter(value) => this.value == value - case _ => false - } - } - } - - val s = HashMap.empty[HashCounter, Unit] ++ (0 until 100).map(k => HashCounter(k) -> (())) - val hashCount0 = hashCount - val equalsCount0 = equalsCount - val t = s.filter(_._1 < HashCounter(50)) - require(hashCount == hashCount0) - require(equalsCount == equalsCount0) - } - - // this tests correctness of filter and filterNot for integer keys - testCorrectness[Int](100, identity _) - // this tests correctness of filter and filterNot for keys with lots of collisions - // this is necessary because usually collisions are rare so the collision-related code is not thoroughly tested - testCorrectness[Collision](100, Collision.apply _) - testSharing() - testNoHashing() -} diff --git a/tests/run/t6220.scala b/tests/run/t6220.scala deleted file mode 100644 index 85995e1ef213..000000000000 --- a/tests/run/t6220.scala +++ /dev/null @@ -1,92 +0,0 @@ -import scala.collection.immutable._ - -object Test extends App { - - // finds an int x such that improved(x) differs in the first bit to improved(0), - // which is the worst case for the HashTrieSet - def findWorstCaseInts(): Unit = { - // copy of improve from HashSet - def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) - } - - // find two hashes which have a large separation - val x = 0 - var y = 1 - val ix = improve(x) - while(y!=0 && improve(y)!=ix+(1<<31)) - y+=1 - printf("%s %s %x %x\n",x,y,improve(x), improve(y)) - } - // this is not done every test run since it would slow down ant test.suite too much. 
- // findWorstCaseInts() - - // two numbers that are immediately adiacent when fed through HashSet.improve - val h0 = 0 - val h1 = 1270889724 - - // h is the hashcode, i is ignored for the hashcode but relevant for equality - case class Collision(h:Int, i:Int) { - override def hashCode = h - } - val a = Collision(h0,0) - val b = Collision(h0,1) - val c = Collision(h1,0) - - // create a HashSetCollision1 - val x = HashSet(a) + b - if(x.getClass.getSimpleName != "HashSetCollision1") - println("x should be a collision") - StructureTests.validate(x) - // StructureTests.printStructure(x) - require(x.size==2 && x.contains(a) && x.contains(b)) - - // go from a HashSetCollision1 to a HashTrieSet with maximum depth - val y = x + c - if(y.getClass.getSimpleName != "HashTrieSet") - println("y should be a HashTrieSet") - StructureTests.validate(y) - // StructureTests.printStructure(y) - require(y.size==3 && y.contains(a) && y.contains(b) && y.contains(c)) - - // go from a HashSet1 directly to a HashTrieSet with maximum depth - val z = HashSet(a) + c - if(y.getClass.getSimpleName != "HashTrieSet") - println("y should be a HashTrieSet") - StructureTests.validate(z) - // StructureTests.printStructure(z) - require(z.size == 2 && z.contains(a) && z.contains(c)) -} - -package scala.collection.immutable { - object StructureTests { - def printStructure(x:HashSet[_], prefix:String=""): Unit = { - x match { - case m:HashSet.HashTrieSet[_] => - println(prefix+m.getClass.getSimpleName + " " + m.size) - m.elems.foreach(child => printStructure(child, prefix + " ")) - case m:HashSet.HashSetCollision1[_] => - println(prefix+m.getClass.getSimpleName + " " + m.ks.size) - case m:HashSet.HashSet1[_] => - println(prefix+m.getClass.getSimpleName + " " + m.head) - case _ => - println(prefix+"empty") - } - } - - def validate(x:HashSet[_]): Unit = { - x match { - case m:HashSet.HashTrieSet[_] => - require(m.elems.size>1 || (m.elems.size==1 && m.elems(0).isInstanceOf[HashSet.HashTrieSet[_]])) - m.elems.foreach(validate _) - case m:HashSet.HashSetCollision1[_] => - require(m.ks.size>1) - case m:HashSet.HashSet1[_] => - case _ => - } - } - } -} diff --git a/tests/run/t6261.scala b/tests/run/t6261.scala deleted file mode 100644 index 92193b8798c5..000000000000 --- a/tests/run/t6261.scala +++ /dev/null @@ -1,123 +0,0 @@ -import scala.collection.immutable._ - -object Test extends App { - - def test1(): Unit = { - // test that a HashTrieMap with one leaf element is not created! 
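// Editor's note (hedged sketch, not part of the patch): the tests deleted in this
// region (t6196, t6200, t6220, t6261) assert on 2.12-only internal node classes
// (HashSet1, HashSetCollision1, HashTrieSet, HashMap1, HashMapCollision1, HashTrieMap)
// that no longer exist in the 2.13 collections, which is why they are dropped rather
// than ported. Only a behaviour-level check through the public API can still be
// expressed; the names below are made up for illustration.
object HashCollisionSketch {
  import scala.collection.immutable.HashMap
  // force hash collisions without touching any internal representation
  final case class Collides(i: Int) { override def hashCode: Int = 0 }
  def demo(): Unit = {
    val m = HashMap(Collides(0) -> 0, Collides(1) -> 1)
    assert(m.size == 2 && m.contains(Collides(0)) && m.contains(Collides(1)))
    assert((m - Collides(0)) == HashMap(Collides(1) -> 1))
  }
}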
- val x = HashMap.empty + (1->1) + (2->2) - if(x.getClass.getSimpleName != "HashTrieMap") - println("A hash map containing two non-colliding values should be a HashTrieMap") - - val y = x - 1 - if(y.getClass.getSimpleName != "HashMap1") - println("A hash map containing one element should always use HashMap1") - } - - def test2(): Unit = { - // class that always causes hash collisions - case class Collision(value:Int) { override def hashCode = 0 } - - // create a set that should have a collison - val x = HashMap.empty + (Collision(0)->0) + (Collision(1) ->0) - if(x.getClass.getSimpleName != "HashMapCollision1") - println("HashMap of size >1 with collisions should use HashMapCollision") - - // remove the collision again by removing all but one element - val y = x - Collision(0) - if(y.getClass.getSimpleName != "HashMap1") - println("HashMap of size 1 should use HashMap1" + y.getClass) - } - def test3(): Unit = { - // finds an int x such that improved(x) differs in the first bit to improved(0), - // which is the worst case for the HashTrieSet - def findWorstCaseInts(): Unit = { - // copy of improve from HashSet - def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) - } - - // find two hashes which have a large separation - val x = 0 - var y = 1 - val ix = improve(x) - while(y!=0 && improve(y)!=ix+(1<<31)) - y+=1 - printf("%s %s %x %x\n",x,y,improve(x), improve(y)) - } - // this is not done every test run since it would slow down ant test.suite too much. - // findWorstCaseInts() - - // two numbers that are immediately adiacent when fed through HashSet.improve - val h0 = 0 - val h1 = 1270889724 - - // h is the hashcode, i is ignored for the hashcode but relevant for equality - case class Collision(h:Int, i:Int) { - override def hashCode = h - } - val a = Collision(h0,0)->0 - val b = Collision(h0,1)->0 - val c = Collision(h1,0)->0 - - // create a HashSetCollision1 - val x = HashMap(a) + b - if(x.getClass.getSimpleName != "HashMapCollision1") - println("x should be a HashMapCollision") - StructureTests.validate(x) - //StructureTests.printStructure(x) - require(x.size==2 && x.contains(a._1) && x.contains(b._1)) - - // go from a HashSetCollision1 to a HashTrieSet with maximum depth - val y = x + c - if(y.getClass.getSimpleName != "HashTrieMap") - println("y should be a HashTrieMap") - StructureTests.validate(y) - // StructureTests.printStructure(y) - require(y.size==3 && y.contains(a._1) && y.contains(b._1) && y.contains(c._1)) - - // go from a HashSet1 directly to a HashTrieSet with maximum depth - val z = HashMap(a) + c - if(y.getClass.getSimpleName != "HashTrieMap") - println("y should be a HashTrieMap") - StructureTests.validate(z) - // StructureTests.printStructure(z) - require(z.size == 2 && z.contains(a._1) && z.contains(c._1)) - } - test1() - test2() - test3() -} - - -package scala.collection.immutable { - object StructureTests { - def printStructure(x:HashMap[_,_], prefix:String=""): Unit = { - x match { - case m:HashMap.HashTrieMap[_,_] => - println(prefix+m.getClass.getSimpleName + " " + m.size) - m.elems.foreach(child => printStructure(child, prefix + " ")) - case m:HashMap.HashMapCollision1[_,_] => - println(prefix+m.getClass.getSimpleName + " " + m.kvs.size) - case m:HashMap.HashMap1[_,_] => - println(prefix+m.getClass.getSimpleName + " " + m.head) - case _ => - println(prefix+"empty") - } - } - - def validate(x:HashMap[_,_]): Unit = { - x match { - case m:HashMap.HashTrieMap[_,_] => - require(m.elems.size>1 || 
(m.elems.size==1 && m.elems(0).isInstanceOf[HashMap.HashTrieMap[_,_]])) - m.elems.foreach(validate _) - case m:HashMap.HashMapCollision1[_,_] => - require(m.kvs.size>1) - case m:HashMap.HashMap1[_,_] => - case _ => - } - } - } -} diff --git a/tests/run/t627.check b/tests/run/t627.check index 39e641d98784..da94cd626c9d 100644 --- a/tests/run/t627.check +++ b/tests/run/t627.check @@ -1 +1 @@ -WrappedArray(1, 2, 3, 4) +ArraySeq(1, 2, 3, 4) diff --git a/tests/run/t627.scala b/tests/run/t627.scala index 7136169b0044..e4b19308d6db 100644 --- a/tests/run/t627.scala +++ b/tests/run/t627.scala @@ -1,6 +1,6 @@ object Test { def main(args: Array[String]): Unit = { - val s: Seq[Int] = Array(1, 2, 3, 4) + val s: Seq[Int] = Array(1, 2, 3, 4).toIndexedSeq println(s) } } diff --git a/tests/run/t6271.scala b/tests/run/t6271.scala index 8ebf7ad8b502..5c7a97b04746 100644 --- a/tests/run/t6271.scala +++ b/tests/run/t6271.scala @@ -2,27 +2,27 @@ object Test extends App { def filterIssue = { val viewed : Iterable[Iterable[Int]] = List(List(0).view).view val filtered = viewed flatMap { x => List( x filter (_ > 0) ) } - filtered.iterator.toIterable.flatten + filtered.iterator.to(Iterable).flatten } def takenIssue = { val viewed : Iterable[Iterable[Int]] = List(List(0).view).view val filtered = viewed flatMap { x => List( x take 0 ) } - filtered.iterator.toIterable.flatten + filtered.iterator.to(Iterable).flatten } def droppedIssue = { val viewed : Iterable[Iterable[Int]] = List(List(0).view).view val filtered = viewed flatMap { x => List( x drop 1 ) } - filtered.iterator.toIterable.flatten + filtered.iterator.to(Iterable).flatten } def flatMappedIssue = { val viewed : Iterable[Iterable[Int]] = List(List(0).view).view val filtered = viewed flatMap { x => List( x flatMap (_ => List()) ) } - filtered.iterator.toIterable.flatten + filtered.iterator.to(Iterable).flatten } def slicedIssue = { val viewed : Iterable[Iterable[Int]] = List(List(0).view).view val filtered = viewed flatMap { x => List( x slice (2,3) ) } - filtered.iterator.toIterable.flatten + filtered.iterator.to(Iterable).flatten } filterIssue takenIssue diff --git a/tests/run/t6292.check b/tests/run/t6292.check deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/tests/run/t6292.scala b/tests/run/t6292.scala deleted file mode 100644 index 51e31f95fc2f..000000000000 --- a/tests/run/t6292.scala +++ /dev/null @@ -1,18 +0,0 @@ - import scala.collection.mutable.DoubleLinkedList - -object Test { - def main(args: Array[String]): Unit = { - cloneAndtest(DoubleLinkedList[Int]()) - cloneAndtest(DoubleLinkedList[Int](1)) - cloneAndtest(DoubleLinkedList[Int](1,2,3,4)) - } - - def cloneAndtest(l: DoubleLinkedList[Int]): Unit = - testSame(l, l.clone.asInstanceOf[DoubleLinkedList[Int]]) - - def testSame(one: DoubleLinkedList[Int], two: DoubleLinkedList[Int]): Unit = { - def msg = s" for ${one} and ${two} !" 
- assert(one.size == two.size, s"Cloned sizes are not the same $msg!") - assert(one == two, s"Cloned lists are not equal $msg") - } -} diff --git a/tests/run/t6410.scala b/tests/run/t6410.scala deleted file mode 100644 index 7d25c248500d..000000000000 --- a/tests/run/t6410.scala +++ /dev/null @@ -1,9 +0,0 @@ - - - -object Test extends App { - val x = collection.parallel.mutable.ParArray.range(1,10) groupBy { _ % 2 } mapValues { _.size } - println(x) - val y = collection.parallel.immutable.ParVector.range(1,10) groupBy { _ % 2 } mapValues { _.size } - println(y) -} diff --git a/tests/run/t6467.scala b/tests/run/t6467.scala deleted file mode 100644 index 0ee8699d6475..000000000000 --- a/tests/run/t6467.scala +++ /dev/null @@ -1,20 +0,0 @@ - - - - -import collection._ - - - -object Test extends App { - - def compare(s1: String, s2: String): Unit = { - assert(s1 == s2, s1 + "\nvs.\n" + s2) - } - - compare(List(1, 2, 3, 4).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234") - compare(List(1, 2, 3, 4).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234") - compare(Seq(0 until 100: _*).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString) - compare(Seq(0 until 100: _*).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString) - -} diff --git a/tests/run/t6584.scala b/tests/run/t6584.scala index 24c236ef3590..c5148907c4dc 100644 --- a/tests/run/t6584.scala +++ b/tests/run/t6584.scala @@ -1,3 +1,5 @@ +import collection.immutable.LazyList + object Test { def main(args: Array[String]): Unit = { val size = 100 * 1024 @@ -6,11 +8,11 @@ object Test { println("Array: " + Array.tabulate(size)(x => x).distinct.size) println("Vector: " + Vector.tabulate(size)(x => x).distinct.size) println("List: " + List.tabulate(size)(x => x).distinct.size) - println("Stream: " + Stream.tabulate(size)(x => x).distinct.size) + println("Stream: " + LazyList.tabulate(size)(x => x).distinct.size) println("Array: " + doubled.toArray.distinct.size) println("Vector: " + doubled.toVector.distinct.size) println("List: " + doubled.toList.distinct.size) - println("Stream: " + doubled.toStream.distinct.size) + println("Stream: " + doubled.to(LazyList).distinct.size) } } diff --git a/tests/run/t6614.check b/tests/run/t6614.check index 2e80ebda8bf6..8e59641fa8b3 100644 --- a/tests/run/t6614.check +++ b/tests/run/t6614.check @@ -1,11 +1,11 @@ -(ArrayStack(),true) -(ArrayStack(0),true) -(ArrayStack(0, 1),true) -(ArrayStack(0, 1, 2),true) -(ArrayStack(0, 1, 2, 3),true) -(ArrayStack(0, 1, 2, 3, 4),true) -(ArrayStack(0, 1, 2, 3, 4, 5),true) -(ArrayStack(0, 1, 2, 3, 4, 5, 6),true) -(ArrayStack(0, 1, 2, 3, 4, 5, 6, 7),true) -(ArrayStack(0, 1, 2, 3, 4, 5, 6, 7, 8),true) -(ArrayStack(0, 1, 2, 3, 4, 5, 6, 7, 8, 9),true) +(Stack(),true) +(Stack(0),true) +(Stack(0, 1),true) +(Stack(0, 1, 2),true) +(Stack(0, 1, 2, 3),true) +(Stack(0, 1, 2, 3, 4),true) +(Stack(0, 1, 2, 3, 4, 5),true) +(Stack(0, 1, 2, 3, 4, 5, 6),true) +(Stack(0, 1, 2, 3, 4, 5, 6, 7),true) +(Stack(0, 1, 2, 3, 4, 5, 6, 7, 8),true) +(Stack(0, 1, 2, 3, 4, 5, 6, 7, 8, 9),true) diff --git a/tests/run/t6614.scala b/tests/run/t6614.scala index 3ad9f36fc471..dc11f7719c21 100644 --- a/tests/run/t6614.scala +++ b/tests/run/t6614.scala @@ -1,8 +1,8 @@ object Test extends App { - import scala.collection.mutable.ArrayStack + import scala.collection.mutable.Stack println((for (i <- 0 to 10) yield { - val in = ArrayStack.tabulate(i)(_.toString) + val in = 
Stack.tabulate(i)(_.toString) (in, (in filter (_ => true)) == in) }).mkString("\n")) } diff --git a/tests/run/t6628.scala b/tests/run/t6628.scala index bc87c125046d..92c29310cae8 100644 --- a/tests/run/t6628.scala +++ b/tests/run/t6628.scala @@ -1,11 +1,11 @@ object Test { - def coll = new Traversable[String] { - override def foreach[U](f:String=>U): Unit = { f("1") } + def coll = new Iterable[String] { + override def iterator: Iterator[String] = Iterator("1") } val dropped = coll.view drop 1 def main(args: Array[String]): Unit = { println(dropped.isEmpty) - println(dropped.force.isEmpty) + println(dropped.toIndexedSeq.isEmpty) } } diff --git a/tests/run/t6632.check b/tests/run/t6632.check index 26cf061b5f44..6f57b933c3c9 100644 --- a/tests/run/t6632.check +++ b/tests/run/t6632.check @@ -1,5 +1,5 @@ -java.lang.IndexOutOfBoundsException: -1 -java.lang.IndexOutOfBoundsException: -2 -java.lang.IndexOutOfBoundsException: -3 -java.lang.IndexOutOfBoundsException: -1 -java.lang.IndexOutOfBoundsException: 5 +java.lang.IndexOutOfBoundsException: -1 is out of bounds (min 0, max 4) +java.lang.IndexOutOfBoundsException: -2 is out of bounds (min 0, max 4) +java.lang.IndexOutOfBoundsException: -3 is out of bounds (min 0, max 4) +java.lang.IndexOutOfBoundsException: -1 is out of bounds (min 0, max 4) +java.lang.IndexOutOfBoundsException: 5 is out of bounds (min 0, max 4) diff --git a/tests/run/t6632.scala b/tests/run/t6632.scala index 58c52ec42562..3acb903151fc 100644 --- a/tests/run/t6632.scala +++ b/tests/run/t6632.scala @@ -1,22 +1,22 @@ object Test extends App { import collection.mutable.ListBuffer - def newLB = ListBuffer(Symbol("a"), Symbol("b"), Symbol("c"), Symbol("d"), Symbol("e")) + def newLB = ListBuffer("a", "b", "c", "d", "e") def iiobe[A](f: => A) = try { f } catch { case ex: IndexOutOfBoundsException => println(ex) } val lb0 = newLB - iiobe( lb0.insert(-1, Symbol("x")) ) + iiobe( lb0.insert(-1, "x") ) val lb1 = newLB - iiobe( lb1.insertAll(-2, Array(Symbol("x"), Symbol("y"), Symbol("z"))) ) + iiobe( lb1.insertAll(-2, Array("x", "y", "z")) ) val lb2 = newLB - iiobe( lb2.update(-3, Symbol("u")) ) + iiobe( lb2.update(-3, "u") ) val lb3 = newLB - iiobe( lb3.updated(-1, Symbol("u")) ) - iiobe( lb3.updated(5, Symbol("u")) ) + iiobe( lb3.update(-1, "u") ) + iiobe( lb3.update(5, "u") ) } diff --git a/tests/run/t6633.check b/tests/run/t6633.check index 1ff8cdbc4459..6ee7bc74949b 100644 --- a/tests/run/t6633.check +++ b/tests/run/t6633.check @@ -1,3 +1,3 @@ -java.lang.IndexOutOfBoundsException: 9 +java.lang.IndexOutOfBoundsException: 9 is out of bounds (min 0, max 4) replStringOf OK length OK diff --git a/tests/run/t6634.check b/tests/run/t6634.check index b085f397e630..1cb3feef3a91 100644 --- a/tests/run/t6634.check +++ b/tests/run/t6634.check @@ -4,25 +4,25 @@ String OK. Length OK. Trying lb1 ... -java.lang.IndexOutOfBoundsException: at 6 deleting 6 +java.lang.IndexOutOfBoundsException: 6 to 12 is out of bounds (min 0, max 4) Checking ... String OK. Length OK. Trying lb2 ... -java.lang.IndexOutOfBoundsException: at 99 deleting 6 +java.lang.IndexOutOfBoundsException: 99 to 105 is out of bounds (min 0, max 4) Checking ... String OK. Length OK. Trying lb3 ... -java.lang.IndexOutOfBoundsException: at 1 deleting 9 +java.lang.IndexOutOfBoundsException: 1 to 10 is out of bounds (min 0, max 4) Checking ... String OK. Length OK. Trying lb4 ... -java.lang.IndexOutOfBoundsException: at -1 deleting 1 +java.lang.IndexOutOfBoundsException: -1 to 0 is out of bounds (min 0, max 4) Checking ... String OK. 
Length OK. diff --git a/tests/run/t6827.scala b/tests/run/t6827.scala index 8b655e9e6bdd..cb35719408f9 100644 --- a/tests/run/t6827.scala +++ b/tests/run/t6827.scala @@ -4,7 +4,7 @@ object Test extends App { def tryit(label: String, start: Int, len: Int): Unit = { val status = try { - val it = ns.toIterator + val it = ns.iterator it.copyToArray(arr, start, len) "ok" } catch { @@ -40,15 +40,15 @@ object Test extends App { tryit("invalid read -1", 30, -1) // okay, see scala/bug#7128 - "...".toIterator.copyToArray(new Array[Char](0), 0, 0) + "...".iterator.copyToArray(new Array[Char](0), 0, 0) // Bonus test from @som-snytt to check for overflow in // index calculations. - def testOverflow(start: Int, len: Int, expected: List[Char]) = { + def testOverflow(start: Int, len: Int, expected: List[Char]): Unit = { def copyFromIterator = { val arr = Array.fill[Char](3)('-') - "abc".toIterator.copyToArray(arr, start, len) + "abc".iterator.copyToArray(arr, start, len) arr.toList } def copyFromArray = { diff --git a/tests/run/t6908.scala b/tests/run/t6908.scala deleted file mode 100644 index da37cb0bbde3..000000000000 --- a/tests/run/t6908.scala +++ /dev/null @@ -1,6 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - val set = collection.mutable.Set("1", null, "3").par - assert( set exists (_ eq null) ) - } -} diff --git a/tests/run/t7215.scala b/tests/run/t7215.scala index c93e97f9c89d..396399c7bb27 100644 --- a/tests/run/t7215.scala +++ b/tests/run/t7215.scala @@ -2,5 +2,5 @@ object Test extends App { List[List[Any]]().transpose.isEmpty Array[Array[Any]]().transpose.isEmpty Vector[Vector[Any]]().transpose.isEmpty - Stream[Stream[Any]]().transpose.isEmpty + LazyList[LazyList[Any]]().transpose.isEmpty } diff --git a/tests/run/t7269.scala b/tests/run/t7269.scala index d22e57dfee86..5876e88eae7c 100644 --- a/tests/run/t7269.scala +++ b/tests/run/t7269.scala @@ -1,29 +1,28 @@ -import scala.collection.JavaConversions._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable object Test extends App { - def testMap(): Unit = { val mapJ = new java.util.HashMap[Int, String] - val mapS: mutable.Map[Int, String] = mapJ + val mapS: mutable.Map[Int, String] = mapJ.asScala (10 to 20).foreach(i => mapS += ((i, i.toString))) assert(11 == mapS.size) // ConcurrentModificationException thrown in the following line - mapS.retain((i, str) => i % 2 == 0) + mapS.filterInPlace((i, str) => i % 2 == 0) assert(6 == mapS.size) } def testSet(): Unit = { val mapJ = new java.util.HashSet[Int] - val mapS: mutable.Set[Int] = mapJ + val mapS: mutable.Set[Int] = mapJ.asScala (10 to 20).foreach(i => mapS += i) assert(11 == mapS.size) // ConcurrentModificationException thrown in the following line - mapS.retain((i) => i % 2 == 0) + mapS.filterInPlace((i) => i % 2 == 0) assert(6 == mapS.size) } diff --git a/tests/run/t7326.scala b/tests/run/t7326.scala deleted file mode 100644 index ce3fa122aaa4..000000000000 --- a/tests/run/t7326.scala +++ /dev/null @@ -1,64 +0,0 @@ -import scala.collection.immutable.ListSet -import scala.collection.immutable.HashSet - -object Test extends App { - - def testCorrectness(): Unit = { - // a key that has many hashCode collisions - case class Collision(i: Int) { override def hashCode = i / 5 } - - def subsetTest[T](emptyA:Set[T], emptyB:Set[T], mkKey:Int => T, n:Int): Unit = { - val outside = mkKey(n + 1) - for(i <- 0 to n) { - val a = emptyA ++ (0 until i).map(mkKey) - // every set must be a subset of itself - require(a.subsetOf(a), "A set must be the subset of itself") - 
for(k <- 0 to i) { - // k <= i, so b is definitely a subset - val b = emptyB ++ (0 until k).map(mkKey) - // c has less elements than a, but contains a value that is not in a - // so it is not a subset, but that is not immediately obvious due to size - val c = b + outside - require(b.subsetOf(a), s"$b must be a subset of $a") - require(!c.subsetOf(a), s"$c must not be a subset of $a") - } - } - } - - // test the HashSet/HashSet case - subsetTest(HashSet.empty[Int], HashSet.empty[Int], identity, 100) - - // test the HashSet/other set case - subsetTest(HashSet.empty[Int], ListSet.empty[Int], identity, 100) - - // test the HashSet/HashSet case for Collision keys - subsetTest(HashSet.empty[Collision], HashSet.empty[Collision], Collision, 100) - - // test the HashSet/other set case for Collision keys - subsetTest(HashSet.empty[Collision], ListSet.empty[Collision], Collision, 100) - } - - /** - * A main performance benefit of the new subsetOf is that we do not have to call hashCode during subsetOf - * since we already have the hash codes in the HashSet1 nodes. - */ - def testNoHashCodeInvocationsDuringSubsetOf() = { - var count = 0 - - case class HashCodeCounter(i:Int) { - override def hashCode = { - count += 1 - i - } - } - - val a = HashSet.empty ++ (0 until 100).map(HashCodeCounter) - val b = HashSet.empty ++ (0 until 50).map(HashCodeCounter) - val count0 = count - val result = b.subsetOf(a) - require(count == count0, "key.hashCode must not be called during subsetOf of two HashSets") - result - } - testCorrectness() - testNoHashCodeInvocationsDuringSubsetOf() -} diff --git a/tests/run/t7374.check b/tests/run/t7374.check index 4efa6f7af3a2..03d778308bec 100644 --- a/tests/run/t7374.check +++ b/tests/run/t7374.check @@ -1,3 +1,2 @@ List(2, 3) -ParVector(1, 2, 3) List(1, 2) diff --git a/tests/run/t7374/Test.java b/tests/run/t7374/Test.java index 02f86146ca96..52010858d802 100644 --- a/tests/run/t7374/Test.java +++ b/tests/run/t7374/Test.java @@ -1,7 +1,6 @@ public class Test { public static void main(String[] args) { System.out.println(SomeScala.list().tail()); - System.out.println(SomeScala.list().par()); System.out.println(SomeScala.list().init()); } } diff --git a/tests/run/t7436.scala b/tests/run/t7436.scala index 9627e38f5490..6fc6375ef03a 100644 --- a/tests/run/t7436.scala +++ b/tests/run/t7436.scala @@ -4,6 +4,6 @@ class B(val p1: Int) extends A(p1) object Test { def main(args: Array[String]): Unit = { - new B(1).p1 // threw java.lang.ClassCastException: scala.collection.mutable.WrappedArray$ofInt cannot be cast to java.lang.Integer + new B(1).p1 // threw java.lang.ClassCastException: scala.collection.mutable.ArraySeq$ofInt cannot be cast to java.lang.Integer } } diff --git a/tests/run/t7498.scala b/tests/run/t7498.scala deleted file mode 100644 index 1dbf0597e0b9..000000000000 --- a/tests/run/t7498.scala +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - -object Test extends App { - import scala.collection.concurrent.TrieMap - - class Collision(val idx: Int) { - override def hashCode = idx % 10 - } - - val tm = TrieMap[Collision, Unit]() - for (i <- 0 until 1000) tm(new Collision(i)) = () - - tm.par.foreach(kv => ()) -} - diff --git a/tests/run/t751.scala b/tests/run/t751.scala deleted file mode 100644 index 294d3af5c2b8..000000000000 --- a/tests/run/t751.scala +++ /dev/null @@ -1,6 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - val map = Map(1 -> "a", 2 -> "b", 3 -> "c") - assert(map.filterKeys(_ % 2 == 0).isInstanceOf[scala.collection.immutable.Map[_,_]]) - } -} diff --git 
a/tests/run/t7880.scala b/tests/run/t7880.scala index c69663b13ea1..3004f0d99e2f 100644 --- a/tests/run/t7880.scala +++ b/tests/run/t7880.scala @@ -1,8 +1,7 @@ -// Do "git log" on this file to know why it's been moved to pending object Test extends App { // This should terminate in one way or another, but it shouldn't loop forever. try { val buffer = collection.mutable.ArrayBuffer.fill(Int.MaxValue / 2 + 1)(0) - buffer append 1 + buffer += 1 } catch { case _: OutOfMemoryError => } } diff --git a/tests/run/t8100.scala b/tests/run/t8100.scala index b9d0fe50031c..4c376829f398 100644 --- a/tests/run/t8100.scala +++ b/tests/run/t8100.scala @@ -2,7 +2,7 @@ object Test { import scala.util.Try def main(args: Array[String]): Unit = { - def stream = Stream.from(0).take(100000).map(n => None) + def stream = LazyList.from(0).take(100000).map(n => None) println(Try(stream.flatten.length)) } } diff --git a/tests/run/t8680.scala b/tests/run/t8680.scala deleted file mode 100644 index 2bce09c507b9..000000000000 --- a/tests/run/t8680.scala +++ /dev/null @@ -1,53 +0,0 @@ -object Test extends App { - def pre(n: Int) = (-n to -1).toStream - - def cyc(m: Int) = { - lazy val s: Stream[Int] = (0 until m).toStream #::: s - s - } - - def precyc(n: Int, m: Int) = pre(n) #::: cyc(m) - - def str(s: Stream[Int]) = { - val b = new StringBuilder - s.addString(b, "", "", "") - b.toString - } - - def goal(n: Int, m: Int) = (-n until m).mkString + "..." - - // Check un-forced cyclic and non-cyclic streams - assert(str(pre(2)) == pre(2).take(1).toList.mkString + "?") - assert(str(cyc(2)) == cyc(2).take(1).toList.mkString + "?") - assert(str(precyc(2,2)) == precyc(2,2).take(1).toList.mkString + "?") - assert(!pre(2).hasDefiniteSize) - assert(!cyc(2).hasDefiniteSize) - assert(!precyc(2,2).hasDefiniteSize) - - // Check forced cyclic and non-cyclic streams - assert(str(pre(2).force) == (-2 to -1).mkString) - assert(str(cyc(2).force) == (0 until 2).mkString + "...") - assert(str(precyc(2,2).force) == (-2 until 2).mkString + "...") - assert(pre(2).force.hasDefiniteSize) - assert(!cyc(2).force.hasDefiniteSize) - assert(!precyc(2,2).force.hasDefiniteSize) - - // Special cases - assert(str(cyc(1).force) == goal(0,1)) - assert(str(precyc(1,6).force) == goal(1,6)) - assert(str(precyc(6,1).force) == goal(6,1)) - - // Make sure there are no odd/even problems - for (n <- 3 to 4; m <- 3 to 4) { - assert(precyc(n,m).mkString == goal(n,m), s"mkString $n $m") - assert(!precyc(n,m).force.hasDefiniteSize, s"hasDef $n$m") - } - - // Make sure there are no cycle/prefix modulus problems - for (i <- 6 to 8) { - assert(precyc(i,3).mkString == goal(i,3), s"mkString $i 3") - assert(precyc(3,i).mkString == goal(3,i), s"mkString 3 $i") - assert(!precyc(i,3).force.hasDefiniteSize, s"hasDef $i 3") - assert(!precyc(3,i).force.hasDefiniteSize, s"hasDef 3 $i") - } -} diff --git a/tests/run/tagless.check b/tests/run/tagless.check index b397f0d211d5..b9191517867b 100644 --- a/tests/run/tagless.check +++ b/tests/run/tagless.check @@ -4,13 +4,13 @@ (7 + (-1 * 2)) 35 7 * (8 + (-(1 + 2))) -tf1Tree = Node(Add,WrappedArray(Node(Lit,WrappedArray(Leaf(8))), Node(Neg,WrappedArray(Node(Add,WrappedArray(Node(Lit,WrappedArray(Leaf(1))), Node(Lit,WrappedArray(Leaf(2))))))))) -tfm1Tree = Node(Add,WrappedArray(Node(Lit,WrappedArray(Leaf(7))), Node(Neg,WrappedArray(Node(Mult,WrappedArray(Node(Lit,WrappedArray(Leaf(1))), Node(Lit,WrappedArray(Leaf(2))))))))) +tf1Tree = Node(Add,ArraySeq(Node(Lit,ArraySeq(Leaf(8))), 
Node(Neg,ArraySeq(Node(Add,ArraySeq(Node(Lit,ArraySeq(Leaf(1))), Node(Lit,ArraySeq(Leaf(2))))))))) +tfm1Tree = Node(Add,ArraySeq(Node(Lit,ArraySeq(Leaf(7))), Node(Neg,ArraySeq(Node(Mult,ArraySeq(Node(Lit,ArraySeq(Leaf(1))), Node(Lit,ArraySeq(Leaf(2))))))))) 2 Not a number: "X" 5 (8 + (-(1 + 2))) -Node(Add,WrappedArray(Node(Lit,WrappedArray(Leaf(8))), Node(Neg,WrappedArray(Node(Add,WrappedArray(Node(Lit,WrappedArray(Leaf(1))), Node(Lit,WrappedArray(Leaf(2))))))))) +Node(Add,ArraySeq(Node(Lit,ArraySeq(Leaf(8))), Node(Neg,ArraySeq(Node(Add,ArraySeq(Node(Lit,ArraySeq(Leaf(1))), Node(Lit,ArraySeq(Leaf(2))))))))) 5 (8 + (-(1 + 2))) (8 + ((-1) + (-2))) diff --git a/tests/run/transform.scala b/tests/run/transform.scala index d73155ceec2a..48cfdf24eac1 100644 --- a/tests/run/transform.scala +++ b/tests/run/transform.scala @@ -1,8 +1,10 @@ +import collection.mutable.ArrayBuffer + object Test { - val x = (1 to 10).toBuffer + val x = (1 to 10).to(ArrayBuffer) def main(args: Array[String]): Unit = { - x transform (_ * 2) + x mapInPlace (_ * 2) assert(x.sum == (1 to 10).sum * 2) } } diff --git a/tests/run/tuple-zipped.scala b/tests/run/tuple-zipped.scala deleted file mode 100644 index 37ac52977f35..000000000000 --- a/tests/run/tuple-zipped.scala +++ /dev/null @@ -1,41 +0,0 @@ - -import scala.language.postfixOps - -object Test { - val xs1 = List.range(1, 100) - val xs2 = xs1.view - val xs3 = xs1 take 10 - val ss1 = Stream from 1 - val ss2 = ss1.view - val ss3 = ss1 take 10 - val as1 = 1 to 100 toArray - val as2 = as1.view - val as3 = as1 take 10 - - def xss1 = List[Seq[Int]](xs1, xs2, xs3, ss1, ss2, ss3, as1, as2, as3) - def xss2 = List[Seq[Int]](xs1, xs2, xs3, ss3, as1, as2, as3) // no infinities - def xss3 = List[Seq[Int]](xs2, xs3, ss3, as1) // representative sampling - - def main(args: Array[String]): Unit = { - for (cc1 <- xss1 ; cc2 <- xss2) { - val sum1 = (cc1, cc2).zipped map { case (x, y) => x + y } sum - val sum2 = (cc1, cc2).zipped map (_ + _) sum - - assert(sum1 == sum2) - } - - for (cc1 <- xss1 ; cc2 <- xss2 ; cc3 <- xss3) { - val sum1 = (cc1, cc2, cc3).zipped map { case (x, y, z) => x + y + z } sum - val sum2 = (cc1, cc2, cc3).zipped map (_ + _ + _) sum - - assert(sum1 == sum2) - } - - assert((ss1, ss1).zipped exists ((x, y) => true)) - assert((ss1, ss1, ss1).zipped exists ((x, y, z) => true)) - - assert(!(ss1, ss2, 1 to 3).zipped.exists(_ + _ + _ > 100000)) - assert((1 to 3, ss1, ss2).zipped.forall(_ + _ + _ > 0)) - assert((ss1, 1 to 3, ss2).zipped.map(_ + _ + _).size == 3) - } -} diff --git a/tests/run/typeclass-derivation3.check b/tests/run/typeclass-derivation3.check index c39266a635b2..12bb24628b0c 100644 --- a/tests/run/typeclass-derivation3.check +++ b/tests/run/typeclass-derivation3.check @@ -9,6 +9,6 @@ Cons(hd = Cons(hd = 11, tl = Cons(hd = 22, tl = Cons(hd = 33, tl = Nil))), tl = Cons(hd = Left(x = 1), tl = Cons(hd = Right(x = Pair(x = 2, y = 3)), tl = Nil)) Cons(hd = Left(x = 1), tl = Cons(hd = Right(x = Pair(x = 2, y = 3)), tl = Nil)) true -::(head = 1, tl$access$1 = ::(head = 2, tl$access$1 = ::(head = 3, tl$access$1 = Nil()))) -::(head = ::(head = 1, tl$access$1 = Nil()), tl$access$1 = ::(head = ::(head = 2, tl$access$1 = ::(head = 3, tl$access$1 = Nil())), tl$access$1 = Nil())) -::(head = Nil(), tl$access$1 = ::(head = ::(head = 1, tl$access$1 = Nil()), tl$access$1 = ::(head = ::(head = 2, tl$access$1 = ::(head = 3, tl$access$1 = Nil())), tl$access$1 = Nil()))) +::(head = 1, next$access$1 = ::(head = 2, next$access$1 = ::(head = 3, next$access$1 = Nil()))) +::(head = ::(head 
= 1, next$access$1 = Nil()), next$access$1 = ::(head = ::(head = 2, next$access$1 = ::(head = 3, next$access$1 = Nil())), next$access$1 = Nil())) +::(head = Nil(), next$access$1 = ::(head = ::(head = 1, next$access$1 = Nil()), next$access$1 = ::(head = ::(head = 2, next$access$1 = ::(head = 3, next$access$1 = Nil())), next$access$1 = Nil()))) diff --git a/tests/run/unapply.scala b/tests/run/unapply.scala index 7b10030ba76d..5f6a3be2fd6b 100644 --- a/tests/run/unapply.scala +++ b/tests/run/unapply.scala @@ -87,19 +87,19 @@ object Mas { object LisSeqArr { def run(): Unit = { - assert((1,2) == ((List(1,2,3): Any) match { case List(x,y,_: _*) => (x,y)})) - assert((1,2) == ((List(1,2,3): Any) match { case Seq(x,y,_: _*) => (x,y)})) + assert((1,2) == ((List(1,2,3): Any) match { case List(x,y,_*) => (x,y)})) + assert((1,2) == ((List(1,2,3): Any) match { case Seq(x,y,_*) => (x,y)})) } } object StreamFoo { - def sum(stream: Stream[Int]): Int = - stream match { - case Stream.Empty => 0 - case Stream.cons(hd, tl) => hd + sum(tl) + def sum(lazyList: LazyList[Int]): Int = + lazyList match { + case ll if ll.isEmpty => 0 + case LazyList.cons(hd, tl) => hd + sum(tl) } def run(): Unit = { - val str: Stream[Int] = List(1,2,3).toStream + val str: LazyList[Int] = List(1,2,3).to(LazyList) assert(6 == sum(str)) } } diff --git a/tests/run/unapplyArray.scala b/tests/run/unapplyArray.scala index a29ef124b882..63919fb35506 100644 --- a/tests/run/unapplyArray.scala +++ b/tests/run/unapplyArray.scala @@ -1,7 +1,7 @@ object Test { def main(args:Array[String]): Unit = { val z = Array(1,2,3,4) - val zs: Seq[Int] = z + val zs: Seq[Int] = z.toIndexedSeq val za: Any = z /* @@ -24,7 +24,7 @@ object Test { Console.println("za aseq "+ Seq.unapplySeq(za)) */ val zl = zs match { - case Seq(xs:_*) => xs.length + case Seq(xs@_*) => xs.length } assert(zl == 4) } diff --git a/tests/run/unittest_collection.scala b/tests/run/unittest_collection.scala index 8be8ea1a5927..18e111fdc3a4 100644 --- a/tests/run/unittest_collection.scala +++ b/tests/run/unittest_collection.scala @@ -1,14 +1,10 @@ object Test { - import scala.collection.mutable.{ArrayBuffer, Buffer, BufferProxy, ListBuffer} + import scala.collection.mutable.{ArrayBuffer, Buffer, ListBuffer} def main(args: Array[String]): Unit = { test(collection.mutable.ArrayBuffer[String]()) test(collection.mutable.ListBuffer[String]()) - class BBuf(z:ListBuffer[String]) extends BufferProxy[String] { // @odersky - bug here in scala 2.12 trait encoding seems like... 
- def self = z - } - test(new BBuf(collection.mutable.ListBuffer[String]())) } def test(x: Buffer[String]): Unit = { @@ -50,7 +46,7 @@ object Test { x += "a" x += "b" val dest = new ArrayBuffer[String] - x.copyToBuffer(dest) + dest ++= x assert(List("a", "b") == dest.toList, "dest") assert(List("a", "b") == x.toList, "source") } diff --git a/tests/run/unittest_iterator.scala b/tests/run/unittest_iterator.scala index a8b9604dce2a..f1e43f590967 100644 --- a/tests/run/unittest_iterator.scala +++ b/tests/run/unittest_iterator.scala @@ -9,11 +9,11 @@ object Test { } def main(args: Array[String]): Unit = { - val itSum = it.toStream.sum + val itSum = it.to(LazyList).sum for (i <- it) { // sum of the groups == sum of the original - val thisSum = ((it grouped i) map (_.sum)).toStream.sum - assert(thisSum == itSum, thisSum + " != " + itSum) + val thisSum = ((it grouped i) map (_.sum)).to(LazyList).sum + assert(thisSum == itSum, s"$thisSum != $itSum" ) } // grouped diff --git a/tests/run/value-class-extractor-seq.check b/tests/run/value-class-extractor-seq.check index dc1cb3905607..9efcde120dda 100644 --- a/tests/run/value-class-extractor-seq.check +++ b/tests/run/value-class-extractor-seq.check @@ -1,3 +1,3 @@ Bip(1, 2, 3) -Bip(1, 2, c : WrappedArray(3, 4, 5): _*) +Bip(1, 2, c : ArraySeq(3, 4, 5): _*) class [I diff --git a/tests/run/valueclasses-classmanifest-basic.scala b/tests/run/valueclasses-classmanifest-basic.scala deleted file mode 100644 index 50addda359f7..000000000000 --- a/tests/run/valueclasses-classmanifest-basic.scala +++ /dev/null @@ -1,6 +0,0 @@ -class Foo(val x: Int) extends AnyVal - -@deprecated("Suppress warnings", since="2.11") -object Test extends App { - println(classManifest[Foo]) -} diff --git a/tests/run/variable-pattern-access.scala b/tests/run/variable-pattern-access.scala index 1d27b3e42dbb..76fc5ecfc0f3 100644 --- a/tests/run/variable-pattern-access.scala +++ b/tests/run/variable-pattern-access.scala @@ -3,9 +3,9 @@ class A { } object Test { def printFields(cls: Class[_]) = - println(cls.getDeclaredFields.map(_.toString).sorted.deep.mkString("\n")) + println(cls.getDeclaredFields.map(_.toString).sorted.toList.mkString("\n")) def printMethods(cls: Class[_]) = - println(cls.getDeclaredMethods.map(_.toString).sorted.deep.mkString("\n")) + println(cls.getDeclaredMethods.map(_.toString).sorted.toList.mkString("\n")) def main(args: Array[String]): Unit = { println("# Fields of A:") diff --git a/tests/run/vc-equals.scala b/tests/run/vc-equals.scala index 15d9e73e5f47..0e3de64f7969 100644 --- a/tests/run/vc-equals.scala +++ b/tests/run/vc-equals.scala @@ -2,7 +2,7 @@ object Test extends App { class C(val s: Array[Int]) extends AnyVal { override def equals(that: Any) = that match { - case that: C => s.deep == that.s.deep + case that: C => s.toList == that.s.toList case _ => false } } @@ -22,13 +22,13 @@ object Test extends App { trait Eql extends Any { def deep: Any override def equals(that: Any) = that match { - case that: D => deep == that.s.deep + case that: D => deep == that.s.toList case _ => false } } class D(val s: Array[Int]) extends AnyVal with Eql { - def deep = s.deep + def deep = s.toList } def test2() = { diff --git a/tests/run/view-headoption.check b/tests/run/view-headoption.check index 10e02753507f..d9f96cf25402 100644 --- a/tests/run/view-headoption.check +++ b/tests/run/view-headoption.check @@ -7,6 +7,8 @@ f2: 5 fail success fail +success +fail fail success fail diff --git a/tests/run/view-iterator-stream.check b/tests/run/view-iterator-stream.check index 
39de54a67e23..4b4e7c549c67 100644 --- a/tests/run/view-iterator-stream.check +++ b/tests/run/view-iterator-stream.check @@ -2,111 +2,111 @@ ** drop 20 -> take 10 -> slice(1, 5) ** ------------------- -toIndexedSeq -> toIterator -> toStream Stream(22, ?) 22 23 24 25 -toIndexedSeq -> toIterator -> view StreamView(...) 22 23 24 25 -toIndexedSeq -> toStream -> toIterator 22 23 24 25 -toIndexedSeq -> toStream -> view StreamView(...) 22 23 24 25 -toIndexedSeq -> view -> toIterator 22 23 24 25 -toIndexedSeq -> view -> toStream Stream(22, ?) 22 23 24 25 -toIterator -> toIndexedSeq -> toStream Stream(22, ?) 22 23 24 25 -toIterator -> toIndexedSeq -> view SeqView(...) 22 23 24 25 -toIterator -> toStream -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -toIterator -> toStream -> view StreamView(...) 22 23 24 25 -toIterator -> view -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -toIterator -> view -> toStream Stream(22, ?) 22 23 24 25 -toStream -> toIndexedSeq -> toIterator 22 23 24 25 -toStream -> toIndexedSeq -> view SeqView(...) 22 23 24 25 -toStream -> toIterator -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -toStream -> toIterator -> view StreamView(...) 22 23 24 25 -toStream -> view -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -toStream -> view -> toIterator 22 23 24 25 -view -> toIndexedSeq -> toIterator 22 23 24 25 -view -> toIndexedSeq -> toStream Stream(22, ?) 22 23 24 25 -view -> toIterator -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -view -> toIterator -> toStream Stream(22, ?) 22 23 24 25 -view -> toStream -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -view -> toStream -> toIterator 22 23 24 25 +IndexedSeq.from -> LazyList.from -> iterator 22 23 24 25 +IndexedSeq.from -> LazyList.from -> view SeqView() 22 23 24 25 +IndexedSeq.from -> iterator -> LazyList.from LazyList() 22 23 24 25 +IndexedSeq.from -> iterator -> view SeqView() 22 23 24 25 +IndexedSeq.from -> view -> LazyList.from LazyList() 22 23 24 25 +IndexedSeq.from -> view -> iterator 22 23 24 25 +LazyList.from -> IndexedSeq.from -> iterator 22 23 24 25 +LazyList.from -> IndexedSeq.from -> view SeqView() 22 23 24 25 +LazyList.from -> iterator -> IndexedSeq.from Vector(22, 23, 24, 25) 22 23 24 25 +LazyList.from -> iterator -> view SeqView() 22 23 24 25 +LazyList.from -> view -> IndexedSeq.from Vector(22, 23, 24, 25) 22 23 24 25 +LazyList.from -> view -> iterator 22 23 24 25 +iterator -> IndexedSeq.from -> LazyList.from LazyList() 22 23 24 25 +iterator -> IndexedSeq.from -> view SeqView() 22 23 24 25 +iterator -> LazyList.from -> IndexedSeq.from Vector(22, 23, 24, 25) 22 23 24 25 +iterator -> LazyList.from -> view SeqView() 22 23 24 25 +iterator -> view -> IndexedSeq.from Vector(22, 23, 24, 25) 22 23 24 25 +iterator -> view -> LazyList.from LazyList() 22 23 24 25 +view -> IndexedSeq.from -> LazyList.from LazyList() 22 23 24 25 +view -> IndexedSeq.from -> iterator 22 23 24 25 +view -> LazyList.from -> IndexedSeq.from Vector(22, 23, 24, 25) 22 23 24 25 +view -> LazyList.from -> iterator 22 23 24 25 +view -> iterator -> IndexedSeq.from Vector(22, 23, 24, 25) 22 23 24 25 +view -> iterator -> LazyList.from LazyList() 22 23 24 25 ** take 20 -> drop 10 -> slice(1, 5) ** ------------------- -toIndexedSeq -> toIterator -> toStream Stream(12, ?) 12 13 14 15 -toIndexedSeq -> toIterator -> view StreamView(...) 12 13 14 15 -toIndexedSeq -> toStream -> toIterator 12 13 14 15 -toIndexedSeq -> toStream -> view StreamView(...) 
12 13 14 15 -toIndexedSeq -> view -> toIterator 12 13 14 15 -toIndexedSeq -> view -> toStream Stream(12, ?) 12 13 14 15 -toIterator -> toIndexedSeq -> toStream Stream(12, ?) 12 13 14 15 -toIterator -> toIndexedSeq -> view SeqView(...) 12 13 14 15 -toIterator -> toStream -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -toIterator -> toStream -> view StreamView(...) 12 13 14 15 -toIterator -> view -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -toIterator -> view -> toStream Stream(12, ?) 12 13 14 15 -toStream -> toIndexedSeq -> toIterator 12 13 14 15 -toStream -> toIndexedSeq -> view SeqView(...) 12 13 14 15 -toStream -> toIterator -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -toStream -> toIterator -> view StreamView(...) 12 13 14 15 -toStream -> view -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -toStream -> view -> toIterator 12 13 14 15 -view -> toIndexedSeq -> toIterator 12 13 14 15 -view -> toIndexedSeq -> toStream Stream(12, ?) 12 13 14 15 -view -> toIterator -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -view -> toIterator -> toStream Stream(12, ?) 12 13 14 15 -view -> toStream -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -view -> toStream -> toIterator 12 13 14 15 +IndexedSeq.from -> LazyList.from -> iterator 12 13 14 15 +IndexedSeq.from -> LazyList.from -> view SeqView() 12 13 14 15 +IndexedSeq.from -> iterator -> LazyList.from LazyList() 12 13 14 15 +IndexedSeq.from -> iterator -> view SeqView() 12 13 14 15 +IndexedSeq.from -> view -> LazyList.from LazyList() 12 13 14 15 +IndexedSeq.from -> view -> iterator 12 13 14 15 +LazyList.from -> IndexedSeq.from -> iterator 12 13 14 15 +LazyList.from -> IndexedSeq.from -> view SeqView() 12 13 14 15 +LazyList.from -> iterator -> IndexedSeq.from Vector(12, 13, 14, 15) 12 13 14 15 +LazyList.from -> iterator -> view SeqView() 12 13 14 15 +LazyList.from -> view -> IndexedSeq.from Vector(12, 13, 14, 15) 12 13 14 15 +LazyList.from -> view -> iterator 12 13 14 15 +iterator -> IndexedSeq.from -> LazyList.from LazyList() 12 13 14 15 +iterator -> IndexedSeq.from -> view SeqView() 12 13 14 15 +iterator -> LazyList.from -> IndexedSeq.from Vector(12, 13, 14, 15) 12 13 14 15 +iterator -> LazyList.from -> view SeqView() 12 13 14 15 +iterator -> view -> IndexedSeq.from Vector(12, 13, 14, 15) 12 13 14 15 +iterator -> view -> LazyList.from LazyList() 12 13 14 15 +view -> IndexedSeq.from -> LazyList.from LazyList() 12 13 14 15 +view -> IndexedSeq.from -> iterator 12 13 14 15 +view -> LazyList.from -> IndexedSeq.from Vector(12, 13, 14, 15) 12 13 14 15 +view -> LazyList.from -> iterator 12 13 14 15 +view -> iterator -> IndexedSeq.from Vector(12, 13, 14, 15) 12 13 14 15 +view -> iterator -> LazyList.from LazyList() 12 13 14 15 ** slice(20, 40) -> drop 10 -> take 5 ** ------------------- -toIndexedSeq -> toIterator -> toStream Stream(31, ?) 31 32 33 34 35 -toIndexedSeq -> toIterator -> view StreamView(...) 31 32 33 34 35 -toIndexedSeq -> toStream -> toIterator 31 32 33 34 35 -toIndexedSeq -> toStream -> view StreamView(...) 31 32 33 34 35 -toIndexedSeq -> view -> toIterator 31 32 33 34 35 -toIndexedSeq -> view -> toStream Stream(31, ?) 31 32 33 34 35 -toIterator -> toIndexedSeq -> toStream Stream(31, ?) 31 32 33 34 35 -toIterator -> toIndexedSeq -> view SeqView(...) 31 32 33 34 35 -toIterator -> toStream -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -toIterator -> toStream -> view StreamView(...) 
31 32 33 34 35 -toIterator -> view -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -toIterator -> view -> toStream Stream(31, ?) 31 32 33 34 35 -toStream -> toIndexedSeq -> toIterator 31 32 33 34 35 -toStream -> toIndexedSeq -> view SeqView(...) 31 32 33 34 35 -toStream -> toIterator -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -toStream -> toIterator -> view StreamView(...) 31 32 33 34 35 -toStream -> view -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -toStream -> view -> toIterator 31 32 33 34 35 -view -> toIndexedSeq -> toIterator 31 32 33 34 35 -view -> toIndexedSeq -> toStream Stream(31, ?) 31 32 33 34 35 -view -> toIterator -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -view -> toIterator -> toStream Stream(31, ?) 31 32 33 34 35 -view -> toStream -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -view -> toStream -> toIterator 31 32 33 34 35 +IndexedSeq.from -> LazyList.from -> iterator 31 32 33 34 35 +IndexedSeq.from -> LazyList.from -> view SeqView() 31 32 33 34 35 +IndexedSeq.from -> iterator -> LazyList.from LazyList() 31 32 33 34 35 +IndexedSeq.from -> iterator -> view SeqView() 31 32 33 34 35 +IndexedSeq.from -> view -> LazyList.from LazyList() 31 32 33 34 35 +IndexedSeq.from -> view -> iterator 31 32 33 34 35 +LazyList.from -> IndexedSeq.from -> iterator 31 32 33 34 35 +LazyList.from -> IndexedSeq.from -> view SeqView() 31 32 33 34 35 +LazyList.from -> iterator -> IndexedSeq.from Vector(31, 32, 33, 34, 35) 31 32 33 34 35 +LazyList.from -> iterator -> view SeqView() 31 32 33 34 35 +LazyList.from -> view -> IndexedSeq.from Vector(31, 32, 33, 34, 35) 31 32 33 34 35 +LazyList.from -> view -> iterator 31 32 33 34 35 +iterator -> IndexedSeq.from -> LazyList.from LazyList() 31 32 33 34 35 +iterator -> IndexedSeq.from -> view SeqView() 31 32 33 34 35 +iterator -> LazyList.from -> IndexedSeq.from Vector(31, 32, 33, 34, 35) 31 32 33 34 35 +iterator -> LazyList.from -> view SeqView() 31 32 33 34 35 +iterator -> view -> IndexedSeq.from Vector(31, 32, 33, 34, 35) 31 32 33 34 35 +iterator -> view -> LazyList.from LazyList() 31 32 33 34 35 +view -> IndexedSeq.from -> LazyList.from LazyList() 31 32 33 34 35 +view -> IndexedSeq.from -> iterator 31 32 33 34 35 +view -> LazyList.from -> IndexedSeq.from Vector(31, 32, 33, 34, 35) 31 32 33 34 35 +view -> LazyList.from -> iterator 31 32 33 34 35 +view -> iterator -> IndexedSeq.from Vector(31, 32, 33, 34, 35) 31 32 33 34 35 +view -> iterator -> LazyList.from LazyList() 31 32 33 34 35 ** slice(20, 40) -> take 10 -> drop 5 ** ------------------- -toIndexedSeq -> toIterator -> toStream Stream(26, ?) 26 27 28 29 30 -toIndexedSeq -> toIterator -> view StreamView(...) 26 27 28 29 30 -toIndexedSeq -> toStream -> toIterator 26 27 28 29 30 -toIndexedSeq -> toStream -> view StreamView(...) 26 27 28 29 30 -toIndexedSeq -> view -> toIterator 26 27 28 29 30 -toIndexedSeq -> view -> toStream Stream(26, ?) 26 27 28 29 30 -toIterator -> toIndexedSeq -> toStream Stream(26, ?) 26 27 28 29 30 -toIterator -> toIndexedSeq -> view SeqView(...) 26 27 28 29 30 -toIterator -> toStream -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -toIterator -> toStream -> view StreamView(...) 26 27 28 29 30 -toIterator -> view -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -toIterator -> view -> toStream Stream(26, ?) 26 27 28 29 30 -toStream -> toIndexedSeq -> toIterator 26 27 28 29 30 -toStream -> toIndexedSeq -> view SeqView(...) 
26 27 28 29 30 -toStream -> toIterator -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -toStream -> toIterator -> view StreamView(...) 26 27 28 29 30 -toStream -> view -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -toStream -> view -> toIterator 26 27 28 29 30 -view -> toIndexedSeq -> toIterator 26 27 28 29 30 -view -> toIndexedSeq -> toStream Stream(26, ?) 26 27 28 29 30 -view -> toIterator -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -view -> toIterator -> toStream Stream(26, ?) 26 27 28 29 30 -view -> toStream -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -view -> toStream -> toIterator 26 27 28 29 30 +IndexedSeq.from -> LazyList.from -> iterator 26 27 28 29 30 +IndexedSeq.from -> LazyList.from -> view SeqView() 26 27 28 29 30 +IndexedSeq.from -> iterator -> LazyList.from LazyList() 26 27 28 29 30 +IndexedSeq.from -> iterator -> view SeqView() 26 27 28 29 30 +IndexedSeq.from -> view -> LazyList.from LazyList() 26 27 28 29 30 +IndexedSeq.from -> view -> iterator 26 27 28 29 30 +LazyList.from -> IndexedSeq.from -> iterator 26 27 28 29 30 +LazyList.from -> IndexedSeq.from -> view SeqView() 26 27 28 29 30 +LazyList.from -> iterator -> IndexedSeq.from Vector(26, 27, 28, 29, 30) 26 27 28 29 30 +LazyList.from -> iterator -> view SeqView() 26 27 28 29 30 +LazyList.from -> view -> IndexedSeq.from Vector(26, 27, 28, 29, 30) 26 27 28 29 30 +LazyList.from -> view -> iterator 26 27 28 29 30 +iterator -> IndexedSeq.from -> LazyList.from LazyList() 26 27 28 29 30 +iterator -> IndexedSeq.from -> view SeqView() 26 27 28 29 30 +iterator -> LazyList.from -> IndexedSeq.from Vector(26, 27, 28, 29, 30) 26 27 28 29 30 +iterator -> LazyList.from -> view SeqView() 26 27 28 29 30 +iterator -> view -> IndexedSeq.from Vector(26, 27, 28, 29, 30) 26 27 28 29 30 +iterator -> view -> LazyList.from LazyList() 26 27 28 29 30 +view -> IndexedSeq.from -> LazyList.from LazyList() 26 27 28 29 30 +view -> IndexedSeq.from -> iterator 26 27 28 29 30 +view -> LazyList.from -> IndexedSeq.from Vector(26, 27, 28, 29, 30) 26 27 28 29 30 +view -> LazyList.from -> iterator 26 27 28 29 30 +view -> iterator -> IndexedSeq.from Vector(26, 27, 28, 29, 30) 26 27 28 29 30 +view -> iterator -> LazyList.from LazyList() 26 27 28 29 30 diff --git a/tests/run/view-iterator-stream.scala b/tests/run/view-iterator-stream.scala index 0e0c42d7d396..0f61f8e61fe3 100644 --- a/tests/run/view-iterator-stream.scala +++ b/tests/run/view-iterator-stream.scala @@ -2,17 +2,16 @@ import scala.language.postfixOps import scala.collection.{ mutable, immutable, generic } -import collection.TraversableView object Test { - type PerturberFn[T] = TraversableOnce[T] => TraversableOnce[T] - lazy val Id = new Perturber(Nil, identity[TraversableOnce[Int]] _) { } + type PerturberFn[T] = IterableOnce[T] => IterableOnce[T] + lazy val Id = new Perturber(Nil, identity[IterableOnce[Int]] _) { } class Perturber(val labels: List[String], val f: PerturberFn[Int]) extends PerturberFn[Int] { - def apply(xs: TraversableOnce[Int]): TraversableOnce[Int] = f(xs) - def show(xs: TraversableOnce[Int]): String = { + def apply(xs: IterableOnce[Int]): IterableOnce[Int] = f(xs) + def show(xs: IterableOnce[Int]): String = { val res = f(xs) val resString = "" + res - val rest = res.toTraversable + val rest = LazyList.from(res) val failed = (rest take 100).size == 100 "%-45s %-30s %s".format(toString, resString, @@ -28,19 +27,19 @@ object Test { def apply(label: String, f: PerturberFn[Int]) = new Perturber(List(label), f) } - def naturals = Stream 
from 1 - val toV : Perturber = Perturber("view", _.toTraversable.view) - val toI : Perturber = Perturber("toIterator", _.toIterator) - val toS : Perturber = Perturber("toStream", _.toStream) - val toIS : Perturber = Perturber("toIndexedSeq", _.toIndexedSeq) + def naturals = LazyList from 1 + val toV : Perturber = Perturber("view", LazyList.from(_).view) + val toI : Perturber = Perturber("iterator", _.iterator) + val toS : Perturber = Perturber("LazyList.from", LazyList.from(_)) + val toIS : Perturber = Perturber("IndexedSeq.from", IndexedSeq.from(_)) def p(ps: Perturber*): Perturber = if (ps.isEmpty) Id else ps.reduceLeft(_ and _) - def drop(n: Int): Perturber = Perturber("drop " + n, _.toIterator drop n) - def take(n: Int): Perturber = Perturber("take " + n, _.toIterator take n) + def drop(n: Int): Perturber = Perturber("drop " + n, _.iterator drop n) + def take(n: Int): Perturber = Perturber("take " + n, _.iterator take n) def slice(from: Int, until: Int): Perturber = Perturber( "slice(%d, %d)".format(from, until), - _.toTraversable.slice(from, until) + LazyList.from(_).slice(from, until) ) val fns = List[Perturber](toV, toI, toS, toIS) diff --git a/tests/run/viewtest.check b/tests/run/viewtest.check index 6e0fe81a6715..73a0b850ca28 100644 --- a/tests/run/viewtest.check +++ b/tests/run/viewtest.check @@ -1,12 +1,12 @@ -SeqViewZ(...) +View() ys defined mapping 1 2 -SeqViewMS(...) +SeqView() mapping 3 4 -SeqViewM(...) +SeqView() mapping 1 mapping 2 mapping 3 -List(2, 3, 4) +Vector(2, 3, 4) diff --git a/tests/run/viewtest.scala b/tests/run/viewtest.scala index 581958e9a6c6..d3d0ae8daf89 100755 --- a/tests/run/viewtest.scala +++ b/tests/run/viewtest.scala @@ -1,6 +1,6 @@ object Test extends App { import collection._ - val xs: SeqView[(String, Int), Seq[_]] = List("x").view.zip(Stream.from(0)) + val xs: View[(String, Int)] = List("x").view.zip(LazyList.from(0)) println(xs) val ys = List(1, 2, 3).view map { x => println("mapping "+x); x + 1 } @@ -9,16 +9,15 @@ object Test extends App { println(ys.tail) println(ys(2)) println(ys) - println(ys.force) + println(ys.toIndexedSeq) val zs = Array(1, 2, 3).view - val as: SeqView[Int, Array[Int]] = zs map (_ + 1) - val bs: Array[Int] = as.force + val as: SeqView[Int] = zs map (_ + 1) + val bs: IndexedSeq[Int] = as.toIndexedSeq val cs = zs.reverse - cs(0) += 1 - assert(cs.force.deep == Array(4, 2, 1).deep) - assert(zs(2) == 4) - assert(bs.deep == Array(2, 3, 4).deep) + assert(cs.toIndexedSeq == List(3, 2, 1)) + assert(zs(2) == 3) + assert(bs == List(2, 3, 4)) } /* crash confirmed. 
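Editorial aside, not part of the patch: the test migrations in the surrounding hunks all lean on the same handful of Scala 2.13 collection idioms (CollectionConverters instead of JavaConversions, filterInPlace/mapInPlace instead of retain/transform, LazyList instead of Stream, and to(...)/toIndexedSeq instead of force on views). The sketch below gathers those idioms in one self-contained place, assuming a 2.13 standard library; the object name Collections213Demo is hypothetical and exists only for illustration.

// Minimal sketch of the 2.13 idioms used throughout these test migrations.
import scala.jdk.CollectionConverters._
import scala.collection.mutable

object Collections213Demo {
  def main(args: Array[String]): Unit = {
    // JavaConversions is gone: wrap Java collections explicitly with .asScala.
    val mapJ = new java.util.HashMap[Int, String]
    val mapS: mutable.Map[Int, String] = mapJ.asScala
    (1 to 4).foreach(i => mapS += i -> i.toString)

    // retain was replaced by filterInPlace; transform by mapInPlace.
    mapS.filterInPlace((k, _) => k % 2 == 0)
    assert(mapS.keySet == Set(2, 4))

    val buf = (1 to 5).to(mutable.ArrayBuffer)
    buf.mapInPlace(_ * 2)
    assert(buf.sum == (1 to 5).sum * 2)

    // Stream gives way to LazyList; views are forced with to(...)/toIndexedSeq
    // rather than force.
    val firstSquares = LazyList.from(1).map(n => n * n).take(3).toList
    assert(firstSquares == List(1, 4, 9))
    assert(List(1, 2, 3).view.map(_ + 1).toIndexedSeq == Vector(2, 3, 4))
  }
}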
diff --git a/tests/run/withIndex.scala b/tests/run/withIndex.scala index ebf1941c9595..d2a809d61835 100644 --- a/tests/run/withIndex.scala +++ b/tests/run/withIndex.scala @@ -3,7 +3,7 @@ object Test { val ary: Array[String] = Array("a", "b", "c") val lst: List[String] = List("a", "b", "c") val itr: Iterator[String] = lst.iterator - val str: Stream[String] = lst.iterator.toStream + val str: LazyList[String] = lst.iterator.to(LazyList) Console.println(ary.zipWithIndex.toList) Console.println(lst.zipWithIndex.toList) @@ -19,7 +19,7 @@ object Test { val emptyArray = new Array[String](0) val emptyList: List[String] = Nil val emptyIterator = emptyList.iterator - val emptyStream: Stream[String] = Stream.empty + val emptyStream: LazyList[String] = LazyList.empty Console.println(emptyArray.zipWithIndex.toList) Console.println(emptyList.zipWithIndex.toList) diff --git a/tests/scala2-library/src/library/scala/AnyValCompanion.scala b/tests/scala2-library/src/library/scala/AnyValCompanion.scala deleted file mode 100644 index 302cafe0ecdf..000000000000 --- a/tests/scala2-library/src/library/scala/AnyValCompanion.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** A common supertype for companion classes of primitive types. - * - * A common trait for /companion/ objects of primitive types comes handy - * when parameterizing code on types. For instance, the specialized - * annotation is passed a sequence of types on which to specialize: - * {{{ - * class Tuple1[@specialized(Unit, Int, Double) T] - * }}} - * - */ -private[scala] trait AnyValCompanion extends Specializable { } diff --git a/tests/scala2-library/src/library/scala/App.scala b/tests/scala2-library/src/library/scala/App.scala deleted file mode 100644 index 52ef9ca60f2e..000000000000 --- a/tests/scala2-library/src/library/scala/App.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.compat.Platform.currentTime -import scala.collection.mutable.ListBuffer - -/** The `App` trait can be used to quickly turn objects - * into executable programs. Here is an example: - * {{{ - * object Main extends App { - * Console.println("Hello World: " + (args mkString ", ")) - * } - * }}} - * Here, object `Main` inherits the `main` method of `App`. - * - * `args` returns the current command line arguments as an array. - * - * ==Caveats== - * - * '''''It should be noted that this trait is implemented using the [[DelayedInit]] - * functionality, which means that fields of the object will not have been initialized - * before the main method has been executed.''''' - * - * It should also be noted that the `main` method should not be overridden: - * the whole class body becomes the “main method”. - * - * Future versions of this trait will no longer extend `DelayedInit`. - * - * @author Martin Odersky - * @version 2.1, 15/02/2011 - */ -trait App extends DelayedInit { - - /** The time when the execution of this program started, in milliseconds since 1 - * January 1970 UTC. 
*/ - @deprecatedOverriding("executionStart should not be overridden", "2.11.0") - val executionStart: Long = currentTime - - /** The command line arguments passed to the application's `main` method. - */ - @deprecatedOverriding("args should not be overridden", "2.11.0") - protected def args: Array[String] = _args - - private var _args: Array[String] = _ - - private val initCode = new ListBuffer[() => Unit] - - /** The init hook. This saves all initialization code for execution within `main`. - * This method is normally never called directly from user code. - * Instead it is called as compiler-generated code for those classes and objects - * (but not traits) that inherit from the `DelayedInit` trait and that do not - * themselves define a `delayedInit` method. - * @param body the initialization code to be stored for later execution - */ - @deprecated("the delayedInit mechanism will disappear", "2.11.0") - override def delayedInit(body: => Unit) { - initCode += (() => body) - } - - /** The main method. - * This stores all arguments so that they can be retrieved with `args` - * and then executes all initialization code segments in the order in which - * they were passed to `delayedInit`. - * @param args the arguments passed to the main method - */ - @deprecatedOverriding("main should not be overridden", "2.11.0") - def main(args: Array[String]) = { - this._args = args - for (proc <- initCode) proc() - if (util.Properties.propIsSet("scala.time")) { - val total = currentTime - executionStart - Console.println("[total " + total + "ms]") - } - } -} diff --git a/tests/scala2-library/src/library/scala/Array.scala b/tests/scala2-library/src/library/scala/Array.scala deleted file mode 100644 index 5d1c25732cce..000000000000 --- a/tests/scala2-library/src/library/scala/Array.scala +++ /dev/null @@ -1,555 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.collection.generic._ -import scala.collection.{ mutable, immutable } -import mutable.{ ArrayBuilder, ArraySeq } -import scala.reflect.ClassTag -import scala.runtime.ScalaRunTime.{ array_apply, array_update } - -/** Contains a fallback builder for arrays when the element type - * does not have a class tag. In that case a generic array is built. - */ -class FallbackArrayBuilding { - - /** A builder factory that generates a generic array. - * Called instead of `Array.newBuilder` if the element type of an array - * does not have a class tag. Note that fallbackBuilder factory - * needs an implicit parameter (otherwise it would not be dominated in - * implicit search by `Array.canBuildFrom`). We make sure that - * implicit search is always successful. - */ - implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, ArraySeq[T]] = - new CanBuildFrom[Array[_], T, ArraySeq[T]] { - def apply(from: Array[_]) = ArraySeq.newBuilder[T] - def apply() = ArraySeq.newBuilder[T] - } -} - -/** Utility methods for operating on arrays. - * For example: - * {{{ - * val a = Array(1, 2) - * val b = Array.ofDim[Int](2) - * val c = Array.concat(a, b) - * }}} - * where the array objects `a`, `b` and `c` have respectively the values - * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. 
- * - * @author Martin Odersky - * @version 1.0 - */ -object Array extends FallbackArrayBuilding { - val emptyBooleanArray = new Array[Boolean](0) - val emptyByteArray = new Array[Byte](0) - val emptyCharArray = new Array[Char](0) - val emptyDoubleArray = new Array[Double](0) - val emptyFloatArray = new Array[Float](0) - val emptyIntArray = new Array[Int](0) - val emptyLongArray = new Array[Long](0) - val emptyShortArray = new Array[Short](0) - val emptyObjectArray = new Array[Object](0) - - implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] = - new CanBuildFrom[Array[_], T, Array[T]] { - def apply(from: Array[_]) = ArrayBuilder.make[T]()(t) - def apply() = ArrayBuilder.make[T]()(t) - } - - /** - * Returns a new [[scala.collection.mutable.ArrayBuilder]]. - */ - def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(t) - - private def slowcopy(src : AnyRef, - srcPos : Int, - dest : AnyRef, - destPos : Int, - length : Int) { - var i = srcPos - var j = destPos - val srcUntil = srcPos + length - while (i < srcUntil) { - array_update(dest, j, array_apply(src, i)) - i += 1 - j += 1 - } - } - - /** Copy one array to another. - * Equivalent to Java's - * `System.arraycopy(src, srcPos, dest, destPos, length)`, - * except that this also works for polymorphic and boxed arrays. - * - * Note that the passed-in `dest` array will be modified by this call. - * - * @param src the source array. - * @param srcPos starting position in the source array. - * @param dest destination array. - * @param destPos starting position in the destination array. - * @param length the number of array elements to be copied. - * - * @see `java.lang.System#arraycopy` - */ - def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) { - val srcClass = src.getClass - if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) - java.lang.System.arraycopy(src, srcPos, dest, destPos, length) - else - slowcopy(src, srcPos, dest, destPos, length) - } - - /** Returns an array of length 0 */ - def empty[T: ClassTag]: Array[T] = new Array[T](0) - - /** Creates an array with given elements. - * - * @param xs the elements to put in the array - * @return an array containing all elements from xs. - */ - // Subject to a compiler optimization in Cleanup. - // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } - def apply[T: ClassTag](xs: T*): Array[T] = { - val array = new Array[T](xs.length) - var i = 0 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Boolean` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { - val array = new Array[Boolean](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Byte` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Byte, xs: Byte*): Array[Byte] = { - val array = new Array[Byte](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Short` objects */ - // Subject to a compiler optimization in Cleanup, see above. 
- def apply(x: Short, xs: Short*): Array[Short] = { - val array = new Array[Short](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Char` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Char, xs: Char*): Array[Char] = { - val array = new Array[Char](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Int` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Int, xs: Int*): Array[Int] = { - val array = new Array[Int](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Long` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Long, xs: Long*): Array[Long] = { - val array = new Array[Long](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Float` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Float, xs: Float*): Array[Float] = { - val array = new Array[Float](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Double` objects */ - // Subject to a compiler optimization in Cleanup, see above. - def apply(x: Double, xs: Double*): Array[Double] = { - val array = new Array[Double](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates an array of `Unit` objects */ - def apply(x: Unit, xs: Unit*): Array[Unit] = { - val array = new Array[Unit](xs.length + 1) - array(0) = x - var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } - array - } - - /** Creates array with given dimensions */ - def ofDim[T: ClassTag](n1: Int): Array[T] = - new Array[T](n1) - /** Creates a 2-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { - val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) - for (i <- 0 until n1) arr(i) = new Array[T](n2) - arr - // tabulate(n1)(_ => ofDim[T](n2)) - } - /** Creates a 3-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = - tabulate(n1)(_ => ofDim[T](n2, n3)) - /** Creates a 4-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = - tabulate(n1)(_ => ofDim[T](n2, n3, n4)) - /** Creates a 5-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = - tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) - - /** Concatenates all arrays into a single array. - * - * @param xss the given arrays - * @return the array created from concatenating `xss` - */ - def concat[T: ClassTag](xss: Array[T]*): Array[T] = { - val b = newBuilder[T] - b.sizeHint(xss.map(_.length).sum) - for (xs <- xss) b ++= xs - b.result() - } - - /** Returns an array that contains the results of some element computation a number - * of times. 
- * - * Note that this means that `elem` is computed a total of n times: - * {{{ - * scala> Array.fill(3){ math.random } - * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) - * }}} - * - * @param n the number of elements desired - * @param elem the element computation - * @return an Array of size n, where each element contains the result of computing - * `elem`. - */ - def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { - val b = newBuilder[T] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - /** Returns a two-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = - tabulate(n1)(_ => fill(n2)(elem)) - - /** Returns a three-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = - tabulate(n1)(_ => fill(n2, n3)(elem)) - - /** Returns a four-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = - tabulate(n1)(_ => fill(n2, n3, n4)(elem)) - - /** Returns a five-dimensional array that contains the results of some element - * computation a number of times. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - */ - def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = - tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) - - /** Returns an array containing values of a given function over a range of integer - * values starting from 0. - * - * @param n The number of elements in the array - * @param f The function computing element values - * @return A traversable consisting of elements `f(0),f(1), ..., f(n - 1)` - */ - def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { - val b = newBuilder[T] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - - /** Returns a two-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. 
- * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Returns a three-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Returns a four-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Returns a five-dimensional array containing values of a given function - * over ranges of integer values starting from `0`. - * - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - */ - def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Returns an array containing a sequence of increasing integers in a range. - * - * @param start the start value of the array - * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) - * @return the array with values in range `start, start + 1, ..., end - 1` - * up to, but excluding, `end`. - */ - def range(start: Int, end: Int): Array[Int] = range(start, end, 1) - - /** Returns an array containing equally spaced values in some integer interval. - * - * @param start the start value of the array - * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) - * @param step the increment value of the array (may not be zero) - * @return the array with values in `start, start + step, ...` up to, but excluding `end` - */ - def range(start: Int, end: Int, step: Int): Array[Int] = { - if (step == 0) throw new IllegalArgumentException("zero step") - val b = newBuilder[Int] - b.sizeHint(immutable.Range.count(start, end, step, isInclusive = false)) - - var i = start - while (if (step < 0) end < i else i < end) { - b += i - i += step - } - b.result() - } - - /** Returns an array containing repeated applications of a function to a start value. 
- * - * @param start the start value of the array - * @param len the number of elements returned by the array - * @param f the function that is repeatedly applied - * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { - val b = newBuilder[T] - - if (len > 0) { - b.sizeHint(len) - var acc = start - var i = 1 - b += acc - - while (i < len) { - acc = f(acc) - i += 1 - b += acc - } - } - b.result() - } - - /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. - * - * @param x the selector value - * @return sequence wrapped in a [[scala.Some]], if `x` is a Seq, otherwise `None` - */ - def unapplySeq[T](x: Array[T]): Option[IndexedSeq[T]] = - if (x == null) None else Some(x.toIndexedSeq) - // !!! the null check should to be necessary, but without it 2241 fails. Seems to be a bug - // in pattern matcher. @PP: I noted in #4364 I think the behavior is correct. -} - -/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation - * for Java's `T[]`. - * - * {{{ - * val numbers = Array(1, 2, 3, 4) - * val first = numbers(0) // read the first element - * numbers(3) = 100 // replace the 4th array element with 100 - * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two - * }}} - * - * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above - * example code. - * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to - * `update(Int, T)`. - * - * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion - * to [[scala.collection.mutable.ArrayOps]] (shown on line 4 of the example above) and a conversion - * to [[scala.collection.mutable.WrappedArray]] (a subtype of [[scala.collection.Seq]]). - * Both types make available many of the standard operations found in the Scala collections API. - * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, - * while the conversion to `WrappedArray` is permanent as all operations return a `WrappedArray`. - * - * The conversion to `ArrayOps` takes priority over the conversion to `WrappedArray`. For instance, - * consider the following code: - * - * {{{ - * val arr = Array(1, 2, 3) - * val arrReversed = arr.reverse - * val seqReversed : Seq[Int] = arr.reverse - * }}} - * - * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring - * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed - * by converting to `WrappedArray` first and invoking the variant of `reverse` that returns another - * `WrappedArray`. - * - * @author Martin Odersky - * @version 1.0 - * @see [[http://www.scala-lang.org/files/archive/spec/2.11/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) - * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. - * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
- * @hideImplicitConversion scala.Predef.booleanArrayOps - * @hideImplicitConversion scala.Predef.byteArrayOps - * @hideImplicitConversion scala.Predef.charArrayOps - * @hideImplicitConversion scala.Predef.doubleArrayOps - * @hideImplicitConversion scala.Predef.floatArrayOps - * @hideImplicitConversion scala.Predef.intArrayOps - * @hideImplicitConversion scala.Predef.longArrayOps - * @hideImplicitConversion scala.Predef.refArrayOps - * @hideImplicitConversion scala.Predef.shortArrayOps - * @hideImplicitConversion scala.Predef.unitArrayOps - * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray - * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray - * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray - * @define coll array - * @define Coll `Array` - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define collectExample - * @define undefinedorder - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is either `Array[B]` if an ClassTag is available for B or `ArraySeq[B]` otherwise. - * @define zipthatinfo $thatinfo - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current - * representation type `Repr` and the new element type `B`. - */ -final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { - - /** The length of the array */ - def length: Int = throw new Error() - - /** The element at given index. - * - * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. - * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. - * - * @param i the index - * @return the element at the given index - * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` - */ - def apply(i: Int): T = throw new Error() - - /** Update the element at given index. - * - * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. - * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. - * - * @param i the index - * @param x the value to be written at index `i` - * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` - */ - def update(i: Int, x: T) { throw new Error() } - - /** Clone the Array. - * - * @return A clone of the Array. 
- */ - override def clone(): Array[T] = throw new Error() -} diff --git a/tests/scala2-library/src/library/scala/Boolean.scala b/tests/scala2-library/src/library/scala/Boolean.scala deleted file mode 100644 index 017f10a28334..000000000000 --- a/tests/scala2-library/src/library/scala/Boolean.scala +++ /dev/null @@ -1,136 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Boolean` (equivalent to Java's `boolean` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Boolean` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Boolean]] => [[scala.runtime.RichBoolean]] - * which provides useful non-primitive operations. - */ -final abstract class Boolean private extends AnyVal { - /** Negates a Boolean expression. - * - * - `!a` results in `false` if and only if `a` evaluates to `true` and - * - `!a` results in `true` if and only if `a` evaluates to `false`. - * - * @return the negated expression - */ - def unary_! : Boolean - - /** Compares two Boolean expressions and returns `true` if they evaluate to the same value. - * - * `a == b` returns `true` if and only if - * - `a` and `b` are `true` or - * - `a` and `b` are `false`. - */ - def ==(x: Boolean): Boolean - - /** - * Compares two Boolean expressions and returns `true` if they evaluate to a different value. - * - * `a != b` returns `true` if and only if - * - `a` is `true` and `b` is `false` or - * - `a` is `false` and `b` is `true`. - */ - def !=(x: Boolean): Boolean - - /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true. - * - * `a || b` returns `true` if and only if - * - `a` is `true` or - * - `b` is `true` or - * - `a` and `b` are `true`. - * - * @note This method uses 'short-circuit' evaluation and - * behaves as if it was declared as `def ||(x: => Boolean): Boolean`. - * If `a` evaluates to `true`, `true` is returned without evaluating `b`. - */ - def ||(x: Boolean): Boolean - - /** Compares two Boolean expressions and returns `true` if both of them evaluate to true. - * - * `a && b` returns `true` if and only if - * - `a` and `b` are `true`. - * - * @note This method uses 'short-circuit' evaluation and - * behaves as if it was declared as `def &&(x: => Boolean): Boolean`. - * If `a` evaluates to `false`, `false` is returned without evaluating `b`. - */ - def &&(x: Boolean): Boolean - - // Compiler won't build with these seemingly more accurate signatures - // def ||(x: => Boolean): Boolean - // def &&(x: => Boolean): Boolean - - /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true. - * - * `a | b` returns `true` if and only if - * - `a` is `true` or - * - `b` is `true` or - * - `a` and `b` are `true`. - * - * @note This method evaluates both `a` and `b`, even if the result is already determined after evaluating `a`. - */ - def |(x: Boolean): Boolean - - /** Compares two Boolean expressions and returns `true` if both of them evaluate to true. - * - * `a & b` returns `true` if and only if - * - `a` and `b` are `true`. 
- * - * @note This method evaluates both `a` and `b`, even if the result is already determined after evaluating `a`. - */ - def &(x: Boolean): Boolean - - /** Compares two Boolean expressions and returns `true` if they evaluate to a different value. - * - * `a ^ b` returns `true` if and only if - * - `a` is `true` and `b` is `false` or - * - `a` is `false` and `b` is `true`. - */ - def ^(x: Boolean): Boolean - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Boolean] = ??? -} - -object Boolean extends AnyValCompanion { - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Boolean to be boxed - * @return a java.lang.Boolean offering `x` as its underlying value. - */ - def box(x: Boolean): java.lang.Boolean = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Boolean. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Boolean to be unboxed. - * @throws ClassCastException if the argument is not a java.lang.Boolean - * @return the Boolean resulting from calling booleanValue() on `x` - */ - def unbox(x: java.lang.Object): Boolean = ??? - - /** The String representation of the scala.Boolean companion object. */ - override def toString = "object scala.Boolean" - -} - diff --git a/tests/scala2-library/src/library/scala/Byte.scala b/tests/scala2-library/src/library/scala/Byte.scala deleted file mode 100644 index 3709586f2ec2..000000000000 --- a/tests/scala2-library/src/library/scala/Byte.scala +++ /dev/null @@ -1,479 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Byte`, a 8-bit signed integer (equivalent to Java's `byte` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Byte` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Byte]] => [[scala.runtime.RichByte]] - * which provides useful non-primitive operations. - */ -final abstract class Byte private extends AnyVal { - def toByte: Byte - def toShort: Short - def toChar: Char - def toInt: Int - def toLong: Long - def toFloat: Float - def toDouble: Double - - /** - * Returns the bitwise negation of this value. - * @example {{{ - * ~5 == -6 - * // in binary: ~00000101 == - * // 11111010 - * }}} - */ - def unary_~ : Int - /** Returns this value, unmodified. */ - def unary_+ : Int - /** Returns the negation of this value. */ - def unary_- : Int - - def +(x: String): String - - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. 
- * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Int): Int - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Long): Int - - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Byte): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Short): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Char): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Int): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Long): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Float): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Double): Boolean - - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Byte): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Short): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Char): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Int): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Long): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Float): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Double): Boolean - - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Byte): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. 
*/ - def <(x: Short): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Char): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Int): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Long): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Float): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Double): Boolean - - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Byte): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Short): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Char): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Int): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Long): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Float): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Double): Boolean - - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Byte): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Short): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Char): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Int): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Long): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Float): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Double): Boolean - - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Byte): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Short): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Char): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Int): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Long): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Float): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Double): Boolean - - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Byte): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Short): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Char): Int - /** - * Returns the bitwise OR of this value and `x`. 
- * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Int): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Long): Long - - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Byte): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Short): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Char): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Int): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Long): Long - - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Byte): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Short): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Char): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Int): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Long): Long - - /** Returns the sum of this value and `x`. */ - def +(x: Byte): Int - /** Returns the sum of this value and `x`. */ - def +(x: Short): Int - /** Returns the sum of this value and `x`. */ - def +(x: Char): Int - /** Returns the sum of this value and `x`. */ - def +(x: Int): Int - /** Returns the sum of this value and `x`. */ - def +(x: Long): Long - /** Returns the sum of this value and `x`. */ - def +(x: Float): Float - /** Returns the sum of this value and `x`. */ - def +(x: Double): Double - - /** Returns the difference of this value and `x`. */ - def -(x: Byte): Int - /** Returns the difference of this value and `x`. */ - def -(x: Short): Int - /** Returns the difference of this value and `x`. */ - def -(x: Char): Int - /** Returns the difference of this value and `x`. */ - def -(x: Int): Int - /** Returns the difference of this value and `x`. */ - def -(x: Long): Long - /** Returns the difference of this value and `x`. */ - def -(x: Float): Float - /** Returns the difference of this value and `x`. 
*/ - def -(x: Double): Double - - /** Returns the product of this value and `x`. */ - def *(x: Byte): Int - /** Returns the product of this value and `x`. */ - def *(x: Short): Int - /** Returns the product of this value and `x`. */ - def *(x: Char): Int - /** Returns the product of this value and `x`. */ - def *(x: Int): Int - /** Returns the product of this value and `x`. */ - def *(x: Long): Long - /** Returns the product of this value and `x`. */ - def *(x: Float): Float - /** Returns the product of this value and `x`. */ - def *(x: Double): Double - - /** Returns the quotient of this value and `x`. */ - def /(x: Byte): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Short): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Char): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Int): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Long): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Float): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Double): Double - - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Byte): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Short): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Char): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Int): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Long): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Float): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Double): Double - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Byte] = ??? -} - -object Byte extends AnyValCompanion { - /** The smallest value representable as a Byte. */ - final val MinValue = java.lang.Byte.MIN_VALUE - - /** The largest value representable as a Byte. */ - final val MaxValue = java.lang.Byte.MAX_VALUE - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Byte to be boxed - * @return a java.lang.Byte offering `x` as its underlying value. - */ - def box(x: Byte): java.lang.Byte = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Byte. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Byte to be unboxed. - * @throws ClassCastException if the argument is not a java.lang.Byte - * @return the Byte resulting from calling byteValue() on `x` - */ - def unbox(x: java.lang.Object): Byte = ??? - - /** The String representation of the scala.Byte companion object. */ - override def toString = "object scala.Byte" - /** Language mandated coercions from Byte to "wider" types. 
*/ - import scala.language.implicitConversions - implicit def byte2short(x: Byte): Short = x.toShort - implicit def byte2int(x: Byte): Int = x.toInt - implicit def byte2long(x: Byte): Long = x.toLong - implicit def byte2float(x: Byte): Float = x.toFloat - implicit def byte2double(x: Byte): Double = x.toDouble -} - diff --git a/tests/scala2-library/src/library/scala/Char.scala b/tests/scala2-library/src/library/scala/Char.scala deleted file mode 100644 index 7dbb0209c38d..000000000000 --- a/tests/scala2-library/src/library/scala/Char.scala +++ /dev/null @@ -1,478 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Char`, a 16-bit unsigned integer (equivalent to Java's `char` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Char` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Char]] => [[scala.runtime.RichChar]] - * which provides useful non-primitive operations. - */ -final abstract class Char private extends AnyVal { - def toByte: Byte - def toShort: Short - def toChar: Char - def toInt: Int - def toLong: Long - def toFloat: Float - def toDouble: Double - - /** - * Returns the bitwise negation of this value. - * @example {{{ - * ~5 == -6 - * // in binary: ~00000101 == - * // 11111010 - * }}} - */ - def unary_~ : Int - /** Returns this value, unmodified. */ - def unary_+ : Int - /** Returns the negation of this value. */ - def unary_- : Int - - def +(x: String): String - - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Int): Int - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. 
- * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Long): Int - - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Byte): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Short): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Char): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Int): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Long): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Float): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Double): Boolean - - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Byte): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Short): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Char): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Int): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Long): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Float): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Double): Boolean - - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Byte): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Short): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Char): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Int): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Long): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Float): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Double): Boolean - - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Byte): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Short): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Char): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Int): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Long): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Float): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. 
*/ - def <=(x: Double): Boolean - - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Byte): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Short): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Char): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Int): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Long): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Float): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Double): Boolean - - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Byte): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Short): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Char): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Int): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Long): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Float): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Double): Boolean - - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Byte): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Short): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Char): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Int): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Long): Long - - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Byte): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Short): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Char): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Int): Int - /** - * Returns the bitwise AND of this value and `x`. 
- * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Long): Long - - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Byte): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Short): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Char): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Int): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Long): Long - - /** Returns the sum of this value and `x`. */ - def +(x: Byte): Int - /** Returns the sum of this value and `x`. */ - def +(x: Short): Int - /** Returns the sum of this value and `x`. */ - def +(x: Char): Int - /** Returns the sum of this value and `x`. */ - def +(x: Int): Int - /** Returns the sum of this value and `x`. */ - def +(x: Long): Long - /** Returns the sum of this value and `x`. */ - def +(x: Float): Float - /** Returns the sum of this value and `x`. */ - def +(x: Double): Double - - /** Returns the difference of this value and `x`. */ - def -(x: Byte): Int - /** Returns the difference of this value and `x`. */ - def -(x: Short): Int - /** Returns the difference of this value and `x`. */ - def -(x: Char): Int - /** Returns the difference of this value and `x`. */ - def -(x: Int): Int - /** Returns the difference of this value and `x`. */ - def -(x: Long): Long - /** Returns the difference of this value and `x`. */ - def -(x: Float): Float - /** Returns the difference of this value and `x`. */ - def -(x: Double): Double - - /** Returns the product of this value and `x`. */ - def *(x: Byte): Int - /** Returns the product of this value and `x`. */ - def *(x: Short): Int - /** Returns the product of this value and `x`. */ - def *(x: Char): Int - /** Returns the product of this value and `x`. */ - def *(x: Int): Int - /** Returns the product of this value and `x`. */ - def *(x: Long): Long - /** Returns the product of this value and `x`. */ - def *(x: Float): Float - /** Returns the product of this value and `x`. */ - def *(x: Double): Double - - /** Returns the quotient of this value and `x`. */ - def /(x: Byte): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Short): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Char): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Int): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Long): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Float): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Double): Double - - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Byte): Int - /** Returns the remainder of the division of this value by `x`. 
*/ - def %(x: Short): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Char): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Int): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Long): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Float): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Double): Double - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Char] = ??? -} - -object Char extends AnyValCompanion { - /** The smallest value representable as a Char. */ - final val MinValue = java.lang.Character.MIN_VALUE - - /** The largest value representable as a Char. */ - final val MaxValue = java.lang.Character.MAX_VALUE - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToCharacter`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Char to be boxed - * @return a java.lang.Character offering `x` as its underlying value. - */ - def box(x: Char): java.lang.Character = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Character. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToChar`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Character to be unboxed. - * @throws ClassCastException if the argument is not a java.lang.Character - * @return the Char resulting from calling charValue() on `x` - */ - def unbox(x: java.lang.Object): Char = ??? - - /** The String representation of the scala.Char companion object. */ - override def toString = "object scala.Char" - /** Language mandated coercions from Char to "wider" types. */ - import scala.language.implicitConversions - implicit def char2int(x: Char): Int = x.toInt - implicit def char2long(x: Char): Long = x.toLong - implicit def char2float(x: Char): Float = x.toFloat - implicit def char2double(x: Char): Double = x.toDouble -} - diff --git a/tests/scala2-library/src/library/scala/Cloneable.scala b/tests/scala2-library/src/library/scala/Cloneable.scala deleted file mode 100644 index 2810e3ca9613..000000000000 --- a/tests/scala2-library/src/library/scala/Cloneable.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** - * Classes extending this trait are cloneable across platforms (Java, .NET). 
- */ -trait Cloneable extends java.lang.Cloneable diff --git a/tests/scala2-library/src/library/scala/Console.scala b/tests/scala2-library/src/library/scala/Console.scala deleted file mode 100644 index bc702cfaad41..000000000000 --- a/tests/scala2-library/src/library/scala/Console.scala +++ /dev/null @@ -1,340 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, PrintStream, Reader } -import scala.io.{ AnsiColor, StdIn } -import scala.util.DynamicVariable - -/** Implements functionality for printing Scala values on the terminal. For reading values - * use [[scala.io.StdIn$ StdIn]]. - * Also defines constants for marking up text on ANSI terminals. - * - * == Console Output == - * - * Use the print methods to output text. - * {{{ - * scala> Console.printf( - * "Today the outside temperature is a balmy %.1f°C. %<.1f°C beats the previous record of %.1f°C.\n", - * -137.0, - * -135.05) - * Today the outside temperature is a balmy -137.0°C. -137.0°C beats the previous record of -135.1°C. - * }}} - * - * == ANSI escape codes == - * Use the ANSI escape codes for colorizing console output either to STDOUT or STDERR. - * {{{ - * import Console.{GREEN, RED, RESET, YELLOW_B, UNDERLINED} - * - * object PrimeTest { - * - * def isPrime(): Unit = { - * - * val candidate = io.StdIn.readInt().ensuring(_ > 1) - * - * val prime = (2 to candidate - 1).forall(candidate % _ != 0) - * - * if (prime) - * Console.println(s"${RESET}${GREEN}yes${RESET}") - * else - * Console.err.println(s"${RESET}${YELLOW_B}${RED}${UNDERLINED}NO!${RESET}") - * } - * - * def main(args: Array[String]): Unit = isPrime() - * - * } - * }}} - * - * - * - * - * - * - * - * - *
- *    $ scala PrimeTest
- *    1234567891
- *    yes
- *    $ scala PrimeTest
- *    56474
- *    NO!
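The Console Output section of the deleted Console.scala above shows printf only as a REPL transcript. Here is a compilable sketch of the same call, assuming nothing beyond java.util.Formatter semantics; the PrintfDemo name is illustrative.

object PrintfDemo {
  def main(args: Array[String]): Unit = {
    // The '<' flag makes a format specifier reuse the argument of the previous one,
    // which is why -137.0 is printed twice from a single argument.
    Console.printf(
      "Today the outside temperature is a balmy %.1f°C. %<.1f°C beats the previous record of %.1f°C.\n",
      -137.0,
      -135.05)
    Console.flush()  // only needed for output not terminated by a newline; shown for completeness
  }
}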
- * - * == IO redefinition == - * - * Use IO redefinition to temporarily swap in a different set of input and/or output streams. In this example the stream based - * method above is wrapped into a function. - * - * {{{ - * import java.io.{ByteArrayOutputStream, StringReader} - * - * object FunctionalPrimeTest { - * - * def isPrime(candidate: Int): Boolean = { - * - * val input = new StringReader(s"$candidate\n") - * val outCapture = new ByteArrayOutputStream - * val errCapture = new ByteArrayOutputStream - * - * Console.withIn(input) { - * Console.withOut(outCapture) { - * Console.withErr(errCapture) { - * PrimeTest.isPrime() - * } - * } - * } - * - * if (outCapture.toByteArray.nonEmpty) // "yes" - * true - * else if (errCapture.toByteArray.nonEmpty) // "NO!" - * false - * else throw new IllegalArgumentException(candidate.toString) - * } - * - * def main(args: Array[String]): Unit = { - * val primes = (2 to 50) filter (isPrime) - * println(s"First primes: $primes") - * } - * - * } - * }}} - * - * - * - * - * - *
$ scala FunctionalPrimeTest
First primes: Vector(2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47)
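The IO redefinition example above swaps in, out and err all at once. Below is a smaller sketch of just Console.withOut, showing that the rebinding is scoped to the thunk (it is held in a scala.util.DynamicVariable, as the Scaladoc and the implementation note); the WithOutDemo name is illustrative and not part of the deleted file.

import java.io.ByteArrayOutputStream

object WithOutDemo {
  def main(args: Array[String]): Unit = {
    val capture = new ByteArrayOutputStream
    Console.withOut(capture) {
      println("captured")        // Predef.println delegates to Console.out, so this lands in `capture`
    }
    println("not captured")      // outside the thunk the previous default output is restored
    println("buffer contains: " + capture.toString.trim)
  }
}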
- * - * @author Matthias Zenger - * @version 1.0, 03/09/2003 - * - * @groupname console-output Console Output - * @groupprio console-output 30 - * @groupdesc console-output These methods provide output via the console. - * - * @groupname io-default IO Defaults - * @groupprio io-default 50 - * @groupdesc io-default These values provide direct access to the standard IO channels - * - * @groupname io-redefinition IO Redefinition - * @groupprio io-redefinition 60 - * @groupdesc io-redefinition These methods allow substituting alternative streams for the duration of - * a body of code. Threadsafe by virtue of [[scala.util.DynamicVariable]]. - * - */ -object Console extends DeprecatedConsole with AnsiColor { - private val outVar = new DynamicVariable[PrintStream](java.lang.System.out) - private val errVar = new DynamicVariable[PrintStream](java.lang.System.err) - private val inVar = new DynamicVariable[BufferedReader]( - new BufferedReader(new InputStreamReader(java.lang.System.in))) - - protected def setOutDirect(out: PrintStream): Unit = outVar.value = out - protected def setErrDirect(err: PrintStream): Unit = errVar.value = err - protected def setInDirect(in: BufferedReader): Unit = inVar.value = in - - /** The default output, can be overridden by `withOut` - * @group io-default - */ - def out = outVar.value - /** The default error, can be overridden by `withErr` - * @group io-default - */ - def err = errVar.value - /** The default input, can be overridden by `withIn` - * @group io-default - */ - def in = inVar.value - - /** Sets the default output stream for the duration - * of execution of one thunk. - * - * @example {{{ - * withOut(Console.err) { println("This goes to default _error_") } - * }}} - * - * @param out the new output stream. - * @param thunk the code to execute with - * the new output stream active - * @return the results of `thunk` - * @see `withOut[T](out:OutputStream)(thunk: => T)` - * @group io-redefinition - */ - def withOut[T](out: PrintStream)(thunk: =>T): T = - outVar.withValue(out)(thunk) - - /** Sets the default output stream for the duration - * of execution of one thunk. - * - * @param out the new output stream. - * @param thunk the code to execute with - * the new output stream active - * @return the results of `thunk` - * @see `withOut[T](out:PrintStream)(thunk: => T)` - * @group io-redefinition - */ - def withOut[T](out: OutputStream)(thunk: =>T): T = - withOut(new PrintStream(out))(thunk) - - /** Set the default error stream for the duration - * of execution of one thunk. - * @example {{{ - * withErr(Console.out) { err.println("This goes to default _out_") } - * }}} - * - * @param err the new error stream. - * @param thunk the code to execute with - * the new error stream active - * @return the results of `thunk` - * @see `withErr[T](err:OutputStream)(thunk: =>T)` - * @group io-redefinition - */ - def withErr[T](err: PrintStream)(thunk: =>T): T = - errVar.withValue(err)(thunk) - - /** Sets the default error stream for the duration - * of execution of one thunk. - * - * @param err the new error stream. - * @param thunk the code to execute with - * the new error stream active - * @return the results of `thunk` - * @see `withErr[T](err:PrintStream)(thunk: =>T)` - * @group io-redefinition - */ - def withErr[T](err: OutputStream)(thunk: =>T): T = - withErr(new PrintStream(err))(thunk) - - /** Sets the default input stream for the duration - * of execution of one thunk. 
- * - * @example {{{ - * val someFile:Reader = openFile("file.txt") - * withIn(someFile) { - * // Reads a line from file.txt instead of default input - * println(readLine) - * } - * }}} - * - * @param thunk the code to execute with - * the new input stream active - * - * @return the results of `thunk` - * @see `withIn[T](in:InputStream)(thunk: =>T)` - * @group io-redefinition - */ - def withIn[T](reader: Reader)(thunk: =>T): T = - inVar.withValue(new BufferedReader(reader))(thunk) - - /** Sets the default input stream for the duration - * of execution of one thunk. - * - * @param in the new input stream. - * @param thunk the code to execute with - * the new input stream active - * @return the results of `thunk` - * @see `withIn[T](reader:Reader)(thunk: =>T)` - * @group io-redefinition - */ - def withIn[T](in: InputStream)(thunk: =>T): T = - withIn(new InputStreamReader(in))(thunk) - - /** Prints an object to `out` using its `toString` method. - * - * @param obj the object to print; may be null. - * @group console-output - */ - def print(obj: Any) { - out.print(if (null == obj) "null" else obj.toString()) - } - - /** Flushes the output stream. This function is required when partial - * output (i.e. output not terminated by a newline character) has - * to be made visible on the terminal. - * @group console-output - */ - def flush() { out.flush() } - - /** Prints a newline character on the default output. - * @group console-output - */ - def println() { out.println() } - - /** Prints out an object to the default output, followed by a newline character. - * - * @param x the object to print. - * @group console-output - */ - def println(x: Any) { out.println(x) } - - /** Prints its arguments as a formatted string to the default output, - * based on a string pattern (in a fashion similar to printf in C). - * - * The interpretation of the formatting patterns is described in [[java.util.Formatter]]. - * - * @param text the pattern for formatting the arguments. - * @param args the arguments used to instantiating the pattern. - * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments - * @group console-output - */ - def printf(text: String, args: Any*) { out.print(text format (args : _*)) } -} - -private[scala] abstract class DeprecatedConsole { - self: Console.type => - - /** Internal usage only. 
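For reference, a small usage sketch of the stream redefinition documented above: `withOut` captures everything the thunk prints into a buffer, and `withIn` substitutes a canned `Reader` for standard input. The object name `ConsoleRedefDemo` and the sample strings are illustrative only.
{{{
import java.io.{ByteArrayOutputStream, StringReader}

object ConsoleRedefDemo {
  def main(args: Array[String]): Unit = {
    // Everything printed inside the thunk goes to `buffer`, not to stdout.
    val buffer = new ByteArrayOutputStream()
    Console.withOut(buffer) {
      println("captured, not printed")
      Console.flush()
    }
    // Read from a canned Reader instead of stdin for the duration of the thunk.
    val line = Console.withIn(new StringReader("canned input")) {
      Console.in.readLine()
    }
    println(buffer.toString.trim) // captured, not printed
    println(line)                 // canned input
  }
}
}}}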
*/ - protected def setOutDirect(out: PrintStream): Unit - protected def setErrDirect(err: PrintStream): Unit - protected def setInDirect(in: BufferedReader): Unit - - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readBoolean(): Boolean = StdIn.readBoolean() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readByte(): Byte = StdIn.readByte() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readChar(): Char = StdIn.readChar() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readDouble(): Double = StdIn.readDouble() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readFloat(): Float = StdIn.readFloat() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readInt(): Int = StdIn.readInt() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(text: String, args: Any*): String = StdIn.readLine(text, args: _*) - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLong(): Long = StdIn.readLong() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readShort(): Short = StdIn.readShort() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf(format: String): List[Any] = StdIn.readf(format) - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf1(format: String): Any = StdIn.readf1(format) - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf2(format: String): (Any, Any) = StdIn.readf2(format) - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf3(format: String): (Any, Any, Any) = StdIn.readf3(format) - - /** Sets the default output stream. - * - * @param out the new output stream. - */ - @deprecated("use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out) - - /** Sets the default output stream. - * - * @param out the new output stream. - */ - @deprecated("use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out)) - - /** Sets the default error stream. - * - * @param err the new error stream. - */ - @deprecated("use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err) - - /** Sets the default error stream. - * - * @param err the new error stream. - */ - @deprecated("use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err)) - - /** Sets the default input stream. - * - * @param reader specifies the new input stream. - */ - @deprecated("use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader)) - - /** Sets the default input stream. - * - * @param in the new input stream. 
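The deprecated readers above simply forward to `scala.io.StdIn`, so migrating is a one-line change per call site. A minimal interactive sketch of the replacement (the prompt text and variable names are illustrative):
{{{
import scala.io.StdIn

object ReadMigration {
  def main(args: Array[String]): Unit = {
    // Deprecated since 2.11:  val name = Console.readLine("Name: ")
    // Preferred replacement:
    val name = StdIn.readLine("Name: ") // readLine(text, args*) prints the prompt first
    val age  = StdIn.readInt()
    println(s"$name is $age")
  }
}
}}}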
- */ - @deprecated("use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in))) -} diff --git a/tests/scala2-library/src/library/scala/DelayedInit.scala b/tests/scala2-library/src/library/scala/DelayedInit.scala deleted file mode 100644 index 8dc841a7e38c..000000000000 --- a/tests/scala2-library/src/library/scala/DelayedInit.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** Classes and objects (but note, not traits) inheriting the `DelayedInit` - * marker trait will have their initialization code rewritten as follows: - * `code` becomes `delayedInit(code)`. - * - * Initialization code comprises all statements and all value definitions - * that are executed during initialization. - * - * Example: - * {{{ - * trait Helper extends DelayedInit { - * def delayedInit(body: => Unit) = { - * println("dummy text, printed before initialization of C") - * body // evaluates the initialization code of C - * } - * } - * - * class C extends Helper { - * println("this is the initialization code of C") - * } - * - * object Test extends App { - * val c = new C - * } - * }}} - * - * Should result in the following being printed: - * {{{ - * dummy text, printed before initialization of C - * this is the initialization code of C - * }}} - * - * @see "Delayed Initialization" subsection of the Scala Language Specification (section 5.1) - * - * @author Martin Odersky - */ -@deprecated("DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1", "2.11.0") -trait DelayedInit { - def delayedInit(x: => Unit): Unit -} diff --git a/tests/scala2-library/src/library/scala/Double.scala b/tests/scala2-library/src/library/scala/Double.scala deleted file mode 100644 index 08bcb9fefcdd..000000000000 --- a/tests/scala2-library/src/library/scala/Double.scala +++ /dev/null @@ -1,250 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Double` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Double]] => [[scala.runtime.RichDouble]] - * which provides useful non-primitive operations. - */ -final abstract class Double private extends AnyVal { - def toByte: Byte - def toShort: Short - def toChar: Char - def toInt: Int - def toLong: Long - def toFloat: Float - def toDouble: Double - - /** Returns this value, unmodified. */ - def unary_+ : Double - /** Returns the negation of this value. */ - def unary_- : Double - - def +(x: String): String - - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Byte): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. 
*/ - def ==(x: Short): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Char): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Int): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Long): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Float): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Double): Boolean - - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Byte): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Short): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Char): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Int): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Long): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Float): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Double): Boolean - - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Byte): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Short): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Char): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Int): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Long): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Float): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Double): Boolean - - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Byte): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Short): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Char): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Int): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Long): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Float): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Double): Boolean - - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Byte): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Short): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Char): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Int): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Long): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Float): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Double): Boolean - - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. 
*/ - def >=(x: Byte): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Short): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Char): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Int): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Long): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Float): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Double): Boolean - - /** Returns the sum of this value and `x`. */ - def +(x: Byte): Double - /** Returns the sum of this value and `x`. */ - def +(x: Short): Double - /** Returns the sum of this value and `x`. */ - def +(x: Char): Double - /** Returns the sum of this value and `x`. */ - def +(x: Int): Double - /** Returns the sum of this value and `x`. */ - def +(x: Long): Double - /** Returns the sum of this value and `x`. */ - def +(x: Float): Double - /** Returns the sum of this value and `x`. */ - def +(x: Double): Double - - /** Returns the difference of this value and `x`. */ - def -(x: Byte): Double - /** Returns the difference of this value and `x`. */ - def -(x: Short): Double - /** Returns the difference of this value and `x`. */ - def -(x: Char): Double - /** Returns the difference of this value and `x`. */ - def -(x: Int): Double - /** Returns the difference of this value and `x`. */ - def -(x: Long): Double - /** Returns the difference of this value and `x`. */ - def -(x: Float): Double - /** Returns the difference of this value and `x`. */ - def -(x: Double): Double - - /** Returns the product of this value and `x`. */ - def *(x: Byte): Double - /** Returns the product of this value and `x`. */ - def *(x: Short): Double - /** Returns the product of this value and `x`. */ - def *(x: Char): Double - /** Returns the product of this value and `x`. */ - def *(x: Int): Double - /** Returns the product of this value and `x`. */ - def *(x: Long): Double - /** Returns the product of this value and `x`. */ - def *(x: Float): Double - /** Returns the product of this value and `x`. */ - def *(x: Double): Double - - /** Returns the quotient of this value and `x`. */ - def /(x: Byte): Double - /** Returns the quotient of this value and `x`. */ - def /(x: Short): Double - /** Returns the quotient of this value and `x`. */ - def /(x: Char): Double - /** Returns the quotient of this value and `x`. */ - def /(x: Int): Double - /** Returns the quotient of this value and `x`. */ - def /(x: Long): Double - /** Returns the quotient of this value and `x`. */ - def /(x: Float): Double - /** Returns the quotient of this value and `x`. */ - def /(x: Double): Double - - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Byte): Double - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Short): Double - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Char): Double - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Int): Double - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Long): Double - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Float): Double - /** Returns the remainder of the division of this value by `x`. 
*/ - def %(x: Double): Double - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Double] = ??? -} - -object Double extends AnyValCompanion { - /** The smallest positive value greater than 0.0d which is - * representable as a Double. - */ - final val MinPositiveValue = java.lang.Double.MIN_VALUE - final val NaN = java.lang.Double.NaN - final val PositiveInfinity = java.lang.Double.POSITIVE_INFINITY - final val NegativeInfinity = java.lang.Double.NEGATIVE_INFINITY - - /** The negative number with the greatest (finite) absolute value which is representable - * by a Double. Note that it differs from [[java.lang.Double.MIN_VALUE]], which - * is the smallest positive value representable by a Double. In Scala that number - * is called Double.MinPositiveValue. - */ - final val MinValue = -java.lang.Double.MAX_VALUE - - /** The largest finite positive number representable as a Double. */ - final val MaxValue = java.lang.Double.MAX_VALUE - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Double to be boxed - * @return a java.lang.Double offering `x` as its underlying value. - */ - def box(x: Double): java.lang.Double = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Double. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Double to be unboxed. - * @throws ClassCastException if the argument is not a java.lang.Double - * @return the Double resulting from calling doubleValue() on `x` - */ - def unbox(x: java.lang.Object): Double = ??? - - /** The String representation of the scala.Double companion object. */ - override def toString = "object scala.Double" -} - diff --git a/tests/scala2-library/src/library/scala/Dynamic.scala b/tests/scala2-library/src/library/scala/Dynamic.scala deleted file mode 100644 index 56eb4cfcf45b..000000000000 --- a/tests/scala2-library/src/library/scala/Dynamic.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** A marker trait that enables dynamic invocations. Instances `x` of this - * trait allow method invocations `x.meth(args)` for arbitrary method - * names `meth` and argument lists `args` as well as field accesses - * `x.field` for arbitrary field names `field`. - * - * If a call is not natively supported by `x` (i.e. 
if type checking - * fails), it is rewritten according to the following rules: - * - * {{{ - * foo.method("blah") ~~> foo.applyDynamic("method")("blah") - * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah")) - * foo.method(x = 1, 2) ~~> foo.applyDynamicNamed("method")(("x", 1), ("", 2)) - * foo.field ~~> foo.selectDynamic("field") - * foo.varia = 10 ~~> foo.updateDynamic("varia")(10) - * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13) - * foo.arr(10) ~~> foo.applyDynamic("arr")(10) - * }}} - * - * As of Scala 2.10, defining direct or indirect subclasses of this trait - * is only possible if the language feature `dynamics` is enabled. - */ -trait Dynamic extends Any - - diff --git a/tests/scala2-library/src/library/scala/Enumeration.scala b/tests/scala2-library/src/library/scala/Enumeration.scala deleted file mode 100644 index aad63d232fa3..000000000000 --- a/tests/scala2-library/src/library/scala/Enumeration.scala +++ /dev/null @@ -1,292 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.collection.{ mutable, immutable, generic, SortedSetLike, AbstractSet } -import java.lang.reflect.{ Method => JMethod, Field => JField } -import scala.reflect.NameTransformer._ -import scala.util.matching.Regex - -/** Defines a finite set of values specific to the enumeration. Typically - * these values enumerate all possible forms something can take and provide - * a lightweight alternative to case classes. - * - * Each call to a `Value` method adds a new unique value to the enumeration. - * To be accessible, these values are usually defined as `val` members of - * the evaluation. - * - * All values in an enumeration share a common, unique type defined as the - * `Value` type member of the enumeration (`Value` selected on the stable - * identifier path of the enumeration instance). - * - * @example {{{ - * object Main extends App { - * - * object WeekDay extends Enumeration { - * type WeekDay = Value - * val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value - * } - * import WeekDay._ - * - * def isWorkingDay(d: WeekDay) = ! (d == Sat || d == Sun) - * - * WeekDay.values filter isWorkingDay foreach println - * } - * // output: - * // Mon - * // Tue - * // Wed - * // Thu - * // Fri - * }}} - * - * @param initial The initial value from which to count the integers that - * identifies values at run-time. - * @author Matthias Zenger - */ -@SerialVersionUID(8476000850333817230L) -abstract class Enumeration (initial: Int) extends Serializable { - thisenum => - - def this() = this(0) - - /* Note that `readResolve` cannot be private, since otherwise - the JVM does not invoke it when deserializing subclasses. */ - protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null) - - /** The name of this enumeration. - */ - override def toString = - ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split - Regex.quote(NAME_JOIN_STRING)).last - - /** The mapping from the integer used to identify values to the actual - * values. */ - private val vmap: mutable.Map[Int, Value] = new mutable.HashMap - - /** The cache listing all values of this enumeration. */ - @transient private var vset: ValueSet = null - @transient @volatile private var vsetDefined = false - - /** The mapping from the integer used to identify values to their - * names. 
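Referring back to the `Dynamic` rewrite rules above, a minimal sketch of a map-backed object whose field selections and method calls are routed through `selectDynamic`/`applyDynamic`; the `Props` class and its data are illustrative.
{{{
import scala.language.dynamics

// The compiler rewrites accesses on a Dynamic receiver:
//   props.user          ~~> props.selectDynamic("user")
//   props.describe("x") ~~> props.applyDynamic("describe")("x")
class Props(values: Map[String, String]) extends Dynamic {
  def selectDynamic(field: String): String = values.getOrElse(field, "")
  def applyDynamic(method: String)(args: String*): String =
    s"$method(${args.mkString(", ")}) on ${values.size} entries"
}

object DynamicDemo {
  def main(args: Array[String]): Unit = {
    val props = new Props(Map("user" -> "demo"))
    println(props.user)          // selectDynamic("user")       => demo
    println(props.describe("x")) // applyDynamic("describe")("x")
  }
}
}}}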
*/ - private val nmap: mutable.Map[Int, String] = new mutable.HashMap - - /** The values of this enumeration as a set. - */ - def values: ValueSet = { - if (!vsetDefined) { - vset = (ValueSet.newBuilder ++= vmap.values).result() - vsetDefined = true - } - vset - } - - /** The integer to use to identify the next created value. */ - protected var nextId: Int = initial - - /** The string to use to name the next created value. */ - protected var nextName: Iterator[String] = _ - - private def nextNameOrNull = - if (nextName != null && nextName.hasNext) nextName.next() else null - - /** The highest integer amongst those used to identify values in this - * enumeration. */ - private var topId = initial - - /** The lowest integer amongst those used to identify values in this - * enumeration, but no higher than 0. */ - private var bottomId = if(initial < 0) initial else 0 - - /** The one higher than the highest integer amongst those used to identify - * values in this enumeration. */ - final def maxId = topId - - /** The value of this enumeration with given id `x` - */ - final def apply(x: Int): Value = vmap(x) - - /** Return a `Value` from this `Enumeration` whose name matches - * the argument `s`. The names are determined automatically via reflection. - * - * @param s an `Enumeration` name - * @return the `Value` of this `Enumeration` if its name matches `s` - * @throws NoSuchElementException if no `Value` with a matching - * name is in this `Enumeration` - */ - final def withName(s: String): Value = values.find(_.toString == s).getOrElse( - throw new NoSuchElementException(s"No value found for '$s'")) - - /** Creates a fresh value, part of this enumeration. */ - protected final def Value: Value = Value(nextId) - - /** Creates a fresh value, part of this enumeration, identified by the - * integer `i`. - * - * @param i An integer that identifies this value at run-time. It must be - * unique amongst all values of the enumeration. - * @return Fresh value identified by `i`. - */ - protected final def Value(i: Int): Value = Value(i, nextNameOrNull) - - /** Creates a fresh value, part of this enumeration, called `name`. - * - * @param name A human-readable name for that value. - * @return Fresh value called `name`. - */ - protected final def Value(name: String): Value = Value(nextId, name) - - /** Creates a fresh value, part of this enumeration, called `name` - * and identified by the integer `i`. - * - * @param i An integer that identifies this value at run-time. It must be - * unique amongst all values of the enumeration. - * @param name A human-readable name for that value. - * @return Fresh value with the provided identifier `i` and name `name`. - */ - protected final def Value(i: Int, name: String): Value = new Val(i, name) - - private def populateNameMap() { - val fields: Array[JField] = getClass.getDeclaredFields - def isValDef(m: JMethod): Boolean = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType) - - // The list of possible Value methods: 0-args which return a conforming type - val methods: Array[JMethod] = getClass.getMethods filter (m => m.getParameterTypes.isEmpty && - classOf[Value].isAssignableFrom(m.getReturnType) && - m.getDeclaringClass != classOf[Enumeration] && - isValDef(m)) - methods foreach { m => - val name = m.getName - // invoke method to obtain actual `Value` instance - val value = m.invoke(this).asInstanceOf[Value] - // verify that outer points to the correct Enumeration: ticket #3616. 
- if (value.outerEnum eq thisenum) { - val id = Int.unbox(classOf[Val] getMethod "id" invoke value) - nmap += ((id, name)) - } - } - } - - /* Obtains the name for the value with id `i`. If no name is cached - * in `nmap`, it populates `nmap` using reflection. - */ - private def nameOf(i: Int): String = synchronized { nmap.getOrElse(i, { populateNameMap() ; nmap(i) }) } - - /** The type of the enumerated values. */ - @SerialVersionUID(7091335633555234129L) - abstract class Value extends Ordered[Value] with Serializable { - /** the id and bit location of this enumeration value */ - def id: Int - /** a marker so we can tell whose values belong to whom come reflective-naming time */ - private[Enumeration] val outerEnum = thisenum - - override def compare(that: Value): Int = - if (this.id < that.id) -1 - else if (this.id == that.id) 0 - else 1 - override def equals(other: Any) = other match { - case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id) - case _ => false - } - override def hashCode: Int = id.## - - /** Create a ValueSet which contains this value and another one */ - def + (v: Value) = ValueSet(this, v) - } - - /** A class implementing the [[scala.Enumeration.Value]] type. This class - * can be overridden to change the enumeration's naming and integer - * identification behaviour. - */ - @SerialVersionUID(0 - 3501153230598116017L) - protected class Val(i: Int, name: String) extends Value with Serializable { - def this(i: Int) = this(i, nextNameOrNull) - def this(name: String) = this(nextId, name) - def this() = this(nextId) - - assert(!vmap.isDefinedAt(i), "Duplicate id: " + i) - vmap(i) = this - vsetDefined = false - nextId = i + 1 - if (nextId > topId) topId = nextId - if (i < bottomId) bottomId = i - def id = i - override def toString() = - if (name != null) name - else try thisenum.nameOf(i) - catch { case _: NoSuchElementException => "" } - - protected def readResolve(): AnyRef = { - val enumeration = thisenum.readResolve().asInstanceOf[Enumeration] - if (enumeration.vmap == null) this - else enumeration.vmap(i) - } - } - - /** An ordering by id for values of this set */ - object ValueOrdering extends Ordering[Value] { - def compare(x: Value, y: Value): Int = x compare y - } - - /** A class for sets of values. - * Iterating through this set will yield values in increasing order of their ids. - * - * @param nnIds The set of ids of values (adjusted so that the lowest value does - * not fall below zero), organized as a `BitSet`. 
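A small sketch of the `Value` id handling and `ValueSet` construction described above, using explicit ids and the `+` operator on values; the `Weekend` enumeration and its ids are illustrative.
{{{
object Weekend extends Enumeration {
  val Sat = Value(6, "Saturday")
  val Sun = Value(7, "Sunday")
}

object ValueSetDemo {
  def main(args: Array[String]): Unit = {
    import Weekend._
    val both = Sat + Sun                 // Value + Value builds a ValueSet
    println(both.contains(Sun))          // true
    println(Weekend(6))                  // apply(id) looks a value up by id: Saturday
    println(Weekend.withName("Sunday"))  // name lookup; throws NoSuchElementException if absent
    println(Weekend.maxId)               // one above the highest id used: 8
  }
}
}}}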
- * @define Coll `collection.immutable.SortedSet` - */ - class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet) - extends AbstractSet[Value] - with immutable.SortedSet[Value] - with SortedSetLike[Value, ValueSet] - with Serializable { - - implicit def ordering: Ordering[Value] = ValueOrdering - def rangeImpl(from: Option[Value], until: Option[Value]): ValueSet = - new ValueSet(nnIds.rangeImpl(from.map(_.id - bottomId), until.map(_.id - bottomId))) - - override def empty = ValueSet.empty - def contains(v: Value) = nnIds contains (v.id - bottomId) - def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId)) - def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId)) - def iterator = nnIds.iterator map (id => thisenum.apply(bottomId + id)) - override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id map (id => thisenum.apply(bottomId + id)) - override def stringPrefix = thisenum + ".ValueSet" - /** Creates a bit mask for the zero-adjusted ids in this set as a - * new array of longs */ - def toBitMask: Array[Long] = nnIds.toBitMask - } - - /** A factory object for value sets */ - object ValueSet { - import generic.CanBuildFrom - - /** The empty value set */ - val empty = new ValueSet(immutable.BitSet.empty) - /** A value set consisting of given elements */ - def apply(elems: Value*): ValueSet = (newBuilder ++= elems).result() - /** A value set containing all the values for the zero-adjusted ids - * corresponding to the bits in an array */ - def fromBitMask(elems: Array[Long]): ValueSet = new ValueSet(immutable.BitSet.fromBitMask(elems)) - /** A builder object for value sets */ - def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] { - private[this] val b = new mutable.BitSet - def += (x: Value) = { b += (x.id - bottomId); this } - def clear() = b.clear() - def result() = new ValueSet(b.toImmutable) - } - /** The implicit builder for value sets */ - implicit def canBuildFrom: CanBuildFrom[ValueSet, Value, ValueSet] = - new CanBuildFrom[ValueSet, Value, ValueSet] { - def apply(from: ValueSet) = newBuilder - def apply() = newBuilder - } - } -} diff --git a/tests/scala2-library/src/library/scala/Equals.scala b/tests/scala2-library/src/library/scala/Equals.scala deleted file mode 100644 index e06557ccddb6..000000000000 --- a/tests/scala2-library/src/library/scala/Equals.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** An interface containing operations for equality. - * The only method not already present in class `AnyRef` is `canEqual`. - */ -trait Equals extends Any { - /** A method that should be called from every well-designed equals method - * that is open to be overridden in a subclass. See - * [[http://www.artima.com/pins1ed/object-equality.html Programming in Scala, - * Chapter 28]] for discussion and design. - * - * @param that the value being probed for possible equality - * @return true if this instance can possibly equal `that`, otherwise false - */ - def canEqual(that: Any): Boolean - - /** The universal equality method defined in `AnyRef`. 
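A minimal sketch of the `canEqual` pattern documented above for a class that is open to subclassing; the `Point` class is illustrative.
{{{
class Point(val x: Int, val y: Int) extends Equals {
  // Subclasses that add state should override canEqual so that
  // comparisons between Point and its subclasses stay symmetric.
  def canEqual(that: Any): Boolean = that.isInstanceOf[Point]

  override def equals(that: Any): Boolean = that match {
    case p: Point => p.canEqual(this) && p.x == x && p.y == y
    case _        => false
  }
  override def hashCode: Int = (x, y).##
}
}}}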
- */ - def equals(that: Any): Boolean -} diff --git a/tests/scala2-library/src/library/scala/Float.scala b/tests/scala2-library/src/library/scala/Float.scala deleted file mode 100644 index 01fdbc00e48c..000000000000 --- a/tests/scala2-library/src/library/scala/Float.scala +++ /dev/null @@ -1,253 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Float`, a 32-bit IEEE-754 floating point number (equivalent to Java's `float` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Float` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Float]] => [[scala.runtime.RichFloat]] - * which provides useful non-primitive operations. - */ -final abstract class Float private extends AnyVal { - def toByte: Byte - def toShort: Short - def toChar: Char - def toInt: Int - def toLong: Long - def toFloat: Float - def toDouble: Double - - /** Returns this value, unmodified. */ - def unary_+ : Float - /** Returns the negation of this value. */ - def unary_- : Float - - def +(x: String): String - - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Byte): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Short): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Char): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Int): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Long): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Float): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Double): Boolean - - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Byte): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Short): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Char): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Int): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Long): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Float): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Double): Boolean - - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Byte): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Short): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Char): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Int): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Long): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. 
*/ - def <(x: Float): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Double): Boolean - - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Byte): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Short): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Char): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Int): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Long): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Float): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Double): Boolean - - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Byte): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Short): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Char): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Int): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Long): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Float): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Double): Boolean - - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Byte): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Short): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Char): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Int): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Long): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Float): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Double): Boolean - - /** Returns the sum of this value and `x`. */ - def +(x: Byte): Float - /** Returns the sum of this value and `x`. */ - def +(x: Short): Float - /** Returns the sum of this value and `x`. */ - def +(x: Char): Float - /** Returns the sum of this value and `x`. */ - def +(x: Int): Float - /** Returns the sum of this value and `x`. */ - def +(x: Long): Float - /** Returns the sum of this value and `x`. */ - def +(x: Float): Float - /** Returns the sum of this value and `x`. */ - def +(x: Double): Double - - /** Returns the difference of this value and `x`. */ - def -(x: Byte): Float - /** Returns the difference of this value and `x`. */ - def -(x: Short): Float - /** Returns the difference of this value and `x`. */ - def -(x: Char): Float - /** Returns the difference of this value and `x`. */ - def -(x: Int): Float - /** Returns the difference of this value and `x`. */ - def -(x: Long): Float - /** Returns the difference of this value and `x`. */ - def -(x: Float): Float - /** Returns the difference of this value and `x`. 
*/ - def -(x: Double): Double - - /** Returns the product of this value and `x`. */ - def *(x: Byte): Float - /** Returns the product of this value and `x`. */ - def *(x: Short): Float - /** Returns the product of this value and `x`. */ - def *(x: Char): Float - /** Returns the product of this value and `x`. */ - def *(x: Int): Float - /** Returns the product of this value and `x`. */ - def *(x: Long): Float - /** Returns the product of this value and `x`. */ - def *(x: Float): Float - /** Returns the product of this value and `x`. */ - def *(x: Double): Double - - /** Returns the quotient of this value and `x`. */ - def /(x: Byte): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Short): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Char): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Int): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Long): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Float): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Double): Double - - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Byte): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Short): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Char): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Int): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Long): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Float): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Double): Double - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Float] = ??? -} - -object Float extends AnyValCompanion { - /** The smallest positive value greater than 0.0f which is - * representable as a Float. - */ - final val MinPositiveValue = java.lang.Float.MIN_VALUE - final val NaN = java.lang.Float.NaN - final val PositiveInfinity = java.lang.Float.POSITIVE_INFINITY - final val NegativeInfinity = java.lang.Float.NEGATIVE_INFINITY - - /** The negative number with the greatest (finite) absolute value which is representable - * by a Float. Note that it differs from [[java.lang.Float.MIN_VALUE]], which - * is the smallest positive value representable by a Float. In Scala that number - * is called Float.MinPositiveValue. - */ - final val MinValue = -java.lang.Float.MAX_VALUE - - /** The largest finite positive number representable as a Float. */ - final val MaxValue = java.lang.Float.MAX_VALUE - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Float to be boxed - * @return a java.lang.Float offering `x` as its underlying value. - */ - def box(x: Float): java.lang.Float = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Float. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Float to be unboxed. 
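The constant names above differ from their `java.lang` counterparts in a way that is easy to get wrong; a short sketch of the distinction (the same assertions hold for `Double`, and the object name is illustrative):
{{{
object FloatBounds {
  def main(args: Array[String]): Unit = {
    // Scala's MinValue is the most negative finite Float...
    assert(Float.MinValue == -Float.MaxValue)
    // ...whereas java.lang.Float.MIN_VALUE is the smallest positive Float,
    // which Scala exposes as MinPositiveValue.
    assert(Float.MinPositiveValue == java.lang.Float.MIN_VALUE)
    assert(Float.MinPositiveValue > 0.0f)
    println(s"range: ${Float.MinValue} .. ${Float.MaxValue}")
  }
}
}}}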
- * @throws ClassCastException if the argument is not a java.lang.Float - * @return the Float resulting from calling floatValue() on `x` - */ - def unbox(x: java.lang.Object): Float = ??? - - /** The String representation of the scala.Float companion object. */ - override def toString = "object scala.Float" - /** Language mandated coercions from Float to "wider" types. */ - import scala.language.implicitConversions - implicit def float2double(x: Float): Double = x.toDouble -} - diff --git a/tests/scala2-library/src/library/scala/Function.scala b/tests/scala2-library/src/library/scala/Function.scala deleted file mode 100644 index f28897c20bd3..000000000000 --- a/tests/scala2-library/src/library/scala/Function.scala +++ /dev/null @@ -1,131 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** A module defining utility methods for higher-order functional programming. - * - * @author Martin Odersky - * @version 1.0, 29/11/2006 - */ -object Function { - /** Given a sequence of functions `f,,1,,`, ..., `f,,n,,`, return the - * function `f,,1,, andThen ... andThen f,,n,,`. - * - * @param fs The given sequence of functions - */ - def chain[a](fs: Seq[a => a]): a => a = { x => (x /: fs) ((x, f) => f(x)) } - - /** The constant function */ - def const[T, U](x: T)(y: U): T = x - - /** Turns a function `A => Option[B]` into a `PartialFunction[A, B]`. - * - * '''Important note''': this transformation implies the original function - * may be called 2 or more times on each logical invocation, because the - * only way to supply an implementation of `isDefinedAt` is to call the - * function and examine the return value. - * See also [[scala.PartialFunction]], method `applyOrElse`. - * - * @param f a function `T => Option[R]` - * @return a partial function defined for those inputs where - * f returns `Some(_)` and undefined where `f` returns `None`. - * @see [[scala.PartialFunction]], method `lift`. - */ - def unlift[T, R](f: T => Option[R]): PartialFunction[T, R] = PartialFunction.unlifted(f) - - /** Uncurrying for functions of arity 2. This transforms a unary function - * returning another unary function into a function of arity 2. - */ - def uncurried[a1, a2, b](f: a1 => a2 => b): (a1, a2) => b = { - (x1, x2) => f(x1)(x2) - } - - /** Uncurrying for functions of arity 3. - */ - def uncurried[a1, a2, a3, b](f: a1 => a2 => a3 => b): (a1, a2, a3) => b = { - (x1, x2, x3) => f(x1)(x2)(x3) - } - - /** Uncurrying for functions of arity 4. - */ - def uncurried[a1, a2, a3, a4, b](f: a1 => a2 => a3 => a4 => b): (a1, a2, a3, a4) => b = { - (x1, x2, x3, x4) => f(x1)(x2)(x3)(x4) - } - - /** Uncurrying for functions of arity 5. - */ - def uncurried[a1, a2, a3, a4, a5, b](f: a1 => a2 => a3 => a4 => a5 => b): (a1, a2, a3, a4, a5) => b = { - (x1, x2, x3, x4, x5) => f(x1)(x2)(x3)(x4)(x5) - } - - /** Tupling for functions of arity 2. This transforms a function - * of arity 2 into a unary function that takes a pair of arguments. - * - * @note These functions are slotted for deprecation, but it is on - * hold pending superior type inference for tupling anonymous functions. - */ - // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = { - case Tuple2(x1, x2) => f(x1, x2) - } - - /** Tupling for functions of arity 3. 
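A few usage sketches for the `Function` combinators documented above; the value names are illustrative.
{{{
object FunctionDemo {
  def main(args: Array[String]): Unit = {
    // chain: composes a sequence of endo-functions, applied left to right
    val inc    = (x: Int) => x + 1
    val double = (x: Int) => x * 2
    println(Function.chain(Seq(inc, double))(3))   // (3 + 1) * 2 = 8

    // unlift: A => Option[B] becomes a PartialFunction[A, B]
    val sqrt = Function.unlift((x: Double) => if (x >= 0) Some(math.sqrt(x)) else None)
    println(sqrt.isDefinedAt(-1.0))                // false

    // uncurried: a1 => a2 => b becomes (a1, a2) => b
    val add: Int => Int => Int = x => y => x + y
    println(Function.uncurried(add)(2, 3))         // 5
  }
}
}}}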
This transforms a function - * of arity 3 into a unary function that takes a triple of arguments. - */ - // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, a3, b](f: (a1, a2, a3) => b): Tuple3[a1, a2, a3] => b = { - case Tuple3(x1, x2, x3) => f(x1, x2, x3) - } - - /** Tupling for functions of arity 4. This transforms a function - * of arity 4 into a unary function that takes a 4-tuple of arguments. - */ - // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): Tuple4[a1, a2, a3, a4] => b = { - case Tuple4(x1, x2, x3, x4) => f(x1, x2, x3, x4) - } - - /** Tupling for functions of arity 5. This transforms a function - * of arity 5 into a unary function that takes a 5-tuple of arguments. - */ - // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): Tuple5[a1, a2, a3, a4, a5] => b = { - case Tuple5(x1, x2, x3, x4, x5) => f(x1, x2, x3, x4, x5) - } - - /** Un-tupling for functions of arity 2. This transforms a function taking - * a pair of arguments into a binary function which takes each argument separately. - */ - def untupled[a1, a2, b](f: Tuple2[a1, a2] => b): (a1, a2) => b = { - (x1, x2) => f(Tuple2(x1, x2)) - } - - /** Un-tupling for functions of arity 3. This transforms a function taking - * a triple of arguments into a ternary function which takes each argument separately. - */ - def untupled[a1, a2, a3, b](f: Tuple3[a1, a2, a3] => b): (a1, a2, a3) => b = { - (x1, x2, x3) => f(Tuple3(x1, x2, x3)) - } - - /** Un-tupling for functions of arity 4. This transforms a function taking - * a 4-tuple of arguments into a function of arity 4 which takes each argument separately. - */ - def untupled[a1, a2, a3, a4, b](f: Tuple4[a1, a2, a3, a4] => b): (a1, a2, a3, a4) => b = { - (x1, x2, x3, x4) => f(Tuple4(x1, x2, x3, x4)) - } - - /** Un-tupling for functions of arity 5. This transforms a function taking - * a 5-tuple of arguments into a function of arity 5 which takes each argument separately. - */ - def untupled[a1, a2, a3, a4, a5, b](f: Tuple5[a1, a2, a3, a4, a5] => b): (a1, a2, a3, a4, a5) => b = { - (x1, x2, x3, x4, x5) => f(Tuple5(x1, x2, x3, x4, x5)) - } -} diff --git a/tests/scala2-library/src/library/scala/Function0.scala b/tests/scala2-library/src/library/scala/Function0.scala deleted file mode 100644 index 15d0f1493875..000000000000 --- a/tests/scala2-library/src/library/scala/Function0.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. -// genprod generated these sources at: Mon Jun 08 18:05:40 CEST 2015 - -package scala - - -/** A function of 0 parameters. - * - * In the following example, the definition of javaVersion is a - * shorthand for the anonymous class definition anonfun0: - * - * {{{ - * object Main extends App { - * val javaVersion = () => sys.props("java.version") - * - * val anonfun0 = new Function0[String] { - * def apply(): String = sys.props("java.version") - * } - * assert(javaVersion() == anonfun0()) - * } - * }}} - */ -trait Function0[@specialized(Specializable.Primitives) +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. 
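And a sketch of the `tupled`/`untupled` pair at arity 2, which are inverses of each other; names are illustrative.
{{{
object TupledDemo {
  def main(args: Array[String]): Unit = {
    val sum2: (Int, Int) => Int = _ + _

    val tupledSum: ((Int, Int)) => Int = Function.tupled(sum2)
    println(tupledSum((1, 2)))                   // 3

    println(Function.untupled(tupledSum)(4, 5))  // 9, back to a binary function
  }
}
}}}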
- */ - def apply(): R - - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function1.scala b/tests/scala2-library/src/library/scala/Function1.scala deleted file mode 100644 index 572901c6f357..000000000000 --- a/tests/scala2-library/src/library/scala/Function1.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 1 parameter. - * - * In the following example, the definition of succ is a - * shorthand for the anonymous class definition anonfun1: - * - * {{{ - * object Main extends App { - * val succ = (x: Int) => x + 1 - * val anonfun1 = new Function1[Int, Int] { - * def apply(x: Int): Int = x + 1 - * } - * assert(succ(0) == anonfun1(0)) - * } - * }}} - * - * Note that the difference between `Function1` and [[scala.PartialFunction]] - * is that the latter can specify inputs which it will not handle. - */ -@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.") -trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => - /** Apply the body of this function to the argument. - * @return the result of function application. - */ - def apply(v1: T1): R - - /** Composes two instances of Function1 in a new Function1, with this function applied last. - * - * @tparam A the type to which function `g` can be applied - * @param g a function A => T1 - * @return a new function `f` such that `f(x) == apply(g(x))` - */ - @annotation.unspecialized def compose[A](g: A => T1): A => R = { x => apply(g(x)) } - - /** Composes two instances of Function1 in a new Function1, with this function applied first. - * - * @tparam A the result type of function `g` - * @param g a function R => A - * @return a new function `f` such that `f(x) == g(apply(x))` - */ - @annotation.unspecialized def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) } - - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function10.scala b/tests/scala2-library/src/library/scala/Function10.scala deleted file mode 100644 index 7789970a4418..000000000000 --- a/tests/scala2-library/src/library/scala/Function10.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 10 parameters. - * - */ -trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10): R - /** Creates a curried version of this function. 
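A quick sketch of the ordering difference between `compose` and `andThen` on `Function1`, as documented above; the function names are illustrative.
{{{
object ComposeDemo {
  def main(args: Array[String]): Unit = {
    val inc    = (x: Int) => x + 1
    val double = (x: Int) => x * 2

    // compose applies the argument function first, this function last
    println((double compose inc)(3)) // double(inc(3)) = 8
    // andThen applies this function first, the argument function last
    println((double andThen inc)(3)) // inc(double(3)) = 7
  }
}
}}}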
- * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried - } - /** Creates a tupled version of this function: instead of 10 arguments, - * it accepts a single [[scala.Tuple10]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == f(Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)` - */ - - @annotation.unspecialized def tupled: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] => R = { - case Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function11.scala b/tests/scala2-library/src/library/scala/Function11.scala deleted file mode 100644 index d4276f3fd109..000000000000 --- a/tests/scala2-library/src/library/scala/Function11.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 11 parameters. - * - */ -trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried - } - /** Creates a tupled version of this function: instead of 11 arguments, - * it accepts a single [[scala.Tuple11]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == f(Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)` - */ - - @annotation.unspecialized def tupled: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] => R = { - case Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function12.scala b/tests/scala2-library/src/library/scala/Function12.scala deleted file mode 100644 index dfa8bcfce66b..000000000000 --- a/tests/scala2-library/src/library/scala/Function12.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. 
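The `curried` and `tupled` members follow the same pattern at every arity; a sketch at arity 3 for brevity, with illustrative names.
{{{
object CurriedDemo {
  def main(args: Array[String]): Unit = {
    val volume: (Int, Int, Int) => Int = (w, h, d) => w * h * d

    val curriedVolume = volume.curried   // Int => Int => Int => Int
    println(curriedVolume(2)(3)(4))      // 24

    val tupledVolume = volume.tupled     // ((Int, Int, Int)) => Int
    println(tupledVolume((2, 3, 4)))     // 24
  }
}
}}}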
See scala.Function0 for timestamp. - -package scala - - -/** A function of 12 parameters. - * - */ -trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried - } - /** Creates a tupled version of this function: instead of 12 arguments, - * it accepts a single [[scala.Tuple12]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == f(Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)` - */ - - @annotation.unspecialized def tupled: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] => R = { - case Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function13.scala b/tests/scala2-library/src/library/scala/Function13.scala deleted file mode 100644 index 5404c208bf31..000000000000 --- a/tests/scala2-library/src/library/scala/Function13.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 13 parameters. - * - */ -trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried - } - /** Creates a tupled version of this function: instead of 13 arguments, - * it accepts a single [[scala.Tuple13]] argument. 
- * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == f(Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)` - */ - - @annotation.unspecialized def tupled: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] => R = { - case Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function14.scala b/tests/scala2-library/src/library/scala/Function14.scala deleted file mode 100644 index 3145290bcfa2..000000000000 --- a/tests/scala2-library/src/library/scala/Function14.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 14 parameters. - * - */ -trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried - } - /** Creates a tupled version of this function: instead of 14 arguments, - * it accepts a single [[scala.Tuple14]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == f(Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)` - */ - - @annotation.unspecialized def tupled: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] => R = { - case Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function15.scala b/tests/scala2-library/src/library/scala/Function15.scala deleted file mode 100644 index 309ef53e71c3..000000000000 --- a/tests/scala2-library/src/library/scala/Function15.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 15 parameters. 
- * - */ -trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried - } - /** Creates a tupled version of this function: instead of 15 arguments, - * it accepts a single [[scala.Tuple15]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == f(Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)` - */ - - @annotation.unspecialized def tupled: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] => R = { - case Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function16.scala b/tests/scala2-library/src/library/scala/Function16.scala deleted file mode 100644 index c4cb107e872e..000000000000 --- a/tests/scala2-library/src/library/scala/Function16.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 16 parameters. - * - */ -trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried - } - /** Creates a tupled version of this function: instead of 16 arguments, - * it accepts a single [[scala.Tuple16]] argument. 
- * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == f(Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)` - */ - - @annotation.unspecialized def tupled: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] => R = { - case Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function17.scala b/tests/scala2-library/src/library/scala/Function17.scala deleted file mode 100644 index 005ae2ab79dc..000000000000 --- a/tests/scala2-library/src/library/scala/Function17.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 17 parameters. - * - */ -trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried - } - /** Creates a tupled version of this function: instead of 17 arguments, - * it accepts a single [[scala.Tuple17]] argument. 
- * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == f(Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)` - */ - - @annotation.unspecialized def tupled: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] => R = { - case Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function18.scala b/tests/scala2-library/src/library/scala/Function18.scala deleted file mode 100644 index 371630dae319..000000000000 --- a/tests/scala2-library/src/library/scala/Function18.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 18 parameters. - * - */ -trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried - } - /** Creates a tupled version of this function: instead of 18 arguments, - * it accepts a single [[scala.Tuple18]] argument. 
- * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == f(Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)` - */ - - @annotation.unspecialized def tupled: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] => R = { - case Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function19.scala b/tests/scala2-library/src/library/scala/Function19.scala deleted file mode 100644 index 95c60a467e50..000000000000 --- a/tests/scala2-library/src/library/scala/Function19.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 19 parameters. - * - */ -trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried - } - /** Creates a tupled version of this function: instead of 19 arguments, - * it accepts a single [[scala.Tuple19]] argument. 
- * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == f(Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)` - */ - - @annotation.unspecialized def tupled: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] => R = { - case Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function2.scala b/tests/scala2-library/src/library/scala/Function2.scala deleted file mode 100644 index e2c094ea4022..000000000000 --- a/tests/scala2-library/src/library/scala/Function2.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 2 parameters. - * - * In the following example, the definition of max is a - * shorthand for the anonymous class definition anonfun2: - * - * {{{ - * object Main extends App { - * val max = (x: Int, y: Int) => if (x < y) y else x - * - * val anonfun2 = new Function2[Int, Int, Int] { - * def apply(x: Int, y: Int): Int = if (x < y) y else x - * } - * assert(max(0, 1) == anonfun2(0, 1)) - * } - * }}} - */ -trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2) == apply(x1, x2)` - */ - @annotation.unspecialized def curried: T1 => T2 => R = { - (x1: T1) => (x2: T2) => apply(x1, x2) - } - /** Creates a tupled version of this function: instead of 2 arguments, - * it accepts a single [[scala.Tuple2]] argument. - * - * @return a function `f` such that `f((x1, x2)) == f(Tuple2(x1, x2)) == apply(x1, x2)` - */ - - @annotation.unspecialized def tupled: Tuple2[T1, T2] => R = { - case Tuple2(x1, x2) => apply(x1, x2) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function20.scala b/tests/scala2-library/src/library/scala/Function20.scala deleted file mode 100644 index a93f999d44fb..000000000000 --- a/tests/scala2-library/src/library/scala/Function20.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 20 parameters. - * - */ -trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. 
- * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried - } - /** Creates a tupled version of this function: instead of 20 arguments, - * it accepts a single [[scala.Tuple20]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == f(Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)` - */ - - @annotation.unspecialized def tupled: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] => R = { - case Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function21.scala b/tests/scala2-library/src/library/scala/Function21.scala deleted file mode 100644 index 7ebbb0679843..000000000000 --- a/tests/scala2-library/src/library/scala/Function21.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 21 parameters. - * - */ -trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21): R - /** Creates a curried version of this function. 
- * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried - } - /** Creates a tupled version of this function: instead of 21 arguments, - * it accepts a single [[scala.Tuple21]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == f(Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)` - */ - - @annotation.unspecialized def tupled: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] => R = { - case Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function22.scala b/tests/scala2-library/src/library/scala/Function22.scala deleted file mode 100644 index e5a3d83fb95d..000000000000 --- a/tests/scala2-library/src/library/scala/Function22.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 22 parameters. - * - */ -trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22): R - /** Creates a curried version of this function. 
- * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried - } - /** Creates a tupled version of this function: instead of 22 arguments, - * it accepts a single [[scala.Tuple22]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == f(Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)` - */ - - @annotation.unspecialized def tupled: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] => R = { - case Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function3.scala b/tests/scala2-library/src/library/scala/Function3.scala deleted file mode 100644 index 850290d244bf..000000000000 --- a/tests/scala2-library/src/library/scala/Function3.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 3 parameters. - * - */ -trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3) == apply(x1, x2, x3)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => R = { - (x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3) - } - /** Creates a tupled version of this function: instead of 3 arguments, - * it accepts a single [[scala.Tuple3]] argument. 
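Worth noting in the deleted sources: small arities (Function3, Function4) curry by nesting lambdas directly, while the higher arities peel off the first argument and reuse `curried` on the remaining (n-1)-ary lambda. A hedged, hand-written sketch of that peel-one shape for arity 4 (names are illustrative, not from the diff):

```scala
// Illustrative sketch only: the "peel one argument, delegate to curried"
// shape used by the deleted higher-arity FunctionN sources, written by hand.
object PeelOneCurriedDemo {
  def curried4[T1, T2, T3, T4, R](f: (T1, T2, T3, T4) => R): T1 => T2 => T3 => T4 => R =
    (x1: T1) => ((x2: T2, x3: T3, x4: T4) => f(x1, x2, x3, x4)).curried

  def main(args: Array[String]): Unit = {
    val sum4: (Int, Int, Int, Int) => Int = _ + _ + _ + _
    assert(curried4(sum4)(1)(2)(3)(4) == sum4(1, 2, 3, 4)) // both give 10
    println(curried4(sum4)(1)(2)(3)(4))
  }
}
```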
- * - * @return a function `f` such that `f((x1, x2, x3)) == f(Tuple3(x1, x2, x3)) == apply(x1, x2, x3)` - */ - - @annotation.unspecialized def tupled: Tuple3[T1, T2, T3] => R = { - case Tuple3(x1, x2, x3) => apply(x1, x2, x3) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function4.scala b/tests/scala2-library/src/library/scala/Function4.scala deleted file mode 100644 index c9ac6df32ea0..000000000000 --- a/tests/scala2-library/src/library/scala/Function4.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 4 parameters. - * - */ -trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4) == apply(x1, x2, x3, x4)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => R = { - (x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4) - } - /** Creates a tupled version of this function: instead of 4 arguments, - * it accepts a single [[scala.Tuple4]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4)) == f(Tuple4(x1, x2, x3, x4)) == apply(x1, x2, x3, x4)` - */ - - @annotation.unspecialized def tupled: Tuple4[T1, T2, T3, T4] => R = { - case Tuple4(x1, x2, x3, x4) => apply(x1, x2, x3, x4) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function5.scala b/tests/scala2-library/src/library/scala/Function5.scala deleted file mode 100644 index 360a460306f2..000000000000 --- a/tests/scala2-library/src/library/scala/Function5.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 5 parameters. - * - */ -trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5) == apply(x1, x2, x3, x4, x5)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried - } - /** Creates a tupled version of this function: instead of 5 arguments, - * it accepts a single [[scala.Tuple5]] argument. 
- * - * @return a function `f` such that `f((x1, x2, x3, x4, x5)) == f(Tuple5(x1, x2, x3, x4, x5)) == apply(x1, x2, x3, x4, x5)` - */ - - @annotation.unspecialized def tupled: Tuple5[T1, T2, T3, T4, T5] => R = { - case Tuple5(x1, x2, x3, x4, x5) => apply(x1, x2, x3, x4, x5) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function6.scala b/tests/scala2-library/src/library/scala/Function6.scala deleted file mode 100644 index d30877e7658b..000000000000 --- a/tests/scala2-library/src/library/scala/Function6.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 6 parameters. - * - */ -trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6) == apply(x1, x2, x3, x4, x5, x6)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried - } - /** Creates a tupled version of this function: instead of 6 arguments, - * it accepts a single [[scala.Tuple6]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6)) == f(Tuple6(x1, x2, x3, x4, x5, x6)) == apply(x1, x2, x3, x4, x5, x6)` - */ - - @annotation.unspecialized def tupled: Tuple6[T1, T2, T3, T4, T5, T6] => R = { - case Tuple6(x1, x2, x3, x4, x5, x6) => apply(x1, x2, x3, x4, x5, x6) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function7.scala b/tests/scala2-library/src/library/scala/Function7.scala deleted file mode 100644 index b19caf2b5004..000000000000 --- a/tests/scala2-library/src/library/scala/Function7.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 7 parameters. - * - */ -trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7) == apply(x1, x2, x3, x4, x5, x6, x7)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried - } - /** Creates a tupled version of this function: instead of 7 arguments, - * it accepts a single [[scala.Tuple7]] argument. 
- * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7)) == f(Tuple7(x1, x2, x3, x4, x5, x6, x7)) == apply(x1, x2, x3, x4, x5, x6, x7)` - */ - - @annotation.unspecialized def tupled: Tuple7[T1, T2, T3, T4, T5, T6, T7] => R = { - case Tuple7(x1, x2, x3, x4, x5, x6, x7) => apply(x1, x2, x3, x4, x5, x6, x7) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function8.scala b/tests/scala2-library/src/library/scala/Function8.scala deleted file mode 100644 index 3aff0b034cf6..000000000000 --- a/tests/scala2-library/src/library/scala/Function8.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 8 parameters. - * - */ -trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8): R - /** Creates a curried version of this function. - * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8) == apply(x1, x2, x3, x4, x5, x6, x7, x8)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried - } - /** Creates a tupled version of this function: instead of 8 arguments, - * it accepts a single [[scala.Tuple8]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8)) == f(Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)) == apply(x1, x2, x3, x4, x5, x6, x7, x8)` - */ - - @annotation.unspecialized def tupled: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] => R = { - case Tuple8(x1, x2, x3, x4, x5, x6, x7, x8) => apply(x1, x2, x3, x4, x5, x6, x7, x8) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Function9.scala b/tests/scala2-library/src/library/scala/Function9.scala deleted file mode 100644 index f80ccf48f9b7..000000000000 --- a/tests/scala2-library/src/library/scala/Function9.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A function of 9 parameters. - * - */ -trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef { self => - /** Apply the body of this function to the arguments. - * @return the result of function application. - */ - def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9): R - /** Creates a curried version of this function. 
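As a usage note (illustrative only, not part of the diff): the `tupled` view documented above is what lets a two-argument function be mapped over tuple-producing collections such as `Map`. The names below are made up for the example:

```scala
// Illustrative sketch only: using `tupled` to adapt a two-argument function
// to an API that yields (key, value) tuples.
object TupledUsageDemo {
  def main(args: Array[String]): Unit = {
    val describe: (String, Int) => String = (k, v) => s"$k -> $v"
    val entries = Map("a" -> 1, "b" -> 2)

    // describe.tupled has type ((String, Int)) => String
    val rendered = entries.toList.sortBy(_._1).map(describe.tupled)
    println(rendered.mkString(", ")) // a -> 1, b -> 2
  }
}
```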
- * - * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)` - */ - @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = { - (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried - } - /** Creates a tupled version of this function: instead of 9 arguments, - * it accepts a single [[scala.Tuple9]] argument. - * - * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9)) == f(Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)` - */ - - @annotation.unspecialized def tupled: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9] => R = { - case Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9) - } - override def toString() = "" -} diff --git a/tests/scala2-library/src/library/scala/Immutable.scala b/tests/scala2-library/src/library/scala/Immutable.scala deleted file mode 100644 index c7e96a46a010..000000000000 --- a/tests/scala2-library/src/library/scala/Immutable.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** A marker trait for all immutable data structures such as immutable - * collections. - * - * @since 2.8 - */ -trait Immutable diff --git a/tests/scala2-library/src/library/scala/Int.scala b/tests/scala2-library/src/library/scala/Int.scala deleted file mode 100644 index 491094cfde4a..000000000000 --- a/tests/scala2-library/src/library/scala/Int.scala +++ /dev/null @@ -1,477 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Int`, a 32-bit signed integer (equivalent to Java's `int` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Int` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Int]] => [[scala.runtime.RichInt]] - * which provides useful non-primitive operations. - */ -final abstract class Int private extends AnyVal { - def toByte: Byte - def toShort: Short - def toChar: Char - def toInt: Int - def toLong: Long - def toFloat: Float - def toDouble: Double - - /** - * Returns the bitwise negation of this value. - * @example {{{ - * ~5 == -6 - * // in binary: ~00000101 == - * // 11111010 - * }}} - */ - def unary_~ : Int - /** Returns this value, unmodified. */ - def unary_+ : Int - /** Returns the negation of this value. */ - def unary_- : Int - - def +(x: String): String - - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Int): Int - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. 
- * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Long): Int - - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Byte): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Short): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Char): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Int): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Long): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Float): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Double): Boolean - - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Byte): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Short): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Char): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Int): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Long): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Float): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Double): Boolean - - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Byte): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Short): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Char): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Int): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. 
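The shift examples quoted in the deleted `Int.scala` Scaladoc can be checked directly; an illustrative sketch (object name is made up, not part of the diff):

```scala
// Illustrative sketch only: verifies the shift examples from the deleted
// Int.scala Scaladoc. `<<` zero-fills on the right, `>>>` zero-fills on the
// left (logical shift), `>>` replicates the sign bit (arithmetic shift).
object IntShiftDemo {
  def main(args: Array[String]): Unit = {
    assert((6 << 3) == 48)
    assert((21 >>> 3) == 2)
    assert((-21 >>> 3) == 536870909) // sign bit is not preserved
    assert((-21 >> 3) == -3)         // sign bit is preserved
    println("all Int shift examples from the Scaladoc hold")
  }
}
```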
*/ - def <(x: Long): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Float): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Double): Boolean - - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Byte): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Short): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Char): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Int): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Long): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Float): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Double): Boolean - - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Byte): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Short): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Char): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Int): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Long): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Float): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Double): Boolean - - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Byte): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Short): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Char): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Int): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Long): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Float): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Double): Boolean - - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Byte): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Short): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Char): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Int): Int - /** - * Returns the bitwise OR of this value and `x`. 
- * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Long): Long - - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Byte): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Short): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Char): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Int): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Long): Long - - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Byte): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Short): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Char): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Int): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Long): Long - - /** Returns the sum of this value and `x`. */ - def +(x: Byte): Int - /** Returns the sum of this value and `x`. */ - def +(x: Short): Int - /** Returns the sum of this value and `x`. */ - def +(x: Char): Int - /** Returns the sum of this value and `x`. */ - def +(x: Int): Int - /** Returns the sum of this value and `x`. */ - def +(x: Long): Long - /** Returns the sum of this value and `x`. */ - def +(x: Float): Float - /** Returns the sum of this value and `x`. */ - def +(x: Double): Double - - /** Returns the difference of this value and `x`. */ - def -(x: Byte): Int - /** Returns the difference of this value and `x`. */ - def -(x: Short): Int - /** Returns the difference of this value and `x`. */ - def -(x: Char): Int - /** Returns the difference of this value and `x`. */ - def -(x: Int): Int - /** Returns the difference of this value and `x`. */ - def -(x: Long): Long - /** Returns the difference of this value and `x`. */ - def -(x: Float): Float - /** Returns the difference of this value and `x`. */ - def -(x: Double): Double - - /** Returns the product of this value and `x`. */ - def *(x: Byte): Int - /** Returns the product of this value and `x`. */ - def *(x: Short): Int - /** Returns the product of this value and `x`. 
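Likewise, the `|` / `&` / `^` examples repeated throughout the deleted `Int.scala` Scaladoc can be checked in one place; a small illustrative sketch (not part of the diff):

```scala
// Illustrative sketch only: the bitwise OR / AND / XOR examples from the
// deleted Int.scala Scaladoc, checked once.
object IntBitwiseDemo {
  def main(args: Array[String]): Unit = {
    assert((0xf0 | 0xaa) == 0xfa) // 11110000 | 10101010 == 11111010
    assert((0xf0 & 0xaa) == 0xa0) // 11110000 & 10101010 == 10100000
    assert((0xf0 ^ 0xaa) == 0x5a) // 11110000 ^ 10101010 == 01011010
    println("all Int bitwise examples from the Scaladoc hold")
  }
}
```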
*/ - def *(x: Char): Int - /** Returns the product of this value and `x`. */ - def *(x: Int): Int - /** Returns the product of this value and `x`. */ - def *(x: Long): Long - /** Returns the product of this value and `x`. */ - def *(x: Float): Float - /** Returns the product of this value and `x`. */ - def *(x: Double): Double - - /** Returns the quotient of this value and `x`. */ - def /(x: Byte): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Short): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Char): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Int): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Long): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Float): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Double): Double - - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Byte): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Short): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Char): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Int): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Long): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Float): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Double): Double - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Int] = ??? -} - -object Int extends AnyValCompanion { - /** The smallest value representable as an Int. */ - final val MinValue = java.lang.Integer.MIN_VALUE - - /** The largest value representable as an Int. */ - final val MaxValue = java.lang.Integer.MAX_VALUE - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToInteger`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Int to be boxed - * @return a java.lang.Integer offering `x` as its underlying value. - */ - def box(x: Int): java.lang.Integer = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Integer. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToInt`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Integer to be unboxed. - * @throws ClassCastException if the argument is not a java.lang.Integer - * @return the Int resulting from calling intValue() on `x` - */ - def unbox(x: java.lang.Object): Int = ??? - - /** The String representation of the scala.Int companion object. */ - override def toString = "object scala.Int" - /** Language mandated coercions from Int to "wider" types. 
*/ - import scala.language.implicitConversions - implicit def int2long(x: Int): Long = x.toLong - implicit def int2float(x: Int): Float = x.toFloat - implicit def int2double(x: Int): Double = x.toDouble -} - diff --git a/tests/scala2-library/src/library/scala/Long.scala b/tests/scala2-library/src/library/scala/Long.scala deleted file mode 100644 index 84e6f09da354..000000000000 --- a/tests/scala2-library/src/library/scala/Long.scala +++ /dev/null @@ -1,476 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Long`, a 64-bit signed integer (equivalent to Java's `long` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Long` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Long]] => [[scala.runtime.RichLong]] - * which provides useful non-primitive operations. - */ -final abstract class Long private extends AnyVal { - def toByte: Byte - def toShort: Short - def toChar: Char - def toInt: Int - def toLong: Long - def toFloat: Float - def toDouble: Double - - /** - * Returns the bitwise negation of this value. - * @example {{{ - * ~5 == -6 - * // in binary: ~00000101 == - * // 11111010 - * }}} - */ - def unary_~ : Long - /** Returns this value, unmodified. */ - def unary_+ : Long - /** Returns the negation of this value. */ - def unary_- : Long - - def +(x: String): String - - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Int): Long - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Long): Long - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Int): Long - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Long): Long - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Int): Long - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. 
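The arithmetic signatures in the deleted `Int.scala` (an `Int` combined with a `Long` yields a `Long`, with a `Double` yields a `Double`) and the widening conversions in its companion object can be seen in action; an illustrative sketch with made-up value names:

```scala
// Illustrative sketch only: "wider operand wins" result types and the
// language-mandated Int widening conversions (int2long, int2float, int2double).
object IntWideningDemo {
  def main(args: Array[String]): Unit = {
    val i: Int = 41

    val asLong: Long     = i // via int2long
    val asDouble: Double = i // via int2double

    val sumLong: Long     = i + 1L  // Int + Long is typed as Long
    val sumDouble: Double = i + 0.5 // Int + Double is typed as Double

    println((asLong, asDouble, sumLong, sumDouble))
    println((Int.MinValue, Int.MaxValue)) // -2147483648 and 2147483647
  }
}
```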
- * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Long): Long - - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Byte): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Short): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Char): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Int): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Long): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Float): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Double): Boolean - - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Byte): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Short): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Char): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Int): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Long): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Float): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Double): Boolean - - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Byte): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Short): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Char): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Int): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Long): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Float): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Double): Boolean - - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Byte): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Short): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Char): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Int): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Long): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Float): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Double): Boolean - - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Byte): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Short): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Char): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. 
*/ - def >(x: Int): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Long): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Float): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Double): Boolean - - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Byte): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Short): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Char): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Int): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Long): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Float): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Double): Boolean - - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Byte): Long - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Short): Long - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Char): Long - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Int): Long - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Long): Long - - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Byte): Long - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Short): Long - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Char): Long - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Int): Long - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Long): Long - - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Byte): Long - /** - * Returns the bitwise XOR of this value and `x`. 
- * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Short): Long - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Char): Long - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Int): Long - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Long): Long - - /** Returns the sum of this value and `x`. */ - def +(x: Byte): Long - /** Returns the sum of this value and `x`. */ - def +(x: Short): Long - /** Returns the sum of this value and `x`. */ - def +(x: Char): Long - /** Returns the sum of this value and `x`. */ - def +(x: Int): Long - /** Returns the sum of this value and `x`. */ - def +(x: Long): Long - /** Returns the sum of this value and `x`. */ - def +(x: Float): Float - /** Returns the sum of this value and `x`. */ - def +(x: Double): Double - - /** Returns the difference of this value and `x`. */ - def -(x: Byte): Long - /** Returns the difference of this value and `x`. */ - def -(x: Short): Long - /** Returns the difference of this value and `x`. */ - def -(x: Char): Long - /** Returns the difference of this value and `x`. */ - def -(x: Int): Long - /** Returns the difference of this value and `x`. */ - def -(x: Long): Long - /** Returns the difference of this value and `x`. */ - def -(x: Float): Float - /** Returns the difference of this value and `x`. */ - def -(x: Double): Double - - /** Returns the product of this value and `x`. */ - def *(x: Byte): Long - /** Returns the product of this value and `x`. */ - def *(x: Short): Long - /** Returns the product of this value and `x`. */ - def *(x: Char): Long - /** Returns the product of this value and `x`. */ - def *(x: Int): Long - /** Returns the product of this value and `x`. */ - def *(x: Long): Long - /** Returns the product of this value and `x`. */ - def *(x: Float): Float - /** Returns the product of this value and `x`. */ - def *(x: Double): Double - - /** Returns the quotient of this value and `x`. */ - def /(x: Byte): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Short): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Char): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Int): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Long): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Float): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Double): Double - - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Byte): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Short): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Char): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Int): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Long): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Float): Float - /** Returns the remainder of the division of this value by `x`. 
*/ - def %(x: Double): Double - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Long] = ??? -} - -object Long extends AnyValCompanion { - /** The smallest value representable as a Long. */ - final val MinValue = java.lang.Long.MIN_VALUE - - /** The largest value representable as a Long. */ - final val MaxValue = java.lang.Long.MAX_VALUE - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Long to be boxed - * @return a java.lang.Long offering `x` as its underlying value. - */ - def box(x: Long): java.lang.Long = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Long. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Long to be unboxed. - * @throws ClassCastException if the argument is not a java.lang.Long - * @return the Long resulting from calling longValue() on `x` - */ - def unbox(x: java.lang.Object): Long = ??? - - /** The String representation of the scala.Long companion object. */ - override def toString = "object scala.Long" - /** Language mandated coercions from Long to "wider" types. */ - import scala.language.implicitConversions - implicit def long2float(x: Long): Float = x.toFloat - implicit def long2double(x: Long): Double = x.toDouble -} - diff --git a/tests/scala2-library/src/library/scala/MatchError.scala b/tests/scala2-library/src/library/scala/MatchError.scala deleted file mode 100644 index 0ab7f13c7e4b..000000000000 --- a/tests/scala2-library/src/library/scala/MatchError.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** This class implements errors which are thrown whenever an - * object doesn't match any pattern of a pattern matching - * expression. 
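As the comment above says, `MatchError` is thrown whenever a pattern match has no case covering the scrutinee. A minimal, self-contained illustration (the `describe` function and its cases are made up for this sketch):
{{{
object MatchErrorDemo {
  def describe(x: Any): String = x match {
    case i: Int    => s"int $i"
    case s: String => s"string $s"
    // no case for other types: a non-matching argument throws MatchError
  }

  def main(args: Array[String]): Unit = {
    println(describe(42)) // int 42
    try describe(3.14)
    catch { case e: MatchError => println("caught: " + e.getMessage) }
  }
}
}}}
Per the source below, the error keeps the offending object in a `@transient` field and builds its message lazily, only when `getMessage` is called or the error is serialized.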
- * - * @author Matthias Zenger - * @author Martin Odersky - * @version 1.1, 05/03/2004 - * @since 2.0 - */ -final class MatchError(@transient obj: Any) extends RuntimeException { - /** There's no reason we need to call toString eagerly, - * so defer it until getMessage is called or object is serialized - */ - private lazy val objString = { - def ofClass = "of class " + obj.getClass.getName - if (obj == null) "null" - else try { - obj.toString() + " (" + ofClass + ")" - } catch { - case _: Throwable => "an instance " + ofClass - } - } - - @throws[java.io.ObjectStreamException] - private def writeReplace(): Object = { - objString - this - } - - override def getMessage() = objString -} diff --git a/tests/scala2-library/src/library/scala/Mutable.scala b/tests/scala2-library/src/library/scala/Mutable.scala deleted file mode 100644 index 43f98ee4df02..000000000000 --- a/tests/scala2-library/src/library/scala/Mutable.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** - * A marker trait for mutable data structures such as mutable collections - * - * @since 2.8 - */ -trait Mutable diff --git a/tests/scala2-library/src/library/scala/NotImplementedError.scala b/tests/scala2-library/src/library/scala/NotImplementedError.scala deleted file mode 100644 index 464a9a656d48..000000000000 --- a/tests/scala2-library/src/library/scala/NotImplementedError.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** Throwing this exception can be a temporary replacement for a method - * body that remains to be implemented. For instance, the exception is thrown by - * `Predef.???`. 
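Since `Predef.???` throws exactly this `NotImplementedError`, it is the usual way to stub out members that are not written yet. A small sketch (the trait and method names are illustrative only):
{{{
trait PaymentGateway {
  def charge(amountCents: Long): Boolean
}

object StubGateway extends PaymentGateway {
  // Compiles and satisfies the trait, but throws NotImplementedError when called.
  def charge(amountCents: Long): Boolean = ???
}

object StubDemo extends App {
  try StubGateway.charge(100L)
  catch { case e: NotImplementedError => println(e.getMessage) } // "an implementation is missing"
}
}}}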
- */ -final class NotImplementedError(msg: String) extends Error(msg) { - def this() = this("an implementation is missing") -} diff --git a/tests/scala2-library/src/library/scala/NotNull.scala b/tests/scala2-library/src/library/scala/NotNull.scala deleted file mode 100644 index 6a9be79281ad..000000000000 --- a/tests/scala2-library/src/library/scala/NotNull.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** - * A marker trait for things that are not allowed to be null - * @since 2.5 - */ - -@deprecated("this trait will be removed", "2.11.0") -trait NotNull extends Any {} diff --git a/tests/scala2-library/src/library/scala/Option.scala b/tests/scala2-library/src/library/scala/Option.scala deleted file mode 100644 index 30c9e685652c..000000000000 --- a/tests/scala2-library/src/library/scala/Option.scala +++ /dev/null @@ -1,350 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -object Option { - - import scala.language.implicitConversions - - /** An implicit conversion that converts an option to an iterable value - */ - implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = xo.toList - - /** An Option factory which creates Some(x) if the argument is not null, - * and None if it is null. - * - * @param x the value - * @return Some(value) if value != null, None if value == null - */ - def apply[A](x: A): Option[A] = if (x == null) None else Some(x) - - /** An Option factory which returns `None` in a manner consistent with - * the collections hierarchy. - */ - def empty[A] : Option[A] = None -} - -/** Represents optional values. Instances of `Option` - * are either an instance of $some or the object $none. - * - * The most idiomatic way to use an $option instance is to treat it - * as a collection or monad and use `map`,`flatMap`, `filter`, or - * `foreach`: - * - * {{{ - * val name: Option[String] = request getParameter "name" - * val upper = name map { _.trim } filter { _.length != 0 } map { _.toUpperCase } - * println(upper getOrElse "") - * }}} - * - * Note that this is equivalent to {{{ - * val upper = for { - * name <- request getParameter "name" - * trimmed <- Some(name.trim) - * upper <- Some(trimmed.toUpperCase) if trimmed.length != 0 - * } yield upper - * println(upper getOrElse "") - * }}} - * - * Because of how for comprehension works, if $none is returned - * from `request.getParameter`, the entire expression results in - * $none - * - * This allows for sophisticated chaining of $option values without - * having to check for the existence of a value. - * - * A less-idiomatic way to use $option values is via pattern matching: {{{ - * val nameMaybe = request getParameter "name" - * nameMaybe match { - * case Some(name) => - * println(name.trim.toUppercase) - * case None => - * println("No name value") - * } - * }}} - * - * @note Many of the methods in here are duplicative with those - * in the Traversable hierarchy, but they are duplicated for a reason: - * the implicit conversion tends to leave one with an Iterable in - * situations where one could have retained an Option. 
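The idiomatic style described in the scaladoc above, treating an `Option` as a tiny collection and chaining `map`/`filter`/`getOrElse` rather than pattern matching, looks like this in practice; `lookup` is a stand-in for something like `request.getParameter` and is purely illustrative:
{{{
object OptionDemo extends App {
  val params = Map("name" -> "  ada  ", "empty" -> "   ")

  def lookup(key: String): Option[String] = params.get(key)

  def upperName(key: String): String =
    lookup(key).map(_.trim).filter(_.nonEmpty).map(_.toUpperCase).getOrElse("")

  println(upperName("name"))    // ADA
  println(upperName("empty"))   // ""  (filtered out)
  println(upperName("missing")) // ""  (None all the way through)
}
}}}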
- * - * @author Martin Odersky - * @author Matthias Zenger - * @version 1.1, 16/01/2007 - * @define none `None` - * @define some [[scala.Some]] - * @define option [[scala.Option]] - * @define p `p` - * @define f `f` - * @define coll option - * @define Coll `Option` - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define collectExample - * @define undefinedorder - * @define thatinfo the class of the returned collection. In the standard library configuration, `That` is `Iterable[B]` - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current - * representation type `Repr` and the new element type `B`. - */ -@SerialVersionUID(-114498752079829388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -sealed abstract class Option[+A] extends Product with Serializable { - self => - - /** Returns true if the option is $none, false otherwise. - */ - def isEmpty: Boolean - - /** Returns true if the option is an instance of $some, false otherwise. - */ - def isDefined: Boolean = !isEmpty - - /** Returns the option's value. - * @note The option must be nonempty. - * @throws java.util.NoSuchElementException if the option is empty. - */ - def get: A - - /** Returns the option's value if the option is nonempty, otherwise - * return the result of evaluating `default`. - * - * @param default the default expression. - */ - @inline final def getOrElse[B >: A](default: => B): B = - if (isEmpty) default else this.get - - /** Returns the option's value if it is nonempty, - * or `null` if it is empty. - * Although the use of null is discouraged, code written to use - * $option must often interface with code that expects and returns nulls. - * @example {{{ - * val initialText: Option[String] = getInitialText - * val textField = new JComponent(initialText.orNull,20) - * }}} - */ - @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse ev(null) - - /** Returns a $some containing the result of applying $f to this $option's - * value if this $option is nonempty. - * Otherwise return $none. - * - * @note This is similar to `flatMap` except here, - * $f does not need to wrap its result in an $option. - * - * @param f the function to apply - * @see flatMap - * @see foreach - */ - @inline final def map[B](f: A => B): Option[B] = - if (isEmpty) None else Some(f(this.get)) - - /** Returns the result of applying $f to this $option's - * value if the $option is nonempty. Otherwise, evaluates - * expression `ifEmpty`. - * - * @note This is equivalent to `$option map f getOrElse ifEmpty`. - * - * @param ifEmpty the expression to evaluate if empty. - * @param f the function to apply if nonempty. - */ - @inline final def fold[B](ifEmpty: => B)(f: A => B): B = - if (isEmpty) ifEmpty else f(this.get) - - /** Returns the result of applying $f to this $option's value if - * this $option is nonempty. - * Returns $none if this $option is empty. - * Slightly different from `map` in that $f is expected to - * return an $option (which could be $none). - * - * @param f the function to apply - * @see map - * @see foreach - */ - @inline final def flatMap[B](f: A => Option[B]): Option[B] = - if (isEmpty) None else f(this.get) - - def flatten[B](implicit ev: A <:< Option[B]): Option[B] = - if (isEmpty) None else ev(this.get) - - /** Returns this $option if it is nonempty '''and''' applying the predicate $p to - * this $option's value returns true. 
Otherwise, return $none. - * - * @param p the predicate used for testing. - */ - @inline final def filter(p: A => Boolean): Option[A] = - if (isEmpty || p(this.get)) this else None - - /** Returns this $option if it is nonempty '''and''' applying the predicate $p to - * this $option's value returns false. Otherwise, return $none. - * - * @param p the predicate used for testing. - */ - @inline final def filterNot(p: A => Boolean): Option[A] = - if (isEmpty || !p(this.get)) this else None - - /** Returns false if the option is $none, true otherwise. - * @note Implemented here to avoid the implicit conversion to Iterable. - */ - final def nonEmpty = isDefined - - /** Necessary to keep $option from being implicitly converted to - * [[scala.collection.Iterable]] in `for` comprehensions. - */ - @inline final def withFilter(p: A => Boolean): WithFilter = new WithFilter(p) - - /** We need a whole WithFilter class to honor the "doesn't create a new - * collection" contract even though it seems unlikely to matter much in a - * collection with max size 1. - */ - class WithFilter(p: A => Boolean) { - def map[B](f: A => B): Option[B] = self filter p map f - def flatMap[B](f: A => Option[B]): Option[B] = self filter p flatMap f - def foreach[U](f: A => U): Unit = self filter p foreach f - def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x)) - } - - /** Tests whether the option contains a given value as an element. - * - * @example {{{ - * // Returns true because Some instance contains string "something" which equals "something". - * Some("something") contains "something" - * - * // Returns false because "something" != "anything". - * Some("something") contains "anything" - * - * // Returns false when method called on None. - * None contains "anything" - * }}} - * - * @param elem the element to test. - * @return `true` if the option has an element that is equal (as - * determined by `==`) to `elem`, `false` otherwise. - */ - final def contains[A1 >: A](elem: A1): Boolean = - !isEmpty && this.get == elem - - /** Returns true if this option is nonempty '''and''' the predicate - * $p returns true when applied to this $option's value. - * Otherwise, returns false. - * - * @param p the predicate to test - */ - @inline final def exists(p: A => Boolean): Boolean = - !isEmpty && p(this.get) - - /** Returns true if this option is empty '''or''' the predicate - * $p returns true when applied to this $option's value. - * - * @param p the predicate to test - */ - @inline final def forall(p: A => Boolean): Boolean = isEmpty || p(this.get) - - /** Apply the given procedure $f to the option's value, - * if it is nonempty. Otherwise, do nothing. - * - * @param f the procedure to apply. - * @see map - * @see flatMap - */ - @inline final def foreach[U](f: A => U) { - if (!isEmpty) f(this.get) - } - - /** Returns a $some containing the result of - * applying `pf` to this $option's contained - * value, '''if''' this option is - * nonempty '''and''' `pf` is defined for that value. - * Returns $none otherwise. - * - * @example {{{ - * // Returns Some(HTTP) because the partial function covers the case. - * Some("http") collect {case "http" => "HTTP"} - * - * // Returns None because the partial function doesn't cover the case. - * Some("ftp") collect {case "http" => "HTTP"} - * - * // Returns None because the option is empty. There is no value to pass to the partial function. - * None collect {case value => value} - * }}} - * - * @param pf the partial function. 
- * @return the result of applying `pf` to this $option's - * value (if possible), or $none. - */ - @inline final def collect[B](pf: PartialFunction[A, B]): Option[B] = - if (!isEmpty) pf.lift(this.get) else None - - /** Returns this $option if it is nonempty, - * otherwise return the result of evaluating `alternative`. - * @param alternative the alternative expression. - */ - @inline final def orElse[B >: A](alternative: => Option[B]): Option[B] = - if (isEmpty) alternative else this - - /** Returns a singleton iterator returning the $option's value - * if it is nonempty, or an empty iterator if the option is empty. - */ - def iterator: Iterator[A] = - if (isEmpty) collection.Iterator.empty else collection.Iterator.single(this.get) - - /** Returns a singleton list containing the $option's value - * if it is nonempty, or the empty list if the $option is empty. - */ - def toList: List[A] = - if (isEmpty) List() else new ::(this.get, Nil) - - /** Returns a [[scala.util.Left]] containing the given - * argument `left` if this $option is empty, or - * a [[scala.util.Right]] containing this $option's value if - * this is nonempty. - * - * @param left the expression to evaluate and return if this is empty - * @see toLeft - */ - @inline final def toRight[X](left: => X): Either[X, A] = - if (isEmpty) Left(left) else Right(this.get) - - /** Returns a [[scala.util.Right]] containing the given - * argument `right` if this is empty, or - * a [[scala.util.Left]] containing this $option's value - * if this $option is nonempty. - * - * @param right the expression to evaluate and return if this is empty - * @see toRight - */ - @inline final def toLeft[X](right: => X): Either[A, X] = - if (isEmpty) Right(right) else Left(this.get) -} - -/** Class `Some[A]` represents existing values of type - * `A`. - * - * @author Martin Odersky - * @version 1.0, 16/07/2003 - */ -@SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -final case class Some[+A](@deprecatedName('x, "2.12.0") value: A) extends Option[A] { - def isEmpty = false - def get = value - - @deprecated("Use .value instead.", "2.12.0") def x: A = value -} - - -/** This case object represents non-existent values. - * - * @author Martin Odersky - * @version 1.0, 16/07/2003 - */ -@SerialVersionUID(5066590221178148012L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -case object None extends Option[Nothing] { - def isEmpty = true - def get = throw new NoSuchElementException("None.get") -} diff --git a/tests/scala2-library/src/library/scala/PartialFunction.scala b/tests/scala2-library/src/library/scala/PartialFunction.scala deleted file mode 100644 index c1a413d516fa..000000000000 --- a/tests/scala2-library/src/library/scala/PartialFunction.scala +++ /dev/null @@ -1,288 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - - -/** A partial function of type `PartialFunction[A, B]` is a unary function - * where the domain does not necessarily include all values of type `A`. - * The function `isDefinedAt` allows to test dynamically if a value is in - * the domain of the function. 
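That `isDefinedAt` test is what separates a `PartialFunction` from a plain `Function1`: callers can ask whether an argument is in the domain before applying. A minimal illustration (the `reciprocal` function is invented for the example):
{{{
object IsDefinedAtDemo extends App {
  val reciprocal: PartialFunction[Int, Double] = {
    case n if n != 0 => 1.0 / n
  }

  println(reciprocal.isDefinedAt(4)) // true
  println(reciprocal.isDefinedAt(0)) // false
  println(reciprocal(4))             // 0.25
  // reciprocal(0) would throw a MatchError, as described earlier in this diff.
}
}}}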
- * - * Even if `isDefinedAt` returns true for an `a: A`, calling `apply(a)` may - * still throw an exception, so the following code is legal: - * - * {{{ - * val f: PartialFunction[Int, Any] = { case _ => 1/0 } - * }}} - * - * It is the responsibility of the caller to call `isDefinedAt` before - * calling `apply`, because if `isDefinedAt` is false, it is not guaranteed - * `apply` will throw an exception to indicate an error condition. If an - * exception is not thrown, evaluation may result in an arbitrary value. - * - * The main distinction between `PartialFunction` and [[scala.Function1]] is - * that the user of a `PartialFunction` may choose to do something different - * with input that is declared to be outside its domain. For example: - * - * {{{ - * val sample = 1 to 10 - * val isEven: PartialFunction[Int, String] = { - * case x if x % 2 == 0 => x+" is even" - * } - * - * // the method collect can use isDefinedAt to select which members to collect - * val evenNumbers = sample collect isEven - * - * val isOdd: PartialFunction[Int, String] = { - * case x if x % 2 == 1 => x+" is odd" - * } - * - * // the method orElse allows chaining another partial function to handle - * // input outside the declared domain - * val numbers = sample map (isEven orElse isOdd) - * }}} - * - * - * @author Martin Odersky, Pavel Pavlov, Adriaan Moors - * @version 1.0, 16/07/2003 - */ -trait PartialFunction[-A, +B] extends (A => B) { self => - import PartialFunction._ - - /** Checks if a value is contained in the function's domain. - * - * @param x the value to test - * @return `'''true'''`, iff `x` is in the domain of this function, `'''false'''` otherwise. - */ - def isDefinedAt(x: A): Boolean - - /** Composes this partial function with a fallback partial function which - * gets applied where this partial function is not defined. - * - * @param that the fallback function - * @tparam A1 the argument type of the fallback function - * @tparam B1 the result type of the fallback function - * @return a partial function which has as domain the union of the domains - * of this partial function and `that`. The resulting partial function - * takes `x` to `this(x)` where `this` is defined, and to `that(x)` where it is not. - */ - def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = - new OrElse[A1, B1] (this, that) - //TODO: why not overload it with orElse(that: F1): F1? - - /** Composes this partial function with a transformation function that - * gets applied to results of this partial function. - * @param k the transformation function - * @tparam C the result type of the transformation function. - * @return a partial function with the same domain as this partial function, which maps - * arguments `x` to `k(this(x))`. - */ - override def andThen[C](k: B => C): PartialFunction[A, C] = - new AndThen[A, B, C] (this, k) - - /** Turns this partial function into a plain function returning an `Option` result. - * @see Function.unlift - * @return a function that takes an argument `x` to `Some(this(x))` if `this` - * is defined for `x`, and to `None` otherwise. - */ - def lift: A => Option[B] = new Lifted(this) - - /** Applies this partial function to the given argument when it is contained in the function domain. - * Applies fallback function where this partial function is not defined. - * - * Note that expression `pf.applyOrElse(x, default)` is equivalent to - * {{{ if(pf isDefinedAt x) pf(x) else default(x) }}} - * except that `applyOrElse` method can be implemented more efficiently. 
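The point made above, that `pf.applyOrElse(x, default)` behaves like `if (pf isDefinedAt x) pf(x) else default(x)` but lets partial-function literals run their pattern match only once, can be observed with a guard that counts its own evaluations. This is only a sketch to make the counts visible; the counter and names are invented:
{{{
object ApplyOrElseDemo extends App {
  var guardEvaluations = 0
  def isEven(n: Int): Boolean = { guardEvaluations += 1; n % 2 == 0 }

  val pf: PartialFunction[Int, String] = {
    case n if isEven(n) => n + " is even"
  }

  // Two-step use evaluates the guard twice: once in isDefinedAt, once in apply.
  guardEvaluations = 0
  if (pf.isDefinedAt(4)) println(pf(4))
  println("two-step: " + guardEvaluations + " guard evaluations")      // 2

  // applyOrElse on a literal uses the compiler-generated implementation,
  // so the guard runs only once for the same decision.
  guardEvaluations = 0
  println(pf.applyOrElse(4, (n: Int) => n + " is odd"))
  println("applyOrElse: " + guardEvaluations + " guard evaluation")    // 1
}
}}}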
- * For all partial function literals the compiler generates an `applyOrElse` implementation which - * avoids double evaluation of pattern matchers and guards. - * This makes `applyOrElse` the basis for the efficient implementation for many operations and scenarios, such as: - * - * - combining partial functions into `orElse`/`andThen` chains does not lead to - * excessive `apply`/`isDefinedAt` evaluation - * - `lift` and `unlift` do not evaluate source functions twice on each invocation - * - `runWith` allows efficient imperative-style combining of partial functions - * with conditionally applied actions - * - * For non-literal partial function classes with nontrivial `isDefinedAt` method - * it is recommended to override `applyOrElse` with custom implementation that avoids - * double `isDefinedAt` evaluation. This may result in better performance - * and more predictable behavior w.r.t. side effects. - * - * @param x the function argument - * @param default the fallback function - * @return the result of this function or fallback function application. - * @since 2.10 - */ - def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = - if (isDefinedAt(x)) apply(x) else default(x) - - /** Composes this partial function with an action function which - * gets applied to results of this partial function. - * The action function is invoked only for its side effects; its result is ignored. - * - * Note that expression `pf.runWith(action)(x)` is equivalent to - * {{{ if(pf isDefinedAt x) { action(pf(x)); true } else false }}} - * except that `runWith` is implemented via `applyOrElse` and thus potentially more efficient. - * Using `runWith` avoids double evaluation of pattern matchers and guards for partial function literals. - * @see `applyOrElse`. - * - * @param action the action function - * @return a function which maps arguments `x` to `isDefinedAt(x)`. The resulting function - * runs `action(this(x))` where `this` is defined. - * @since 2.10 - */ - def runWith[U](action: B => U): A => Boolean = { x => - val z = applyOrElse(x, checkFallback[B]) - if (!fallbackOccurred(z)) { action(z); true } else false - } -} - -/** A few handy operations which leverage the extra bit of information - * available in partial functions. 
Examples: - * {{{ - * import PartialFunction._ - * - * def strangeConditional(other: Any): Boolean = cond(other) { - * case x: String if x == "abc" || x == "def" => true - * case x: Int => true - * } - * def onlyInt(v: Any): Option[Int] = condOpt(v) { case x: Int => x } - * }}} - * - * @author Paul Phillips - * @since 2.8 - */ -object PartialFunction { - /** Composite function produced by `PartialFunction#orElse` method - */ - private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) - extends scala.runtime.AbstractPartialFunction[A, B] with Serializable { - def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x) - - override def apply(x: A): B = f1.applyOrElse(x, f2) - - override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = { - val z = f1.applyOrElse(x, checkFallback[B]) - if (!fallbackOccurred(z)) z else f2.applyOrElse(x, default) - } - - override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) = - new OrElse[A1, B1] (f1, f2 orElse that) - - override def andThen[C](k: B => C) = - new OrElse[A, C] (f1 andThen k, f2 andThen k) - } - - /** Composite function produced by `PartialFunction#andThen` method - */ - private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] with Serializable { - def isDefinedAt(x: A) = pf.isDefinedAt(x) - - def apply(x: A): C = k(pf(x)) - - override def applyOrElse[A1 <: A, C1 >: C](x: A1, default: A1 => C1): C1 = { - val z = pf.applyOrElse(x, checkFallback[B]) - if (!fallbackOccurred(z)) k(z) else default(x) - } - } - - /** To implement patterns like {{{ if(pf isDefinedAt x) f1(pf(x)) else f2(x) }}} efficiently - * the following trick is used: - * - * To avoid double evaluation of pattern matchers & guards `applyOrElse` method is used here - * instead of `isDefinedAt`/`apply` pair. - * - * After call to `applyOrElse` we need both the function result it returned and - * the fact if the function's argument was contained in its domain. The only degree of freedom we have here - * to achieve this goal is tweaking with the continuation argument (`default`) of `applyOrElse` method. - * The obvious way is to throw an exception from `default` function and to catch it after - * calling `applyOrElse` but I consider this somewhat inefficient. - * - * I know only one way how you can do this task efficiently: `default` function should return unique marker object - * which never may be returned by any other (regular/partial) function. This way after calling `applyOrElse` you need - * just one reference comparison to distinguish if `pf isDefined x` or not. - * - * This correctly interacts with specialization as return type of `applyOrElse` - * (which is parameterized upper bound) can never be specialized. - * - * Here `fallback_pf` is used as both unique marker object and special fallback function that returns it. 
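The "unique marker object" trick explained in that comment (a fallback that returns a sentinel no regular result can be reference-equal to, so one `eq` check tells you whether the partial function was defined) is a generally useful pattern. The sketch below re-creates the idea in user code purely to illustrate it; it is not the library's own definition, and all names are invented:
{{{
object FallbackMarkerDemo extends App {
  // The fallback returns itself, so a single `eq` comparison detects it.
  private val Fallback: PartialFunction[Any, Any] = { case _ => Fallback }
  private def fallback[B]: PartialFunction[Any, B] =
    Fallback.asInstanceOf[PartialFunction[Any, B]]
  private def fellBack(result: Any): Boolean =
    result.asInstanceOf[AnyRef] eq Fallback

  // Try each partial function once, with no double evaluation of guards.
  def firstDefined[A, B](x: A, pfs: PartialFunction[A, B]*): Option[B] = {
    for (pf <- pfs) {
      val r = pf.applyOrElse(x, fallback[B])
      if (!fellBack(r)) return Some(r)
    }
    None
  }

  val small: PartialFunction[Int, String] = { case n if n < 10   => s"small $n" }
  val big: PartialFunction[Int, String]   = { case n if n >= 100 => s"big $n" }

  println(firstDefined(3, small, big))  // Some(small 3)
  println(firstDefined(50, small, big)) // None
}
}}}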
- */ - private[this] val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf } - private def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]] - private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef]) - - private class Lifted[-A, +B] (val pf: PartialFunction[A, B]) - extends scala.runtime.AbstractFunction1[A, Option[B]] with Serializable { - - def apply(x: A): Option[B] = { - val z = pf.applyOrElse(x, checkFallback[B]) - if (!fallbackOccurred(z)) Some(z) else None - } - } - - private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] with Serializable { - def isDefinedAt(x: A): Boolean = f(x).isDefined - - override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = { - val z = f(x) - if (!z.isEmpty) z.get else default(x) - } - - override def lift = f - } - - private[scala] def unlifted[A, B](f: A => Option[B]): PartialFunction[A, B] = f match { - case lf: Lifted[A, B] => lf.pf - case ff => new Unlifted(ff) - } - - /** Converts ordinary function to partial one - * @since 2.10 - */ - def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) } - - private[this] val constFalse: Any => Boolean = { _ => false} - - private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] with Serializable { - def isDefinedAt(x: Any) = false - def apply(x: Any) = throw new MatchError(x) - override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that - override def andThen[C](k: Nothing => C) = this - override val lift = (x: Any) => None - override def runWith[U](action: Nothing => U) = constFalse - } - - /** The partial function with empty domain. - * Any attempt to invoke empty partial function leads to throwing [[scala.MatchError]] exception. - * @since 2.10 - */ - def empty[A, B] : PartialFunction[A, B] = empty_pf - - /** Creates a Boolean test based on a value and a partial function. - * It behaves like a 'match' statement with an implied 'case _ => false' - * following the supplied cases. - * - * @param x the value to test - * @param pf the partial function - * @return true, iff `x` is in the domain of `pf` and `pf(x) == true`. - */ - def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = pf.applyOrElse(x, constFalse) - - /** Transforms a PartialFunction[T, U] `pf` into Function1[T, Option[U]] `f` - * whose result is `Some(x)` if the argument is in `pf`'s domain and `None` - * otherwise, and applies it to the value `x`. In effect, it is a - * `'''match'''` statement which wraps all case results in `Some(_)` and - * adds `'''case''' _ => None` to the end. - * - * @param x the value to test - * @param pf the PartialFunction[T, U] - * @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise. 
- */ - def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = pf.lift(x) -} diff --git a/tests/scala2-library/src/library/scala/Predef.scala b/tests/scala2-library/src/library/scala/Predef.scala deleted file mode 100644 index a0f22ed3767a..000000000000 --- a/tests/scala2-library/src/library/scala/Predef.scala +++ /dev/null @@ -1,642 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.language.implicitConversions - -import scala.collection.{ mutable, immutable, generic } -import immutable.StringOps -import mutable.ArrayOps -import generic.CanBuildFrom -import scala.annotation.{ elidable, implicitNotFound } -import scala.annotation.elidable.ASSERTION -import scala.io.StdIn - -/** The `Predef` object provides definitions that are accessible in all Scala - * compilation units without explicit qualification. - * - * === Commonly Used Types === - * Predef provides type aliases for types which are commonly used, such as - * the immutable collection types [[scala.collection.immutable.Map]], - * [[scala.collection.immutable.Set]], and the [[scala.collection.immutable.List]] - * constructors ([[scala.collection.immutable.::]] and - * [[scala.collection.immutable.Nil]]). - * - * === Console Output === - * For basic console output, `Predef` provides convenience methods [[print(x:Any* print]] and [[println(x:Any* println]], - * which are aliases of the methods in the object [[scala.Console]]. - * - * === Assertions === - * A set of `assert` functions are provided for use as a way to document - * and dynamically check invariants in code. Invocations of `assert` can be elided - * at compile time by providing the command line option `-Xdisable-assertions`, - * which raises `-Xelide-below` above `elidable.ASSERTION`, to the `scalac` command. - * - * Variants of `assert` intended for use with static analysis tools are also - * provided: `assume`, `require` and `ensuring`. `require` and `ensuring` are - * intended for use as a means of design-by-contract style specification - * of pre- and post-conditions on functions, with the intention that these - * specifications could be consumed by a static analysis tool. For instance, - * - * {{{ - * def addNaturals(nats: List[Int]): Int = { - * require(nats forall (_ >= 0), "List contains negative numbers") - * nats.foldLeft(0)(_ + _) - * } ensuring(_ >= 0) - * }}} - * - * The declaration of `addNaturals` states that the list of integers passed should - * only contain natural numbers (i.e. non-negative), and that the result returned - * will also be natural. `require` is distinct from `assert` in that if the - * condition fails, then the caller of the function is to blame rather than a - * logical error having been made within `addNaturals` itself. `ensuring` is a - * form of `assert` that declares the guarantee the function is providing with - * regards to its return value. - * - * === Implicit Conversions === - * A number of commonly applied implicit conversions are also defined here, and - * in the parent type [[scala.LowPriorityImplicits]]. Implicit conversions - * are provided for the "widening" of numeric values, for instance, converting a - * Short value to a Long value as required, and to add additional higher-order - * functions to Array values. These are described in more detail in the documentation of [[scala.Array]]. 
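Two of the implicit conversions mentioned in this overview are easy to see in everyday code: the language-mandated numeric widenings (the `int2long`-style coercions shown earlier in this diff) and the `ArrayOps` wrappers that give plain JVM arrays the collection methods. A small sketch, nothing in it is specific to this changeset:
{{{
object PredefConversionsDemo extends App {
  // Numeric widening: an Int is accepted where a Long is expected.
  def twice(x: Long): Long = 2L * x
  println(twice(21)) // 42, the Int argument is widened to Long

  // ArrayOps: Array[T] has no `map` of its own; Predef's implicit
  // conversion to ArrayOps supplies the collection operations.
  val squares: Array[Int] = Array(1, 2, 3).map(n => n * n)
  println(squares.mkString(", ")) // 1, 4, 9
}
}}}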
- * - * @groupname utilities Utility Methods - * @groupprio utilities 10 - * - * @groupname assertions Assertions - * @groupprio assertions 20 - * @groupdesc assertions These methods support program verification and runtime correctness. - * - * @groupname console-output Console Output - * @groupprio console-output 30 - * @groupdesc console-output These methods provide output via the console. - * - * @groupname type-constraints Type Constraints - * @groupprio type-constraints 40 - * @groupdesc type-constraints These entities allows constraints between types to be stipulated. - * - * @groupname aliases Aliases - * @groupprio aliases 50 - * @groupdesc aliases These aliases bring selected immutable types into scope without any imports. - * - * @groupname conversions-string String Conversions - * @groupprio conversions-string 60 - * @groupdesc conversions-string Conversions to and from String and StringOps. - * - * @groupname implicit-classes-any Implicit Classes - * @groupprio implicit-classes-any 70 - * @groupdesc implicit-classes-any These implicit classes add useful extension methods to every type. - * - * @groupname implicit-classes-char CharSequence Conversions - * @groupprio implicit-classes-char 80 - * @groupdesc implicit-classes-char These implicit classes add CharSequence methods to Array[Char] and IndexedSeq[Char] instances. - * - * @groupname conversions-java-to-anyval Java to Scala - * @groupprio conversions-java-to-anyval 90 - * @groupdesc conversions-java-to-anyval Implicit conversion from Java primitive wrapper types to Scala equivalents. - * - * @groupname conversions-anyval-to-java Scala to Java - * @groupprio conversions-anyval-to-java 100 - * @groupdesc conversions-anyval-to-java Implicit conversion from Scala AnyVals to Java primitive wrapper types equivalents. - * - * @groupname conversions-array-to-wrapped-array Array to WrappedArray - * @groupprio conversions-array-to-wrapped-array 110 - * @groupdesc conversions-array-to-wrapped-array Conversions from Arrays to WrappedArrays. - */ -object Predef extends LowPriorityImplicits with DeprecatedPredef { - /** - * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to - * the class literal `T.class` in Java. - * - * @example {{{ - * val listClass = classOf[List[_]] - * // listClass is java.lang.Class[List[_]] = class scala.collection.immutable.List - * - * val mapIntString = classOf[Map[Int,String]] - * // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map - * }}} - * @group utilities - */ - def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. - - /** The `String` type in Scala has methods that come either from the underlying - * Java String (see the documentation corresponding to your Java version, for - * example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]]) or - * are added implicitly through [[scala.collection.immutable.StringOps]]. - * @group aliases - */ - type String = java.lang.String - /** @group aliases */ - type Class[T] = java.lang.Class[T] - - // miscellaneous ----------------------------------------------------- - scala.`package` // to force scala package object to be seen. - scala.collection.immutable.List // to force Nil, :: to be seen. 
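`classOf[T]`, stubbed above and filled in by the compiler, is the Scala spelling of Java's `T.class` literal. Two quick uses, in line with the scaladoc example:
{{{
object ClassOfDemo extends App {
  println(classOf[String])    // class java.lang.String
  println(classOf[List[_]])   // class scala.collection.immutable.List

  val cls = classOf[java.lang.StringBuilder]
  println(cls.getName)                                  // java.lang.StringBuilder
  println(cls.isInstance(new java.lang.StringBuilder))  // true
}
}}}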
- - /** @group aliases */ - type Function[-A, +B] = Function1[A, B] - - /** @group aliases */ - type Map[A, +B] = immutable.Map[A, B] - /** @group aliases */ - type Set[A] = immutable.Set[A] - /** @group aliases */ - val Map = immutable.Map - /** @group aliases */ - val Set = immutable.Set - - // Manifest types, companions, and incantations for summoning - @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") - @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") - type ClassManifest[T] = scala.reflect.ClassManifest[T] - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - type OptManifest[T] = scala.reflect.OptManifest[T] - @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - type Manifest[T] = scala.reflect.Manifest[T] - @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") - val ClassManifest = scala.reflect.ClassManifest - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - // val Manifest = scala.reflect.Manifest - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - val NoManifest = scala.reflect.NoManifest - - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") - def manifest[T](implicit m: Manifest[T]) = m - @deprecated("use scala.reflect.classTag[T] instead", "2.10.0") - def classManifest[T](implicit m: ClassManifest[T]) = m - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - def optManifest[T](implicit m: OptManifest[T]) = m - - // Minor variations on identity functions - /** @group utilities */ - @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version - /** @group utilities */ - @inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` - /** @group utilities */ - @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements - - // assertions --------------------------------------------------------- - - /** Tests an expression, throwing an `AssertionError` if false. 
- * Calls to this method will not be generated if `-Xelide-below` - * is greater than `ASSERTION`. - * - * @see [[scala.annotation.elidable elidable]] - * @param assertion the expression to test - * @group assertions - */ - @elidable(ASSERTION) - def assert(assertion: Boolean) { - if (!assertion) - throw new java.lang.AssertionError("assertion failed") - } - - /** Tests an expression, throwing an `AssertionError` if false. - * Calls to this method will not be generated if `-Xelide-below` - * is greater than `ASSERTION`. - * - * @see [[scala.annotation.elidable elidable]] - * @param assertion the expression to test - * @param message a String to include in the failure message - * @group assertions - */ - @elidable(ASSERTION) @inline - final def assert(assertion: Boolean, message: => Any) { - if (!assertion) - throw new java.lang.AssertionError("assertion failed: "+ message) - } - - /** Tests an expression, throwing an `AssertionError` if false. - * This method differs from assert only in the intent expressed: - * assert contains a predicate which needs to be proven, while - * assume contains an axiom for a static checker. Calls to this method - * will not be generated if `-Xelide-below` is greater than `ASSERTION`. - * - * @see [[scala.annotation.elidable elidable]] - * @param assumption the expression to test - * @group assertions - */ - @elidable(ASSERTION) - def assume(assumption: Boolean) { - if (!assumption) - throw new java.lang.AssertionError("assumption failed") - } - - /** Tests an expression, throwing an `AssertionError` if false. - * This method differs from assert only in the intent expressed: - * assert contains a predicate which needs to be proven, while - * assume contains an axiom for a static checker. Calls to this method - * will not be generated if `-Xelide-below` is greater than `ASSERTION`. - * - * @see [[scala.annotation.elidable elidable]] - * @param assumption the expression to test - * @param message a String to include in the failure message - * @group assertions - */ - @elidable(ASSERTION) @inline - final def assume(assumption: Boolean, message: => Any) { - if (!assumption) - throw new java.lang.AssertionError("assumption failed: "+ message) - } - - /** Tests an expression, throwing an `IllegalArgumentException` if false. - * This method is similar to `assert`, but blames the caller of the method - * for violating the condition. - * - * @param requirement the expression to test - * @group assertions - */ - def require(requirement: Boolean) { - if (!requirement) - throw new IllegalArgumentException("requirement failed") - } - - /** Tests an expression, throwing an `IllegalArgumentException` if false. - * This method is similar to `assert`, but blames the caller of the method - * for violating the condition. - * - * @param requirement the expression to test - * @param message a String to include in the failure message - * @group assertions - */ - @inline final def require(requirement: Boolean, message: => Any) { - if (!requirement) - throw new IllegalArgumentException("requirement failed: "+ message) - } - - /** `???` can be used for marking methods that remain to be implemented. - * @throws NotImplementedError - * @group utilities - */ - def ??? 
: Nothing = throw new NotImplementedError - - // tupling ------------------------------------------------------------ - - @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") - type Pair[+A, +B] = Tuple2[A, B] - @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") - object Pair { - def apply[A, B](x: A, y: B) = Tuple2(x, y) - def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) - } - - @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") - type Triple[+A, +B, +C] = Tuple3[A, B, C] - @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") - object Triple { - def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) - def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) - } - - // implicit classes ----------------------------------------------------- - - /** @group implicit-classes-any */ - implicit final class ArrowAssoc[A](private val self: A) extends AnyVal { - @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y) - def →[B](y: B): Tuple2[A, B] = ->(y) - } - - /** @group implicit-classes-any */ - implicit final class Ensuring[A](private val self: A) extends AnyVal { - def ensuring(cond: Boolean): A = { assert(cond); self } - def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self } - def ensuring(cond: A => Boolean): A = { assert(cond(self)); self } - def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self } - } - - /** @group implicit-classes-any */ - implicit final class StringFormat[A](private val self: A) extends AnyVal { - /** Returns string formatted according to given `format` string. - * Format strings are as for `String.format` - * (@see java.lang.String.format). - */ - @inline def formatted(fmtstr: String): String = fmtstr format self - } - - // scala/bug#8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit - /** @group implicit-classes-any */ - implicit final class any2stringadd[A](private val self: A) extends AnyVal { - def +(other: String): String = String.valueOf(self) + other - } - - implicit final class RichException(private val self: Throwable) extends AnyVal { - import scala.compat.Platform.EOL - @deprecated("use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) - } - - // Sadly we have to do `@deprecatedName(null, "2.12.0")` because - // `@deprecatedName(since="2.12.0")` incurs a warning about - // Usage of named or default arguments transformed this annotation constructor call into a block. - // The corresponding AnnotationInfo will contain references to local values and default getters - // instead of the actual argument trees - // and `@deprecatedName(Symbol(""), "2.12.0")` crashes scalac with - // scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving object Symbol - // in run/repl-no-imports-no-predef-power.scala. 
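Of the implicit classes above, `ArrowAssoc` is what makes the ubiquitous `key -> value` tuple syntax work, `Ensuring` backs the post-condition style mentioned in the Predef overview, and `StringFormat` supplies `formatted`. A brief illustration (the values are arbitrary):
{{{
object ImplicitClassesDemo extends App {
  // ArrowAssoc: `->` builds a Tuple2, so Map literals read naturally.
  val capitals = Map("CH" -> "Bern", "FR" -> "Paris")
  println(capitals("CH")) // Bern

  // Ensuring: attach a post-condition to an expression's value.
  def abs(x: Int): Int = (if (x < 0) -x else x).ensuring(_ >= 0, "abs must be non-negative")
  println(abs(-7)) // 7

  // StringFormat: `formatted` delegates to String.format.
  println(42.formatted("%05d")) // 00042
}
}}}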
- /** @group implicit-classes-char */ - implicit final class SeqCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { - def length: Int = __sequenceOfChars.length - def charAt(index: Int): Char = __sequenceOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) - override def toString = __sequenceOfChars mkString "" - } - - /** @group implicit-classes-char */ - implicit final class ArrayCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __arrayOfChars: Array[Char]) extends CharSequence { - def length: Int = __arrayOfChars.length - def charAt(index: Int): Char = __arrayOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) - override def toString = __arrayOfChars mkString "" - } - - implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] { - def apply(from: String) = apply() - def apply() = mutable.StringBuilder.newBuilder - } - - /** @group conversions-string */ - @inline implicit def augmentString(x: String): StringOps = new StringOps(x) - /** @group conversions-string */ - @inline implicit def unaugmentString(x: StringOps): String = x.repr - - // printing ----------------------------------------------------------- - - /** Prints an object to `out` using its `toString` method. - * - * @param x the object to print; may be null. - * @group console-output - */ - def print(x: Any) = Console.print(x) - - /** Prints a newline character on the default output. - * @group console-output - */ - def println() = Console.println() - - /** Prints out an object to the default output, followed by a newline character. - * - * @param x the object to print. - * @group console-output - */ - def println(x: Any) = Console.println(x) - - /** Prints its arguments as a formatted string to the default output, - * based on a string pattern (in a fashion similar to printf in C). - * - * The interpretation of the formatting patterns is described in - * [[java.util.Formatter]]. - * - * Consider using the [[scala.StringContext.f f interpolator]] as more type safe and idiomatic. - * - * @param text the pattern for formatting the arguments. - * @param args the arguments used to instantiating the pattern. 
- * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments - * - * @see [[scala.StringContext.f StringContext.f]] - * @group console-output - */ - def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*)) - - // views -------------------------------------------------------------- - - implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)): runtime.Tuple2Zipped.Ops[T1, T2] = new runtime.Tuple2Zipped.Ops(x) - implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)): runtime.Tuple3Zipped.Ops[T1, T2, T3] = new runtime.Tuple3Zipped.Ops(x) - - implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match { - case x: Array[AnyRef] => refArrayOps[AnyRef](x) - case x: Array[Boolean] => booleanArrayOps(x) - case x: Array[Byte] => byteArrayOps(x) - case x: Array[Char] => charArrayOps(x) - case x: Array[Double] => doubleArrayOps(x) - case x: Array[Float] => floatArrayOps(x) - case x: Array[Int] => intArrayOps(x) - case x: Array[Long] => longArrayOps(x) - case x: Array[Short] => shortArrayOps(x) - case x: Array[Unit] => unitArrayOps(x) - case null => null - }).asInstanceOf[ArrayOps[T]] - - implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps.ofBoolean = new ArrayOps.ofBoolean(xs) - implicit def byteArrayOps(xs: Array[Byte]): ArrayOps.ofByte = new ArrayOps.ofByte(xs) - implicit def charArrayOps(xs: Array[Char]): ArrayOps.ofChar = new ArrayOps.ofChar(xs) - implicit def doubleArrayOps(xs: Array[Double]): ArrayOps.ofDouble = new ArrayOps.ofDouble(xs) - implicit def floatArrayOps(xs: Array[Float]): ArrayOps.ofFloat = new ArrayOps.ofFloat(xs) - implicit def intArrayOps(xs: Array[Int]): ArrayOps.ofInt = new ArrayOps.ofInt(xs) - implicit def longArrayOps(xs: Array[Long]): ArrayOps.ofLong = new ArrayOps.ofLong(xs) - implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps.ofRef[T] = new ArrayOps.ofRef[T](xs) - implicit def shortArrayOps(xs: Array[Short]): ArrayOps.ofShort = new ArrayOps.ofShort(xs) - implicit def unitArrayOps(xs: Array[Unit]): ArrayOps.ofUnit = new ArrayOps.ofUnit(xs) - - // "Autoboxing" and "Autounboxing" --------------------------------------------------- - - /** @group conversions-anyval-to-java */ - implicit def byte2Byte(x: Byte): java.lang.Byte = x.asInstanceOf[java.lang.Byte] - /** @group conversions-anyval-to-java */ - implicit def short2Short(x: Short): java.lang.Short = x.asInstanceOf[java.lang.Short] - /** @group conversions-anyval-to-java */ - implicit def char2Character(x: Char): java.lang.Character = x.asInstanceOf[java.lang.Character] - /** @group conversions-anyval-to-java */ - implicit def int2Integer(x: Int): java.lang.Integer = x.asInstanceOf[java.lang.Integer] - /** @group conversions-anyval-to-java */ - implicit def long2Long(x: Long): java.lang.Long = x.asInstanceOf[java.lang.Long] - /** @group conversions-anyval-to-java */ - implicit def float2Float(x: Float): java.lang.Float = x.asInstanceOf[java.lang.Float] - /** @group conversions-anyval-to-java */ - implicit def double2Double(x: Double): java.lang.Double = x.asInstanceOf[java.lang.Double] - /** @group conversions-anyval-to-java */ - implicit def boolean2Boolean(x: Boolean): java.lang.Boolean = x.asInstanceOf[java.lang.Boolean] - - /** @group conversions-java-to-anyval */ - implicit def Byte2byte(x: java.lang.Byte): Byte = x.asInstanceOf[Byte] - /** @group conversions-java-to-anyval */ - implicit def Short2short(x: java.lang.Short): Short = x.asInstanceOf[Short] - /** @group conversions-java-to-anyval */ - implicit def Character2char(x: 
java.lang.Character): Char = x.asInstanceOf[Char] - /** @group conversions-java-to-anyval */ - implicit def Integer2int(x: java.lang.Integer): Int = x.asInstanceOf[Int] - /** @group conversions-java-to-anyval */ - implicit def Long2long(x: java.lang.Long): Long = x.asInstanceOf[Long] - /** @group conversions-java-to-anyval */ - implicit def Float2float(x: java.lang.Float): Float = x.asInstanceOf[Float] - /** @group conversions-java-to-anyval */ - implicit def Double2double(x: java.lang.Double): Double = x.asInstanceOf[Double] - /** @group conversions-java-to-anyval */ - implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.asInstanceOf[Boolean] - - // Type Constraints -------------------------------------------------------------- - - /** - * An instance of `A <:< B` witnesses that `A` is a subtype of `B`. - * Requiring an implicit argument of the type `A <:< B` encodes - * the generalized constraint `A <: B`. - * - * @note we need a new type constructor `<:<` and evidence `conforms`, - * as reusing `Function1` and `identity` leads to ambiguities in - * case of type errors (`any2stringadd` is inferred) - * - * To constrain any abstract type T that's in scope in a method's - * argument list (not just the method's own type parameters) simply - * add an implicit argument of type `T <:< U`, where `U` is the required - * upper bound; or for lower-bounds, use: `L <:< T`, where `L` is the - * required lower bound. - * - * In part contributed by Jason Zaugg. - * @group type-constraints - */ - @implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.") - sealed abstract class <:<[-From, +To] extends (From => To) with Serializable - private[this] final val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x } - // The dollar prefix is to dodge accidental shadowing of this method - // by a user-defined method of the same name (scala/bug#7788). - // The collections rely on this method. - /** @group type-constraints */ - implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A] - - @deprecated("use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") - def conforms[A]: A <:< A = $conforms[A] - - /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. - * - * @see `<:<` for expressing subtyping constraints - * @group type-constraints - */ - @implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.") - sealed abstract class =:=[From, To] extends (From => To) with Serializable - private[this] final val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x } - /** @group type-constraints */ - object =:= { - implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A] - } - - /** A type for which there is always an implicit value. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` - */ - class DummyImplicit - - object DummyImplicit { - - /** An implicit value yielding a `DummyImplicit`. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` - */ - implicit def dummyImplicit: DummyImplicit = new DummyImplicit - } -} - -private[scala] trait DeprecatedPredef { - self: Predef.type => - - // Deprecated stubs for any who may have been calling these methods directly. 
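
The `<:<` and `=:=` evidence types deleted above are consumed by asking for them implicitly, as their scaladoc describes. A minimal Scala sketch of that pattern follows; `EvidenceDemo`, `Wrapper`, `firstKeys` and `sumIfInt` are invented names, and this is an illustrative usage example rather than part of the deleted sources.

  object EvidenceDemo {
    // Requiring `ev: A <:< (K, V)` constrains A to be a pair at the call site
    // without restricting Wrapper's own type parameter at definition site.
    class Wrapper[A](val values: List[A]) {
      def firstKeys[K, V](implicit ev: A <:< (K, V)): List[K] =
        values.map(a => ev(a)._1)          // ev is also a function A => (K, V)

      // `=:=` demands type equality rather than mere subtyping.
      def sumIfInt(implicit ev: A =:= Int): Int =
        values.map(ev).sum
    }

    def main(args: Array[String]): Unit = {
      println(new Wrapper(List("a" -> 1, "b" -> 2)).firstKeys)   // List(a, b)
      println(new Wrapper(List(1, 2, 3)).sumIfInt)               // 6
      // new Wrapper(List("x")).sumIfInt does not compile:
      // "Cannot prove that String =:= Int."
    }
  }
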
- @deprecated("use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) - @deprecated("use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) - @deprecated("use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) - @deprecated("use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) - @deprecated("use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) - @deprecated("use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) - - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) -} - -/** The `LowPriorityImplicits` class provides implicit values that -* are valid in all Scala compilation units without explicit qualification, -* but that are partially overridden by higher-priority conversions in object -* `Predef`. -* -* @author Martin Odersky -* @since 2.8 -*/ -// scala/bug#7335 Parents of Predef are defined in the same compilation unit to avoid -// cyclic reference errors compiling the standard library *without* a previously -// compiled copy on the classpath. -private[scala] abstract class LowPriorityImplicits { - import mutable.WrappedArray - import immutable.WrappedString - - /** We prefer the java.lang.* boxed types to these wrappers in - * any potential conflicts. Conflicts do exist because the wrappers - * need to implement ScalaNumber in order to have a symmetric equals - * method, but that implies implementing java.lang.Number as well. - * - * Note - these are inlined because they are value classes, but - * the call to xxxWrapper is not eliminated even though it does nothing. - * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ - * because maybe loading Predef has side effects! 
- */ - @inline implicit def byteWrapper(x: Byte): runtime.RichByte = new runtime.RichByte(x) - @inline implicit def shortWrapper(x: Short): runtime.RichShort = new runtime.RichShort(x) - @inline implicit def intWrapper(x: Int): runtime.RichInt = new runtime.RichInt(x) - @inline implicit def charWrapper(c: Char): runtime.RichChar = new runtime.RichChar(c) - @inline implicit def longWrapper(x: Long): runtime.RichLong = new runtime.RichLong(x) - @inline implicit def floatWrapper(x: Float): runtime.RichFloat = new runtime.RichFloat(x) - @inline implicit def doubleWrapper(x: Double): runtime.RichDouble = new runtime.RichDouble(x) - @inline implicit def booleanWrapper(x: Boolean): runtime.RichBoolean = new runtime.RichBoolean(x) - - /** @group conversions-array-to-wrapped-array */ - implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = - if (xs eq null) null - else WrappedArray.make(xs) - - // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] - // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 - // unique ones by way of this implicit, let's share one. - /** @group conversions-array-to-wrapped-array */ - implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { - if (xs eq null) null - else if (xs.length == 0) WrappedArray.empty[T] - else new WrappedArray.ofRef[T](xs) - } - - /** @group conversions-array-to-wrapped-array */ - implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null - /** @group conversions-array-to-wrapped-array */ - implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null - - /** @group conversions-string */ - implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null - /** @group conversions-string */ - implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null - - implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = - new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { - def apply(from: String) = immutable.IndexedSeq.newBuilder[T] - def apply() = immutable.IndexedSeq.newBuilder[T] - } -} diff --git a/tests/scala2-library/src/library/scala/Product.scala 
b/tests/scala2-library/src/library/scala/Product.scala deleted file mode 100644 index f3a96fb333b2..000000000000 --- a/tests/scala2-library/src/library/scala/Product.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** Base trait for all products, which in the standard library include at - * least [[scala.Product1]] through [[scala.Product22]] and therefore also - * their subclasses [[scala.Tuple1]] through [[scala.Tuple22]]. In addition, - * all case classes implement `Product` with synthetically generated methods. - * - * @author Burak Emir - * @version 1.0 - * @since 2.3 - */ -trait Product extends Any with Equals { - /** The n^th^ element of this product, 0-based. In other words, for a - * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. - * - * @param n the index of the element to return - * @throws IndexOutOfBoundsException - * @return the element `n` elements after the first element - */ - def productElement(n: Int): Any - - /** The size of this product. - * @return for a product `A(x,,1,,, ..., x,,k,,)`, returns `k` - */ - def productArity: Int - - /** An iterator over all the elements of this product. - * @return in the default implementation, an `Iterator[Any]` - */ - def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] { - private var c: Int = 0 - private val cmax = productArity - def hasNext = c < cmax - def next() = { val result = productElement(c); c += 1; result } - } - - /** A string used in the `toString` methods of derived classes. - * Implementations may override this method to prepend a string prefix - * to the result of `toString` methods. - * - * @return in the default implementation, the empty string - */ - def productPrefix = "" -} diff --git a/tests/scala2-library/src/library/scala/Product1.scala b/tests/scala2-library/src/library/scala/Product1.scala deleted file mode 100644 index 3b0194e41f18..000000000000 --- a/tests/scala2-library/src/library/scala/Product1.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product1 { - def unapply[T1](x: Product1[T1]): Option[Product1[T1]] = - Some(x) -} - -/** Product1 is a Cartesian product of 1 component. - * @since 2.3 - */ -trait Product1[@specialized(Int, Long, Double) +T1] extends Any with Product { - /** The arity of this product. - * @return 1 - */ - override def productArity = 1 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. 
- */ - def _1: T1 - - -} diff --git a/tests/scala2-library/src/library/scala/Product10.scala b/tests/scala2-library/src/library/scala/Product10.scala deleted file mode 100644 index 8826d95007e5..000000000000 --- a/tests/scala2-library/src/library/scala/Product10.scala +++ /dev/null @@ -1,92 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product10 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](x: Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]): Option[Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] = - Some(x) -} - -/** Product10 is a Cartesian product of 10 components. - * @since 2.3 - */ -trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Any with Product { - /** The arity of this product. - * @return 10 - */ - override def productArity = 10 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - - -} diff --git a/tests/scala2-library/src/library/scala/Product11.scala b/tests/scala2-library/src/library/scala/Product11.scala deleted file mode 100644 index 2a846fff4e22..000000000000 --- a/tests/scala2-library/src/library/scala/Product11.scala +++ /dev/null @@ -1,97 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
- -package scala - -object Product11 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](x: Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]): Option[Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] = - Some(x) -} - -/** Product11 is a Cartesian product of 11 components. - * @since 2.3 - */ -trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends Any with Product { - /** The arity of this product. - * @return 11 - */ - override def productArity = 11 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - - -} diff --git a/tests/scala2-library/src/library/scala/Product12.scala b/tests/scala2-library/src/library/scala/Product12.scala deleted file mode 100644 index 87419048d626..000000000000 --- a/tests/scala2-library/src/library/scala/Product12.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product12 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](x: Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]): Option[Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] = - Some(x) -} - -/** Product12 is a Cartesian product of 12 components. - * @since 2.3 - */ -trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] extends Any with Product { - /** The arity of this product. - * @return 12 - */ - override def productArity = 12 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. 
- * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - - -} diff --git a/tests/scala2-library/src/library/scala/Product13.scala b/tests/scala2-library/src/library/scala/Product13.scala deleted file mode 100644 index a944279a2eff..000000000000 --- a/tests/scala2-library/src/library/scala/Product13.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product13 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](x: Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]): Option[Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] = - Some(x) -} - -/** Product13 is a Cartesian product of 13 components. - * @since 2.3 - */ -trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13] extends Any with Product { - /** The arity of this product. - * @return 13 - */ - override def productArity = 13 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. 
- * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - - -} diff --git a/tests/scala2-library/src/library/scala/Product14.scala b/tests/scala2-library/src/library/scala/Product14.scala deleted file mode 100644 index 098721f21637..000000000000 --- a/tests/scala2-library/src/library/scala/Product14.scala +++ /dev/null @@ -1,112 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product14 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](x: Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]): Option[Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]] = - Some(x) -} - -/** Product14 is a Cartesian product of 14 components. - * @since 2.3 - */ -trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14] extends Any with Product { - /** The arity of this product. - * @return 14 - */ - override def productArity = 14 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. 
- * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - - -} diff --git a/tests/scala2-library/src/library/scala/Product15.scala b/tests/scala2-library/src/library/scala/Product15.scala deleted file mode 100644 index ef550c80d2a8..000000000000 --- a/tests/scala2-library/src/library/scala/Product15.scala +++ /dev/null @@ -1,117 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product15 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](x: Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]): Option[Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]] = - Some(x) -} - -/** Product15 is a Cartesian product of 15 components. - * @since 2.3 - */ -trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15] extends Any with Product { - /** The arity of this product. - * @return 15 - */ - override def productArity = 15 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. 
- * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - - -} diff --git a/tests/scala2-library/src/library/scala/Product16.scala b/tests/scala2-library/src/library/scala/Product16.scala deleted file mode 100644 index dd32e2f63741..000000000000 --- a/tests/scala2-library/src/library/scala/Product16.scala +++ /dev/null @@ -1,122 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product16 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](x: Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]): Option[Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]] = - Some(x) -} - -/** Product16 is a Cartesian product of 16 components. - * @since 2.3 - */ -trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16] extends Any with Product { - /** The arity of this product. - * @return 16 - */ - override def productArity = 16 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. 
- * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case 15 => _16 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - /** A projection of element 16 of this Product. - * @return A projection of element 16. - */ - def _16: T16 - - -} diff --git a/tests/scala2-library/src/library/scala/Product17.scala b/tests/scala2-library/src/library/scala/Product17.scala deleted file mode 100644 index e97cc5189ef9..000000000000 --- a/tests/scala2-library/src/library/scala/Product17.scala +++ /dev/null @@ -1,127 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product17 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](x: Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]): Option[Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]] = - Some(x) -} - -/** Product17 is a Cartesian product of 17 components. - * @since 2.3 - */ -trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17] extends Any with Product { - /** The arity of this product. - * @return 17 - */ - override def productArity = 17 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. 
- * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case 15 => _16 - case 16 => _17 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - /** A projection of element 16 of this Product. - * @return A projection of element 16. - */ - def _16: T16 - /** A projection of element 17 of this Product. - * @return A projection of element 17. - */ - def _17: T17 - - -} diff --git a/tests/scala2-library/src/library/scala/Product18.scala b/tests/scala2-library/src/library/scala/Product18.scala deleted file mode 100644 index 1266b77a9f52..000000000000 --- a/tests/scala2-library/src/library/scala/Product18.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product18 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](x: Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]): Option[Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]] = - Some(x) -} - -/** Product18 is a Cartesian product of 18 components. 
- * @since 2.3 - */ -trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18] extends Any with Product { - /** The arity of this product. - * @return 18 - */ - override def productArity = 18 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case 15 => _16 - case 16 => _17 - case 17 => _18 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - /** A projection of element 16 of this Product. - * @return A projection of element 16. - */ - def _16: T16 - /** A projection of element 17 of this Product. - * @return A projection of element 17. - */ - def _17: T17 - /** A projection of element 18 of this Product. - * @return A projection of element 18. - */ - def _18: T18 - - -} diff --git a/tests/scala2-library/src/library/scala/Product19.scala b/tests/scala2-library/src/library/scala/Product19.scala deleted file mode 100644 index 4bf5dcf23ebd..000000000000 --- a/tests/scala2-library/src/library/scala/Product19.scala +++ /dev/null @@ -1,137 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
- -package scala - -object Product19 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](x: Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]): Option[Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] = - Some(x) -} - -/** Product19 is a Cartesian product of 19 components. - * @since 2.3 - */ -trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19] extends Any with Product { - /** The arity of this product. - * @return 19 - */ - override def productArity = 19 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case 15 => _16 - case 16 => _17 - case 17 => _18 - case 18 => _19 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - /** A projection of element 16 of this Product. - * @return A projection of element 16. - */ - def _16: T16 - /** A projection of element 17 of this Product. - * @return A projection of element 17. - */ - def _17: T17 - /** A projection of element 18 of this Product. - * @return A projection of element 18. - */ - def _18: T18 - /** A projection of element 19 of this Product. - * @return A projection of element 19. 
- */ - def _19: T19 - - -} diff --git a/tests/scala2-library/src/library/scala/Product2.scala b/tests/scala2-library/src/library/scala/Product2.scala deleted file mode 100644 index 93144abeb3c3..000000000000 --- a/tests/scala2-library/src/library/scala/Product2.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product2 { - def unapply[T1, T2](x: Product2[T1, T2]): Option[Product2[T1, T2]] = - Some(x) -} - -/** Product2 is a Cartesian product of 2 components. - * @since 2.3 - */ -trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Any with Product { - /** The arity of this product. - * @return 2 - */ - override def productArity = 2 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - - -} diff --git a/tests/scala2-library/src/library/scala/Product20.scala b/tests/scala2-library/src/library/scala/Product20.scala deleted file mode 100644 index a1dfd469add8..000000000000 --- a/tests/scala2-library/src/library/scala/Product20.scala +++ /dev/null @@ -1,142 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product20 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](x: Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]): Option[Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] = - Some(x) -} - -/** Product20 is a Cartesian product of 20 components. - * @since 2.3 - */ -trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20] extends Any with Product { - /** The arity of this product. - * @return 20 - */ - override def productArity = 20 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. 
- * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case 15 => _16 - case 16 => _17 - case 17 => _18 - case 18 => _19 - case 19 => _20 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - /** A projection of element 16 of this Product. - * @return A projection of element 16. - */ - def _16: T16 - /** A projection of element 17 of this Product. - * @return A projection of element 17. - */ - def _17: T17 - /** A projection of element 18 of this Product. - * @return A projection of element 18. - */ - def _18: T18 - /** A projection of element 19 of this Product. - * @return A projection of element 19. - */ - def _19: T19 - /** A projection of element 20 of this Product. - * @return A projection of element 20. - */ - def _20: T20 - - -} diff --git a/tests/scala2-library/src/library/scala/Product21.scala b/tests/scala2-library/src/library/scala/Product21.scala deleted file mode 100644 index 4f01277ad3df..000000000000 --- a/tests/scala2-library/src/library/scala/Product21.scala +++ /dev/null @@ -1,147 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
- -package scala - -object Product21 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](x: Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]): Option[Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]] = - Some(x) -} - -/** Product21 is a Cartesian product of 21 components. - * @since 2.3 - */ -trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21] extends Any with Product { - /** The arity of this product. - * @return 21 - */ - override def productArity = 21 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case 15 => _16 - case 16 => _17 - case 17 => _18 - case 18 => _19 - case 19 => _20 - case 20 => _21 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. - * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - /** A projection of element 16 of this Product. - * @return A projection of element 16. - */ - def _16: T16 - /** A projection of element 17 of this Product. - * @return A projection of element 17. - */ - def _17: T17 - /** A projection of element 18 of this Product. - * @return A projection of element 18. - */ - def _18: T18 - /** A projection of element 19 of this Product. 
- * @return A projection of element 19. - */ - def _19: T19 - /** A projection of element 20 of this Product. - * @return A projection of element 20. - */ - def _20: T20 - /** A projection of element 21 of this Product. - * @return A projection of element 21. - */ - def _21: T21 - - -} diff --git a/tests/scala2-library/src/library/scala/Product22.scala b/tests/scala2-library/src/library/scala/Product22.scala deleted file mode 100644 index cef8d3040282..000000000000 --- a/tests/scala2-library/src/library/scala/Product22.scala +++ /dev/null @@ -1,152 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product22 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](x: Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]): Option[Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]] = - Some(x) -} - -/** Product22 is a Cartesian product of 22 components. - * @since 2.3 - */ -trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22] extends Any with Product { - /** The arity of this product. - * @return 22 - */ - override def productArity = 22 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case 9 => _10 - case 10 => _11 - case 11 => _12 - case 12 => _13 - case 13 => _14 - case 14 => _15 - case 15 => _16 - case 16 => _17 - case 17 => _18 - case 18 => _19 - case 19 => _20 - case 20 => _21 - case 21 => _22 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. - */ - def _9: T9 - /** A projection of element 10 of this Product. - * @return A projection of element 10. - */ - def _10: T10 - /** A projection of element 11 of this Product. 
- * @return A projection of element 11. - */ - def _11: T11 - /** A projection of element 12 of this Product. - * @return A projection of element 12. - */ - def _12: T12 - /** A projection of element 13 of this Product. - * @return A projection of element 13. - */ - def _13: T13 - /** A projection of element 14 of this Product. - * @return A projection of element 14. - */ - def _14: T14 - /** A projection of element 15 of this Product. - * @return A projection of element 15. - */ - def _15: T15 - /** A projection of element 16 of this Product. - * @return A projection of element 16. - */ - def _16: T16 - /** A projection of element 17 of this Product. - * @return A projection of element 17. - */ - def _17: T17 - /** A projection of element 18 of this Product. - * @return A projection of element 18. - */ - def _18: T18 - /** A projection of element 19 of this Product. - * @return A projection of element 19. - */ - def _19: T19 - /** A projection of element 20 of this Product. - * @return A projection of element 20. - */ - def _20: T20 - /** A projection of element 21 of this Product. - * @return A projection of element 21. - */ - def _21: T21 - /** A projection of element 22 of this Product. - * @return A projection of element 22. - */ - def _22: T22 - - -} diff --git a/tests/scala2-library/src/library/scala/Product3.scala b/tests/scala2-library/src/library/scala/Product3.scala deleted file mode 100644 index 7da324106d36..000000000000 --- a/tests/scala2-library/src/library/scala/Product3.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product3 { - def unapply[T1, T2, T3](x: Product3[T1, T2, T3]): Option[Product3[T1, T2, T3]] = - Some(x) -} - -/** Product3 is a Cartesian product of 3 components. - * @since 2.3 - */ -trait Product3[+T1, +T2, +T3] extends Any with Product { - /** The arity of this product. - * @return 3 - */ - override def productArity = 3 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. 
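The companion unapply shown for Product3 above just wraps the whole value in Some, so a ProductN pattern binds the product itself rather than its fields; a small sketch under the same assumptions:

object ProductUnapplyDemo extends App {
  (1, "two", true) match {
    case Product3(p) => println(s"arity=${p.productArity}, first=${p._1}") // arity=3, first=1
    case _           => println("not a Product3")
  }
}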
- */ - def _3: T3 - - -} diff --git a/tests/scala2-library/src/library/scala/Product4.scala b/tests/scala2-library/src/library/scala/Product4.scala deleted file mode 100644 index 88e5dea9d3b2..000000000000 --- a/tests/scala2-library/src/library/scala/Product4.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product4 { - def unapply[T1, T2, T3, T4](x: Product4[T1, T2, T3, T4]): Option[Product4[T1, T2, T3, T4]] = - Some(x) -} - -/** Product4 is a Cartesian product of 4 components. - * @since 2.3 - */ -trait Product4[+T1, +T2, +T3, +T4] extends Any with Product { - /** The arity of this product. - * @return 4 - */ - override def productArity = 4 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - - -} diff --git a/tests/scala2-library/src/library/scala/Product5.scala b/tests/scala2-library/src/library/scala/Product5.scala deleted file mode 100644 index d8c3ffc190aa..000000000000 --- a/tests/scala2-library/src/library/scala/Product5.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product5 { - def unapply[T1, T2, T3, T4, T5](x: Product5[T1, T2, T3, T4, T5]): Option[Product5[T1, T2, T3, T4, T5]] = - Some(x) -} - -/** Product5 is a Cartesian product of 5 components. - * @since 2.3 - */ -trait Product5[+T1, +T2, +T3, +T4, +T5] extends Any with Product { - /** The arity of this product. - * @return 5 - */ - override def productArity = 5 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. 
- */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - - -} diff --git a/tests/scala2-library/src/library/scala/Product6.scala b/tests/scala2-library/src/library/scala/Product6.scala deleted file mode 100644 index ab50d678fc56..000000000000 --- a/tests/scala2-library/src/library/scala/Product6.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product6 { - def unapply[T1, T2, T3, T4, T5, T6](x: Product6[T1, T2, T3, T4, T5, T6]): Option[Product6[T1, T2, T3, T4, T5, T6]] = - Some(x) -} - -/** Product6 is a Cartesian product of 6 components. - * @since 2.3 - */ -trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Any with Product { - /** The arity of this product. - * @return 6 - */ - override def productArity = 6 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - - -} diff --git a/tests/scala2-library/src/library/scala/Product7.scala b/tests/scala2-library/src/library/scala/Product7.scala deleted file mode 100644 index efdeb142d18b..000000000000 --- a/tests/scala2-library/src/library/scala/Product7.scala +++ /dev/null @@ -1,77 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product7 { - def unapply[T1, T2, T3, T4, T5, T6, T7](x: Product7[T1, T2, T3, T4, T5, T6, T7]): Option[Product7[T1, T2, T3, T4, T5, T6, T7]] = - Some(x) -} - -/** Product7 is a Cartesian product of 7 components. - * @since 2.3 - */ -trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product { - /** The arity of this product. 
- * @return 7 - */ - override def productArity = 7 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - - -} diff --git a/tests/scala2-library/src/library/scala/Product8.scala b/tests/scala2-library/src/library/scala/Product8.scala deleted file mode 100644 index 743c0ac4858d..000000000000 --- a/tests/scala2-library/src/library/scala/Product8.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product8 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8](x: Product8[T1, T2, T3, T4, T5, T6, T7, T8]): Option[Product8[T1, T2, T3, T4, T5, T6, T7, T8]] = - Some(x) -} - -/** Product8 is a Cartesian product of 8 components. - * @since 2.3 - */ -trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Any with Product { - /** The arity of this product. - * @return 8 - */ - override def productArity = 8 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. 
- */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - - -} diff --git a/tests/scala2-library/src/library/scala/Product9.scala b/tests/scala2-library/src/library/scala/Product9.scala deleted file mode 100644 index 8d04213cd93f..000000000000 --- a/tests/scala2-library/src/library/scala/Product9.scala +++ /dev/null @@ -1,87 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - -object Product9 { - def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9](x: Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]): Option[Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] = - Some(x) -} - -/** Product9 is a Cartesian product of 9 components. - * @since 2.3 - */ -trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Any with Product { - /** The arity of this product. - * @return 9 - */ - override def productArity = 9 - - - /** Returns the n-th projection of this product if 0 <= n < productArity, - * otherwise throws an `IndexOutOfBoundsException`. - * - * @param n number of the projection to be returned - * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException - */ - - @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { - case 0 => _1 - case 1 => _2 - case 2 => _3 - case 3 => _4 - case 4 => _5 - case 5 => _6 - case 6 => _7 - case 7 => _8 - case 8 => _9 - case _ => throw new IndexOutOfBoundsException(n.toString()) - } - - /** A projection of element 1 of this Product. - * @return A projection of element 1. - */ - def _1: T1 - /** A projection of element 2 of this Product. - * @return A projection of element 2. - */ - def _2: T2 - /** A projection of element 3 of this Product. - * @return A projection of element 3. - */ - def _3: T3 - /** A projection of element 4 of this Product. - * @return A projection of element 4. - */ - def _4: T4 - /** A projection of element 5 of this Product. - * @return A projection of element 5. - */ - def _5: T5 - /** A projection of element 6 of this Product. - * @return A projection of element 6. - */ - def _6: T6 - /** A projection of element 7 of this Product. - * @return A projection of element 7. - */ - def _7: T7 - /** A projection of element 8 of this Product. - * @return A projection of element 8. - */ - def _8: T8 - /** A projection of element 9 of this Product. - * @return A projection of element 9. 
- */ - def _9: T9 - - -} diff --git a/tests/scala2-library/src/library/scala/Proxy.scala b/tests/scala2-library/src/library/scala/Proxy.scala deleted file mode 100644 index 7c28e6ea2859..000000000000 --- a/tests/scala2-library/src/library/scala/Proxy.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** This class implements a simple proxy that forwards all calls to - * the public, non-final methods defined in class `Any` to another - * object self. Those methods are: - * {{{ - * def hashCode(): Int - * def equals(other: Any): Boolean - * def toString(): String - * }}} - * '''Note:''' forwarding methods in this way will most likely create - * an asymmetric equals method, which is not generally recommended. - * - * @author Matthias Zenger - * @version 1.0, 26/04/2004 - */ -trait Proxy extends Any { - def self: Any - - override def hashCode: Int = self.hashCode - override def equals(that: Any): Boolean = that match { - case null => false - case _ => - val x = that.asInstanceOf[AnyRef] - (x eq this.asInstanceOf[AnyRef]) || (x eq self.asInstanceOf[AnyRef]) || (x equals self) - } - override def toString = "" + self -} - -object Proxy { - /** A proxy which exposes the type it is proxying for via a type parameter. - */ - trait Typed[T] extends Any with Proxy { - def self: T - } -} diff --git a/tests/scala2-library/src/library/scala/Responder.scala b/tests/scala2-library/src/library/scala/Responder.scala deleted file mode 100644 index eb8260dc9ab8..000000000000 --- a/tests/scala2-library/src/library/scala/Responder.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala - -/** This object contains utility methods to build responders. - * - * @author Martin Odersky - * @author Burak Emir - * @version 1.0 - * - * @see class Responder - * @since 2.1 - */ -@deprecated("this object will be removed", "2.11.0") -object Responder { - - /** Creates a responder that answer continuations with the constant `a`. - */ - def constant[A](x: A) = new Responder[A] { - def respond(k: A => Unit) = k(x) - } - - /** Executes `x` and returns `'''true'''`, useful as syntactic - * convenience in for comprehensions. - */ - def exec[A](x: => Unit): Boolean = { x; true } - - /** Runs a responder, returning an optional result. - */ - def run[A](r: Responder[A]): Option[A] = { - var result: Option[A] = None - r.foreach(x => result = Some(x)) - result - } - - def loop[A](r: Responder[Unit]): Responder[Nothing] = - for (_ <- r; y <- loop(r)) yield y - - def loopWhile[A](cond: => Boolean)(r: Responder[Unit]): Responder[Unit] = - if (cond) for (_ <- r; y <- loopWhile(cond)(r)) yield y - else constant(()) -} - -/** Instances of responder are the building blocks of small programs - * written in continuation passing style. By using responder classes - * in for comprehensions, one can embed domain-specific languages in - * Scala while giving the impression that programs in these DSLs are - * written in direct style. 
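A minimal sketch of that continuation-passing style, using the (deprecated) Responder API shown above; the ask helper and its canned answers are invented for illustration:

object ResponderDemo extends App {
  def ask(prompt: String): Responder[String] = new Responder[String] {
    def respond(k: String => Unit): Unit = k(prompt.reverse) // stand-in for real I/O
  }
  val greeting = for {
    a <- ask("olleh")
    b <- ask("dlrow")
  } yield s"$a $b"
  println(Responder.run(greeting)) // Some(hello world)
}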
- * - * @author Martin Odersky - * @author Burak Emir - * @version 1.0 - * @since 2.1 - */ -@deprecated("this class will be removed", "2.11.0") -abstract class Responder[+A] extends Serializable { - - def respond(k: A => Unit): Unit - - def foreach(k: A => Unit) { respond(k) } - - def map[B](f: A => B) = new Responder[B] { - def respond(k: B => Unit) { - Responder.this.respond(x => k(f(x))) - } - } - - def flatMap[B](f: A => Responder[B]) = new Responder[B] { - def respond(k: B => Unit) { - Responder.this.respond(x => f(x).respond(k)) - } - } - - def filter(p: A => Boolean) = new Responder[A] { - def respond(k: A => Unit) { - Responder.this.respond(x => if (p(x)) k(x) else ()) - } - } - - override def toString = "Responder" -} diff --git a/tests/scala2-library/src/library/scala/SerialVersionUID.scala b/tests/scala2-library/src/library/scala/SerialVersionUID.scala deleted file mode 100644 index 77094f0bbff7..000000000000 --- a/tests/scala2-library/src/library/scala/SerialVersionUID.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -*/ - -package scala - -/** - * Annotation for specifying the `static SerialVersionUID` field - * of a serializable class. - */ -class SerialVersionUID(value: Long) extends scala.annotation.ClassfileAnnotation diff --git a/tests/scala2-library/src/library/scala/Serializable.scala b/tests/scala2-library/src/library/scala/Serializable.scala deleted file mode 100644 index 596ee984aaee..000000000000 --- a/tests/scala2-library/src/library/scala/Serializable.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** - * Classes extending this trait are serializable across platforms (Java, .NET). - */ -trait Serializable extends Any with java.io.Serializable diff --git a/tests/scala2-library/src/library/scala/Short.scala b/tests/scala2-library/src/library/scala/Short.scala deleted file mode 100644 index 136d745f167d..000000000000 --- a/tests/scala2-library/src/library/scala/Short.scala +++ /dev/null @@ -1,478 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - -/** `Short`, a 16-bit signed integer (equivalent to Java's `short` primitive type) is a - * subtype of [[scala.AnyVal]]. Instances of `Short` are not - * represented by an object in the underlying runtime system. - * - * There is an implicit conversion from [[scala.Short]] => [[scala.runtime.RichShort]] - * which provides useful non-primitive operations. - */ -final abstract class Short private extends AnyVal { - def toByte: Byte - def toShort: Short - def toChar: Char - def toInt: Int - def toLong: Long - def toFloat: Float - def toDouble: Double - - /** - * Returns the bitwise negation of this value. 
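As with all arithmetic on Short, the unary operators documented above return an Int; a small sketch:

object ShortUnaryDemo extends App {
  val s: Short = 5
  val negated: Int = ~s // -6
  val minus: Int = -s   // -5
  println((negated, minus))
}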
- * @example {{{ - * ~5 == -6 - * // in binary: ~00000101 == - * // 11111010 - * }}} - */ - def unary_~ : Int - /** Returns this value, unmodified. */ - def unary_+ : Int - /** Returns the negation of this value. */ - def unary_- : Int - - def +(x: String): String - - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Int): Int - /** - * Returns this value bit-shifted left by the specified number of bits, - * filling in the new right bits with zeroes. - * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} - */ - def <<(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling the new left bits with zeroes. - * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}} - * @example {{{ - * -21 >>> 3 == 536870909 - * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == - * // 00011111 11111111 11111111 11111101 - * }}} - */ - def >>>(x: Long): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Int): Int - /** - * Returns this value bit-shifted right by the specified number of bits, - * filling in the left bits with the same value as the left-most bit of this. - * The effect of this is to retain the sign of the value. - * @example {{{ - * -21 >> 3 == -3 - * // in binary: 11111111 11111111 11111111 11101011 >> 3 == - * // 11111111 11111111 11111111 11111101 - * }}} - */ - def >>(x: Long): Int - - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Byte): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Short): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Char): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Int): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Long): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Float): Boolean - /** Returns `true` if this value is equal to x, `false` otherwise. */ - def ==(x: Double): Boolean - - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Byte): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Short): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Char): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Int): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Long): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. 
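The shift operators documented above also widen the Short to an Int before shifting; a small sketch of the sign-extending `>>` versus the zero-filling `>>>`, reusing the documented values:

object ShortShiftDemo extends App {
  val s: Short = -21
  println(s >> 3)          // -3: arithmetic shift keeps the sign bit
  println(s >>> 3)         // 536870909: logical shift fills with zeroes
  println((6: Short) << 3) // 48
}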
*/ - def !=(x: Float): Boolean - /** Returns `true` if this value is not equal to x, `false` otherwise. */ - def !=(x: Double): Boolean - - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Byte): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Short): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Char): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Int): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Long): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Float): Boolean - /** Returns `true` if this value is less than x, `false` otherwise. */ - def <(x: Double): Boolean - - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Byte): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Short): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Char): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Int): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Long): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Float): Boolean - /** Returns `true` if this value is less than or equal to x, `false` otherwise. */ - def <=(x: Double): Boolean - - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Byte): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Short): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Char): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Int): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Long): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Float): Boolean - /** Returns `true` if this value is greater than x, `false` otherwise. */ - def >(x: Double): Boolean - - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Byte): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Short): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Char): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Int): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Long): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Float): Boolean - /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */ - def >=(x: Double): Boolean - - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Byte): Int - /** - * Returns the bitwise OR of this value and `x`. 
- * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Short): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Char): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Int): Int - /** - * Returns the bitwise OR of this value and `x`. - * @example {{{ - * (0xf0 | 0xaa) == 0xfa - * // in binary: 11110000 - * // | 10101010 - * // -------- - * // 11111010 - * }}} - */ - def |(x: Long): Long - - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Byte): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Short): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Char): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Int): Int - /** - * Returns the bitwise AND of this value and `x`. - * @example {{{ - * (0xf0 & 0xaa) == 0xa0 - * // in binary: 11110000 - * // & 10101010 - * // -------- - * // 10100000 - * }}} - */ - def &(x: Long): Long - - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Byte): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Short): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Char): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Int): Int - /** - * Returns the bitwise XOR of this value and `x`. - * @example {{{ - * (0xf0 ^ 0xaa) == 0x5a - * // in binary: 11110000 - * // ^ 10101010 - * // -------- - * // 01011010 - * }}} - */ - def ^(x: Long): Long - - /** Returns the sum of this value and `x`. */ - def +(x: Byte): Int - /** Returns the sum of this value and `x`. */ - def +(x: Short): Int - /** Returns the sum of this value and `x`. */ - def +(x: Char): Int - /** Returns the sum of this value and `x`. */ - def +(x: Int): Int - /** Returns the sum of this value and `x`. */ - def +(x: Long): Long - /** Returns the sum of this value and `x`. */ - def +(x: Float): Float - /** Returns the sum of this value and `x`. */ - def +(x: Double): Double - - /** Returns the difference of this value and `x`. */ - def -(x: Byte): Int - /** Returns the difference of this value and `x`. 
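The arithmetic operators follow the usual widening rules, so the result type depends on the right-hand operand; a small sketch:

object ShortArithmeticDemo extends App {
  val a: Short = 3
  val b: Short = 4
  val sum: Int = a + b      // Short + Short yields Int
  val lng: Long = a + 10L   // mixing with Long yields Long
  val dbl: Double = a / 2.0 // mixing with Double yields Double
  println((sum, lng, dbl))  // (7,13,1.5)
}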
*/ - def -(x: Short): Int - /** Returns the difference of this value and `x`. */ - def -(x: Char): Int - /** Returns the difference of this value and `x`. */ - def -(x: Int): Int - /** Returns the difference of this value and `x`. */ - def -(x: Long): Long - /** Returns the difference of this value and `x`. */ - def -(x: Float): Float - /** Returns the difference of this value and `x`. */ - def -(x: Double): Double - - /** Returns the product of this value and `x`. */ - def *(x: Byte): Int - /** Returns the product of this value and `x`. */ - def *(x: Short): Int - /** Returns the product of this value and `x`. */ - def *(x: Char): Int - /** Returns the product of this value and `x`. */ - def *(x: Int): Int - /** Returns the product of this value and `x`. */ - def *(x: Long): Long - /** Returns the product of this value and `x`. */ - def *(x: Float): Float - /** Returns the product of this value and `x`. */ - def *(x: Double): Double - - /** Returns the quotient of this value and `x`. */ - def /(x: Byte): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Short): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Char): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Int): Int - /** Returns the quotient of this value and `x`. */ - def /(x: Long): Long - /** Returns the quotient of this value and `x`. */ - def /(x: Float): Float - /** Returns the quotient of this value and `x`. */ - def /(x: Double): Double - - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Byte): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Short): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Char): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Int): Int - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Long): Long - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Float): Float - /** Returns the remainder of the division of this value by `x`. */ - def %(x: Double): Double - - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Short] = ??? -} - -object Short extends AnyValCompanion { - /** The smallest value representable as a Short. */ - final val MinValue = java.lang.Short.MIN_VALUE - - /** The largest value representable as a Short. */ - final val MaxValue = java.lang.Short.MAX_VALUE - - /** Transform a value type into a boxed reference type. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the Short to be boxed - * @return a java.lang.Short offering `x` as its underlying value. - */ - def box(x: Short): java.lang.Short = ??? - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a java.lang.Short. - * - * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]]. - * - * @param x the java.lang.Short to be unboxed. - * @throws ClassCastException if the argument is not a java.lang.Short - * @return the Short resulting from calling shortValue() on `x` - */ - def unbox(x: java.lang.Object): Short = ??? - - /** The String representation of the scala.Short companion object. 
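A small sketch of the companion object's constants and the mandated widening coercions it declares:

object ShortCompanionDemo extends App {
  println(Short.MinValue) // -32768
  println(Short.MaxValue) // 32767
  val s: Short = 7
  val i: Int = s          // widened, as per short2int
  val d: Double = s       // widened, as per short2double
  println((i, d))         // (7,7.0)
}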
*/ - override def toString = "object scala.Short" - /** Language mandated coercions from Short to "wider" types. */ - import scala.language.implicitConversions - implicit def short2int(x: Short): Int = x.toInt - implicit def short2long(x: Short): Long = x.toLong - implicit def short2float(x: Short): Float = x.toFloat - implicit def short2double(x: Short): Double = x.toDouble -} - diff --git a/tests/scala2-library/src/library/scala/Specializable.scala b/tests/scala2-library/src/library/scala/Specializable.scala deleted file mode 100644 index 137598c28ddd..000000000000 --- a/tests/scala2-library/src/library/scala/Specializable.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** A common supertype for companions of specializable types. - * Should not be extended in user code. - */ -trait Specializable - -object Specializable { - // No type parameter in @specialized annotation. - trait SpecializedGroup { } - - // Smuggle a list of types by way of a tuple upon which Group is parameterized. - class Group[T >: Null](value: T) extends SpecializedGroup { } - - final val Primitives = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)) - final val Everything = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)) - final val Bits32AndUp = new Group((Int, Long, Float, Double)) - final val Integral = new Group((Byte, Short, Int, Long, Char)) - final val AllNumeric = new Group((Byte, Short, Int, Long, Char, Float, Double)) - final val BestOfBreed = new Group((Int, Double, Boolean, Unit, AnyRef)) -} diff --git a/tests/scala2-library/src/library/scala/StringContext.scala b/tests/scala2-library/src/library/scala/StringContext.scala deleted file mode 100644 index 24fc5e70f281..000000000000 --- a/tests/scala2-library/src/library/scala/StringContext.scala +++ /dev/null @@ -1,253 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import java.lang.{ StringBuilder => JLSBuilder } -import scala.annotation.tailrec - -/** This class provides the basic mechanism to do String Interpolation. - * String Interpolation allows users - * to embed variable references directly in *processed* string literals. - * Here's an example: - * {{{ - * val name = "James" - * println(s"Hello, $name") // Hello, James - * }}} - * - * Any processed string literal is rewritten as an instantiation and - * method call against this class. For example: - * {{{ - * s"Hello, $name" - * }}} - * - * is rewritten to be: - * - * {{{ - * StringContext("Hello, ", "").s(name) - * }}} - * - * By default, this class provides the `raw`, `s` and `f` methods as - * available interpolators. - * - * To provide your own string interpolator, create an implicit class - * which adds a method to `StringContext`. Here's an example: - * {{{ - * implicit class JsonHelper(private val sc: StringContext) extends AnyVal { - * def json(args: Any*): JSONObject = ... - * } - * val x: JSONObject = json"{ a: $a }" - * }}} - * - * Here the `JsonHelper` extension class implicitly adds the `json` method to - * `StringContext` which can be used for `json` string literals. 
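A self-contained variant of that pattern, with an invented `shout` interpolator in place of the JSON one, assuming standard Scala 2 implicit classes:

object CustomInterpolatorDemo extends App {
  implicit class ShoutHelper(sc: StringContext) {
    def shout(args: Any*): String = sc.s(args: _*).toUpperCase
  }
  val name = "James"
  println(shout"hello, $name!") // HELLO, JAMES!
}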
- * - * @since 2.10.0 - * @param parts The parts that make up the interpolated string, - * without the expressions that get inserted by interpolation. - */ -case class StringContext(parts: String*) { - - import StringContext._ - - /** Checks that the length of the given argument `args` is one less than the number - * of `parts` supplied to the enclosing `StringContext`. - * @param `args` The arguments to be checked. - * @throws IllegalArgumentException if this is not the case. - */ - def checkLengths(args: Seq[Any]): Unit = - if (parts.length != args.length + 1) - throw new IllegalArgumentException("wrong number of arguments ("+ args.length - +") for interpolated string with "+ parts.length +" parts") - - - /** The simple string interpolator. - * - * It inserts its arguments between corresponding parts of the string context. - * It also treats standard escape sequences as defined in the Scala specification. - * Here's an example of usage: - * {{{ - * val name = "James" - * println(s"Hello, $name") // Hello, James - * }}} - * In this example, the expression $name is replaced with the `toString` of the - * variable `name`. - * The `s` interpolator can take the `toString` of any arbitrary expression within - * a `${}` block, for example: - * {{{ - * println(s"1 + 1 = ${1 + 1}") - * }}} - * will print the string `1 + 1 = 2`. - * - * @param `args` The arguments to be inserted into the resulting string. - * @throws IllegalArgumentException - * if the number of `parts` in the enclosing `StringContext` does not exceed - * the number of arguments `arg` by exactly 1. - * @throws StringContext.InvalidEscapeException - * if a `parts` string contains a backslash (`\`) character - * that does not start a valid escape sequence. - */ - def s(args: Any*): String = standardInterpolator(treatEscapes, args) - - /** The raw string interpolator. - * - * It inserts its arguments between corresponding parts of the string context. - * As opposed to the simple string interpolator `s`, this one does not treat - * standard escape sequences as defined in the Scala specification. - * - * For example, the raw processed string `raw"a\nb"` is equal to the scala string `"a\\nb"`. - * - * ''Note:'' Even when using the raw interpolator, Scala will preprocess unicode escapes. - * For example: - * {{{ - * scala> raw"\u005cu0023" - * res0: String = # - * }}} - * - * @param `args` The arguments to be inserted into the resulting string. - * @throws IllegalArgumentException - * if the number of `parts` in the enclosing `StringContext` does not exceed - * the number of arguments `arg` by exactly 1. - */ - def raw(args: Any*): String = standardInterpolator(identity, args) - - def standardInterpolator(process: String => String, args: Seq[Any]): String = { - checkLengths(args) - val pi = parts.iterator - val ai = args.iterator - val bldr = new JLSBuilder(process(pi.next())) - while (ai.hasNext) { - bldr append ai.next - bldr append process(pi.next()) - } - bldr.toString - } - - /** The formatted string interpolator. - * - * It inserts its arguments between corresponding parts of the string context. - * It also treats standard escape sequences as defined in the Scala specification. - * Finally, if an interpolated expression is followed by a `parts` string - * that starts with a formatting specifier, the expression is formatted according to that - * specifier. All specifiers allowed in Java format strings are handled, and in the same - * way they are treated in Java. 
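A short sketch of the three built-in interpolators described above, reusing the documented examples:

object InterpolatorDemo extends App {
  val name = "James"
  val height = 1.9d
  println(s"Hello, $name")                        // Hello, James
  println(s"1 + 1 = ${1 + 1}")                    // 1 + 1 = 2
  println(raw"a\nb")                              // a\nb (escapes are left alone)
  println(f"$name%s is $height%2.2f meters tall") // James is 1.90 meters tall
}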
- * - * For example: - * {{{ - * val height = 1.9d - * val name = "James" - * println(f"$name%s is $height%2.2f meters tall") // James is 1.90 meters tall - * }}} - * - * @param `args` The arguments to be inserted into the resulting string. - * @throws IllegalArgumentException - * if the number of `parts` in the enclosing `StringContext` does not exceed - * the number of arguments `arg` by exactly 1. - * @throws StringContext.InvalidEscapeException - * if a `parts` string contains a backslash (`\`) character - * that does not start a valid escape sequence. - * - * Note: The `f` method works by assembling a format string from all the `parts` strings and using - * `java.lang.String.format` to format all arguments with that format string. The format string is - * obtained by concatenating all `parts` strings, and performing two transformations: - * - * 1. Let a _formatting position_ be a start of any `parts` string except the first one. - * If a formatting position does not refer to a `%` character (which is assumed to - * start a format specifier), then the string format specifier `%s` is inserted. - * - * 2. Any `%` characters not in formatting positions must begin one of the conversions - * `%%` (the literal percent) or `%n` (the platform-specific line separator). - */ - // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f` - // Using the mechanism implemented in `scala.tools.reflect.FastTrack` - def f[A >: Any](args: A*): String = /*macro*/ ??? -} - -object StringContext { - - /** An exception that is thrown if a string contains a backslash (`\`) character - * that does not start a valid escape sequence. - * @param str The offending string - * @param index The index of the offending backslash character in `str`. - */ - class InvalidEscapeException(str: String, @deprecatedName('idx) val index: Int) extends IllegalArgumentException( - s"""invalid escape ${ - require(index >= 0 && index < str.length) - val ok = """[\b, \t, \n, \f, \r, \\, \", \']""" - if (index == str.length - 1) "at terminal" else s"'\\${str(index + 1)}' not one of $ok at" - } index $index in "$str". Use \\\\ for literal \\.""" - ) - - /** Expands standard Scala escape sequences in a string. - * Escape sequences are: - * control: `\b`, `\t`, `\n`, `\f`, `\r` - * escape: `\\`, `\"`, `\'` - * octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`. - * - * @param str A string that may contain escape sequences - * @return The string with all escape sequences expanded. - */ - def treatEscapes(str: String): String = treatEscapes0(str, strict = false) - - /** Treats escapes, but disallows octal escape sequences. 
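A small sketch of the two escape-processing entry points declared here; based on the shared implementation that follows, the difference is that processEscapes rejects octal escapes:

object EscapeDemo extends App {
  println(StringContext.treatEscapes("a\\tb")) // "a", a tab, "b"
  println(StringContext.treatEscapes("\\101")) // "A" (octal escape for 65)
  // StringContext.processEscapes("\\101")     // would throw InvalidEscapeException
}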
*/ - def processEscapes(str: String): String = treatEscapes0(str, strict = true) - - private def treatEscapes0(str: String, strict: Boolean): String = { - val len = str.length - // replace escapes with given first escape - def replace(first: Int): String = { - val b = new JLSBuilder - // append replacement starting at index `i`, with `next` backslash - @tailrec def loop(i: Int, next: Int): String = { - if (next >= 0) { - //require(str(next) == '\\') - if (next > i) b.append(str, i, next) - var idx = next + 1 - if (idx >= len) throw new InvalidEscapeException(str, next) - val c = str(idx) match { - case 'b' => '\b' - case 't' => '\t' - case 'n' => '\n' - case 'f' => '\f' - case 'r' => '\r' - case '"' => '"' - case '\'' => '\'' - case '\\' => '\\' - case o if '0' <= o && o <= '7' => - if (strict) throw new InvalidEscapeException(str, next) - val leadch = str(idx) - var oct = leadch - '0' - idx += 1 - if (idx < len && '0' <= str(idx) && str(idx) <= '7') { - oct = oct * 8 + str(idx) - '0' - idx += 1 - if (idx < len && leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') { - oct = oct * 8 + str(idx) - '0' - idx += 1 - } - } - idx -= 1 // retreat - oct.toChar - case _ => throw new InvalidEscapeException(str, next) - } - idx += 1 // advance - b append c - loop(idx, str.indexOf('\\', idx)) - } else { - if (i < len) b.append(str, i, len) - b.toString - } - } - loop(0, first) - } - str indexOf '\\' match { - case -1 => str - case i => replace(i) - } - } -} diff --git a/tests/scala2-library/src/library/scala/Symbol.scala b/tests/scala2-library/src/library/scala/Symbol.scala deleted file mode 100644 index e1efe20c8b3b..000000000000 --- a/tests/scala2-library/src/library/scala/Symbol.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** This class provides a simple way to get unique objects for equal strings. - * Since symbols are interned, they can be compared using reference equality. - * Instances of `Symbol` can be created easily with Scala's built-in quote - * mechanism. - * - * For instance, the [[http://scala-lang.org/#_top Scala]] term `'mysym` will - * invoke the constructor of the `Symbol` class in the following way: - * `Symbol("mysym")`. - * - * @author Martin Odersky, Iulian Dragos - * @version 1.8 - */ -final class Symbol private (val name: String) extends Serializable { - /** Converts this symbol to a string. - */ - override def toString(): String = "'" + name - - @throws(classOf[java.io.ObjectStreamException]) - private def readResolve(): Any = Symbol.apply(name) - override def hashCode = name.hashCode() - override def equals(other: Any) = this eq other.asInstanceOf[AnyRef] -} - -object Symbol extends UniquenessCache[String, Symbol] { - override def apply(name: String): Symbol = super.apply(name) - protected def valueFromKey(name: String): Symbol = new Symbol(name) - protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name) -} - -/** This is private so it won't appear in the library API, but - * abstracted to offer some hope of reusability. 
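The escape handling implemented by `treatEscapes0` above can be exercised through the public helpers; `EscapeDemo` below is a hypothetical sketch showing one control escape, one octal escape, and the strict variant rejecting octal.
{{{
import scala.StringContext.{treatEscapes, processEscapes, InvalidEscapeException}

object EscapeDemo extends App {
  // The literals below contain a real backslash followed by letters/digits.
  assert(treatEscapes("a\\tb") == "a\tb") // '\t' expanded to a tab character
  assert(treatEscapes("\\101") == "A")    // octal 101 = 65 = 'A'

  // processEscapes is the strict variant: octal escapes are rejected.
  try processEscapes("\\101")
  catch { case e: InvalidEscapeException => println(e.getMessage) }
}
}}}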
*/ -private[scala] abstract class UniquenessCache[K, V >: Null] -{ - import java.lang.ref.WeakReference - import java.util.WeakHashMap - import java.util.concurrent.locks.ReentrantReadWriteLock - - private val rwl = new ReentrantReadWriteLock() - private val rlock = rwl.readLock - private val wlock = rwl.writeLock - private val map = new WeakHashMap[K, WeakReference[V]] - - protected def valueFromKey(k: K): V - protected def keyFromValue(v: V): Option[K] - - def apply(name: K): V = { - def cached(): V = { - rlock.lock - try { - val reference = map get name - if (reference == null) null - else reference.get // will be null if we were gc-ed - } - finally rlock.unlock - } - def updateCache(): V = { - wlock.lock - try { - val res = cached() - if (res != null) res - else { - // If we don't remove the old String key from the map, we can - // wind up with one String as the key and a different String as - // the name field in the Symbol, which can lead to surprising GC - // behavior and duplicate Symbols. See scala/bug#6706. - map remove name - val sym = valueFromKey(name) - map.put(name, new WeakReference(sym)) - sym - } - } - finally wlock.unlock - } - - val res = cached() - if (res == null) updateCache() - else res - } - def unapply(other: V): Option[K] = keyFromValue(other) -} diff --git a/tests/scala2-library/src/library/scala/Tuple1.scala b/tests/scala2-library/src/library/scala/Tuple1.scala deleted file mode 100644 index 5544a5f63d9c..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple1.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 1 elements; the canonical representation of a [[scala.Product1]]. - * - * @constructor Create a new tuple with 1 elements. - * @param _1 Element 1 of this Tuple1 - */ -final case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1) - extends Product1[T1] -{ - override def toString() = "(" + _1 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple10.scala b/tests/scala2-library/src/library/scala/Tuple10.scala deleted file mode 100644 index c57acb7c6e7f..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple10.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 10 elements; the canonical representation of a [[scala.Product10]]. - * - * @constructor Create a new tuple with 10 elements. 
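A short sketch of the interning behaviour provided by `Symbol` and `UniquenessCache` above: equal symbol names resolve to the same instance, so reference equality suffices. `SymbolInterningDemo` is hypothetical.
{{{
object SymbolInterningDemo extends App {
  // Symbols are interned through UniquenessCache, so equal names always
  // yield the same instance and reference equality (`eq`) suffices.
  val a = Symbol("price")
  val b = Symbol("price")
  assert(a eq b)
  println(a) // 'price
}
}}}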
Note that it is more idiomatic to create a Tuple10 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10)` - * @param _1 Element 1 of this Tuple10 - * @param _2 Element 2 of this Tuple10 - * @param _3 Element 3 of this Tuple10 - * @param _4 Element 4 of this Tuple10 - * @param _5 Element 5 of this Tuple10 - * @param _6 Element 6 of this Tuple10 - * @param _7 Element 7 of this Tuple10 - * @param _8 Element 8 of this Tuple10 - * @param _9 Element 9 of this Tuple10 - * @param _10 Element 10 of this Tuple10 - */ -final case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10) - extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple11.scala b/tests/scala2-library/src/library/scala/Tuple11.scala deleted file mode 100644 index 06360e6679ba..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple11.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 11 elements; the canonical representation of a [[scala.Product11]]. - * - * @constructor Create a new tuple with 11 elements. Note that it is more idiomatic to create a Tuple11 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11)` - * @param _1 Element 1 of this Tuple11 - * @param _2 Element 2 of this Tuple11 - * @param _3 Element 3 of this Tuple11 - * @param _4 Element 4 of this Tuple11 - * @param _5 Element 5 of this Tuple11 - * @param _6 Element 6 of this Tuple11 - * @param _7 Element 7 of this Tuple11 - * @param _8 Element 8 of this Tuple11 - * @param _9 Element 9 of this Tuple11 - * @param _10 Element 10 of this Tuple11 - * @param _11 Element 11 of this Tuple11 - */ -final case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11) - extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple12.scala b/tests/scala2-library/src/library/scala/Tuple12.scala deleted file mode 100644 index e223de104d95..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple12.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 12 elements; the canonical representation of a [[scala.Product12]]. - * - * @constructor Create a new tuple with 12 elements. 
Note that it is more idiomatic to create a Tuple12 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12)` - * @param _1 Element 1 of this Tuple12 - * @param _2 Element 2 of this Tuple12 - * @param _3 Element 3 of this Tuple12 - * @param _4 Element 4 of this Tuple12 - * @param _5 Element 5 of this Tuple12 - * @param _6 Element 6 of this Tuple12 - * @param _7 Element 7 of this Tuple12 - * @param _8 Element 8 of this Tuple12 - * @param _9 Element 9 of this Tuple12 - * @param _10 Element 10 of this Tuple12 - * @param _11 Element 11 of this Tuple12 - * @param _12 Element 12 of this Tuple12 - */ -final case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12) - extends Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + - "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple13.scala b/tests/scala2-library/src/library/scala/Tuple13.scala deleted file mode 100644 index 56e12b96b676..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple13.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 13 elements; the canonical representation of a [[scala.Product13]]. - * - * @constructor Create a new tuple with 13 elements. Note that it is more idiomatic to create a Tuple13 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13)` - * @param _1 Element 1 of this Tuple13 - * @param _2 Element 2 of this Tuple13 - * @param _3 Element 3 of this Tuple13 - * @param _4 Element 4 of this Tuple13 - * @param _5 Element 5 of this Tuple13 - * @param _6 Element 6 of this Tuple13 - * @param _7 Element 7 of this Tuple13 - * @param _8 Element 8 of this Tuple13 - * @param _9 Element 9 of this Tuple13 - * @param _10 Element 10 of this Tuple13 - * @param _11 Element 11 of this Tuple13 - * @param _12 Element 12 of this Tuple13 - * @param _13 Element 13 of this Tuple13 - */ -final case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13) - extends Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + - "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple14.scala b/tests/scala2-library/src/library/scala/Tuple14.scala deleted file mode 100644 index 53dd4d79bbf3..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple14.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 14 elements; the canonical representation of a [[scala.Product14]]. 
- * - * @constructor Create a new tuple with 14 elements. Note that it is more idiomatic to create a Tuple14 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14)` - * @param _1 Element 1 of this Tuple14 - * @param _2 Element 2 of this Tuple14 - * @param _3 Element 3 of this Tuple14 - * @param _4 Element 4 of this Tuple14 - * @param _5 Element 5 of this Tuple14 - * @param _6 Element 6 of this Tuple14 - * @param _7 Element 7 of this Tuple14 - * @param _8 Element 8 of this Tuple14 - * @param _9 Element 9 of this Tuple14 - * @param _10 Element 10 of this Tuple14 - * @param _11 Element 11 of this Tuple14 - * @param _12 Element 12 of this Tuple14 - * @param _13 Element 13 of this Tuple14 - * @param _14 Element 14 of this Tuple14 - */ -final case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14) - extends Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + - "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple15.scala b/tests/scala2-library/src/library/scala/Tuple15.scala deleted file mode 100644 index 0aca96d00da0..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple15.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 15 elements; the canonical representation of a [[scala.Product15]]. - * - * @constructor Create a new tuple with 15 elements. 
Note that it is more idiomatic to create a Tuple15 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15)` - * @param _1 Element 1 of this Tuple15 - * @param _2 Element 2 of this Tuple15 - * @param _3 Element 3 of this Tuple15 - * @param _4 Element 4 of this Tuple15 - * @param _5 Element 5 of this Tuple15 - * @param _6 Element 6 of this Tuple15 - * @param _7 Element 7 of this Tuple15 - * @param _8 Element 8 of this Tuple15 - * @param _9 Element 9 of this Tuple15 - * @param _10 Element 10 of this Tuple15 - * @param _11 Element 11 of this Tuple15 - * @param _12 Element 12 of this Tuple15 - * @param _13 Element 13 of this Tuple15 - * @param _14 Element 14 of this Tuple15 - * @param _15 Element 15 of this Tuple15 - */ -final case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15) - extends Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + - "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple16.scala b/tests/scala2-library/src/library/scala/Tuple16.scala deleted file mode 100644 index d4c0c318070f..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple16.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 16 elements; the canonical representation of a [[scala.Product16]]. - * - * @constructor Create a new tuple with 16 elements. 
Note that it is more idiomatic to create a Tuple16 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16)` - * @param _1 Element 1 of this Tuple16 - * @param _2 Element 2 of this Tuple16 - * @param _3 Element 3 of this Tuple16 - * @param _4 Element 4 of this Tuple16 - * @param _5 Element 5 of this Tuple16 - * @param _6 Element 6 of this Tuple16 - * @param _7 Element 7 of this Tuple16 - * @param _8 Element 8 of this Tuple16 - * @param _9 Element 9 of this Tuple16 - * @param _10 Element 10 of this Tuple16 - * @param _11 Element 11 of this Tuple16 - * @param _12 Element 12 of this Tuple16 - * @param _13 Element 13 of this Tuple16 - * @param _14 Element 14 of this Tuple16 - * @param _15 Element 15 of this Tuple16 - * @param _16 Element 16 of this Tuple16 - */ -final case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16) - extends Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + - "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple17.scala b/tests/scala2-library/src/library/scala/Tuple17.scala deleted file mode 100644 index 47df6cb59f07..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple17.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 17 elements; the canonical representation of a [[scala.Product17]]. - * - * @constructor Create a new tuple with 17 elements. 
Note that it is more idiomatic to create a Tuple17 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17)` - * @param _1 Element 1 of this Tuple17 - * @param _2 Element 2 of this Tuple17 - * @param _3 Element 3 of this Tuple17 - * @param _4 Element 4 of this Tuple17 - * @param _5 Element 5 of this Tuple17 - * @param _6 Element 6 of this Tuple17 - * @param _7 Element 7 of this Tuple17 - * @param _8 Element 8 of this Tuple17 - * @param _9 Element 9 of this Tuple17 - * @param _10 Element 10 of this Tuple17 - * @param _11 Element 11 of this Tuple17 - * @param _12 Element 12 of this Tuple17 - * @param _13 Element 13 of this Tuple17 - * @param _14 Element 14 of this Tuple17 - * @param _15 Element 15 of this Tuple17 - * @param _16 Element 16 of this Tuple17 - * @param _17 Element 17 of this Tuple17 - */ -final case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17) - extends Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + - "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple18.scala b/tests/scala2-library/src/library/scala/Tuple18.scala deleted file mode 100644 index 464b08fb2840..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple18.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 18 elements; the canonical representation of a [[scala.Product18]]. - * - * @constructor Create a new tuple with 18 elements. 
Note that it is more idiomatic to create a Tuple18 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18)` - * @param _1 Element 1 of this Tuple18 - * @param _2 Element 2 of this Tuple18 - * @param _3 Element 3 of this Tuple18 - * @param _4 Element 4 of this Tuple18 - * @param _5 Element 5 of this Tuple18 - * @param _6 Element 6 of this Tuple18 - * @param _7 Element 7 of this Tuple18 - * @param _8 Element 8 of this Tuple18 - * @param _9 Element 9 of this Tuple18 - * @param _10 Element 10 of this Tuple18 - * @param _11 Element 11 of this Tuple18 - * @param _12 Element 12 of this Tuple18 - * @param _13 Element 13 of this Tuple18 - * @param _14 Element 14 of this Tuple18 - * @param _15 Element 15 of this Tuple18 - * @param _16 Element 16 of this Tuple18 - * @param _17 Element 17 of this Tuple18 - * @param _18 Element 18 of this Tuple18 - */ -final case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18) - extends Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + - "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple19.scala b/tests/scala2-library/src/library/scala/Tuple19.scala deleted file mode 100644 index d64b3920b441..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple19.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 19 elements; the canonical representation of a [[scala.Product19]]. - * - * @constructor Create a new tuple with 19 elements. 
Note that it is more idiomatic to create a Tuple19 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19)` - * @param _1 Element 1 of this Tuple19 - * @param _2 Element 2 of this Tuple19 - * @param _3 Element 3 of this Tuple19 - * @param _4 Element 4 of this Tuple19 - * @param _5 Element 5 of this Tuple19 - * @param _6 Element 6 of this Tuple19 - * @param _7 Element 7 of this Tuple19 - * @param _8 Element 8 of this Tuple19 - * @param _9 Element 9 of this Tuple19 - * @param _10 Element 10 of this Tuple19 - * @param _11 Element 11 of this Tuple19 - * @param _12 Element 12 of this Tuple19 - * @param _13 Element 13 of this Tuple19 - * @param _14 Element 14 of this Tuple19 - * @param _15 Element 15 of this Tuple19 - * @param _16 Element 16 of this Tuple19 - * @param _17 Element 17 of this Tuple19 - * @param _18 Element 18 of this Tuple19 - * @param _19 Element 19 of this Tuple19 - */ -final case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19) - extends Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + - "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple2.scala b/tests/scala2-library/src/library/scala/Tuple2.scala deleted file mode 100644 index 5faa4e982150..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple2.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 2 elements; the canonical representation of a [[scala.Product2]]. - * - * @constructor Create a new tuple with 2 elements. Note that it is more idiomatic to create a Tuple2 via `(t1, t2)` - * @param _1 Element 1 of this Tuple2 - * @param _2 Element 2 of this Tuple2 - */ -final case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2) - extends Product2[T1, T2] -{ - override def toString() = "(" + _1 + "," + _2 + ")" - - /** Swaps the elements of this `Tuple`. - * @return a new Tuple where the first element is the second element of this Tuple and the - * second element is the first element of this Tuple. - */ - def swap: Tuple2[T2,T1] = Tuple2(_2, _1) - -} diff --git a/tests/scala2-library/src/library/scala/Tuple20.scala b/tests/scala2-library/src/library/scala/Tuple20.scala deleted file mode 100644 index a96c41baa551..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple20.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
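Since `Tuple2` is the only tuple with extra API (`swap`), a brief hypothetical sketch (`TupleDemo`) of the literal syntax the scaladoc above recommends and of `swap`:
{{{
object TupleDemo extends App {
  // (t1, t2) is shorthand for Tuple2(t1, t2), as recommended above.
  val pair: (String, Int) = ("dotty", 3)
  assert(pair == Tuple2("dotty", 3))

  // swap exists only on Tuple2; it exchanges the two elements.
  assert(pair.swap == (3, "dotty"))
}
}}}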
- -package scala - - -/** A tuple of 20 elements; the canonical representation of a [[scala.Product20]]. - * - * @constructor Create a new tuple with 20 elements. Note that it is more idiomatic to create a Tuple20 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20)` - * @param _1 Element 1 of this Tuple20 - * @param _2 Element 2 of this Tuple20 - * @param _3 Element 3 of this Tuple20 - * @param _4 Element 4 of this Tuple20 - * @param _5 Element 5 of this Tuple20 - * @param _6 Element 6 of this Tuple20 - * @param _7 Element 7 of this Tuple20 - * @param _8 Element 8 of this Tuple20 - * @param _9 Element 9 of this Tuple20 - * @param _10 Element 10 of this Tuple20 - * @param _11 Element 11 of this Tuple20 - * @param _12 Element 12 of this Tuple20 - * @param _13 Element 13 of this Tuple20 - * @param _14 Element 14 of this Tuple20 - * @param _15 Element 15 of this Tuple20 - * @param _16 Element 16 of this Tuple20 - * @param _17 Element 17 of this Tuple20 - * @param _18 Element 18 of this Tuple20 - * @param _19 Element 19 of this Tuple20 - * @param _20 Element 20 of this Tuple20 - */ -final case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20) - extends Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + - "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple21.scala b/tests/scala2-library/src/library/scala/Tuple21.scala deleted file mode 100644 index 6f240d1fba7f..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple21.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 21 elements; the canonical representation of a [[scala.Product21]]. - * - * @constructor Create a new tuple with 21 elements. 
Note that it is more idiomatic to create a Tuple21 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21)` - * @param _1 Element 1 of this Tuple21 - * @param _2 Element 2 of this Tuple21 - * @param _3 Element 3 of this Tuple21 - * @param _4 Element 4 of this Tuple21 - * @param _5 Element 5 of this Tuple21 - * @param _6 Element 6 of this Tuple21 - * @param _7 Element 7 of this Tuple21 - * @param _8 Element 8 of this Tuple21 - * @param _9 Element 9 of this Tuple21 - * @param _10 Element 10 of this Tuple21 - * @param _11 Element 11 of this Tuple21 - * @param _12 Element 12 of this Tuple21 - * @param _13 Element 13 of this Tuple21 - * @param _14 Element 14 of this Tuple21 - * @param _15 Element 15 of this Tuple21 - * @param _16 Element 16 of this Tuple21 - * @param _17 Element 17 of this Tuple21 - * @param _18 Element 18 of this Tuple21 - * @param _19 Element 19 of this Tuple21 - * @param _20 Element 20 of this Tuple21 - * @param _21 Element 21 of this Tuple21 - */ -final case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21) - extends Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + - "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple22.scala b/tests/scala2-library/src/library/scala/Tuple22.scala deleted file mode 100644 index 681b709f6578..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple22.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 22 elements; the canonical representation of a [[scala.Product22]]. - * - * @constructor Create a new tuple with 22 elements. 
Note that it is more idiomatic to create a Tuple22 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22)` - * @param _1 Element 1 of this Tuple22 - * @param _2 Element 2 of this Tuple22 - * @param _3 Element 3 of this Tuple22 - * @param _4 Element 4 of this Tuple22 - * @param _5 Element 5 of this Tuple22 - * @param _6 Element 6 of this Tuple22 - * @param _7 Element 7 of this Tuple22 - * @param _8 Element 8 of this Tuple22 - * @param _9 Element 9 of this Tuple22 - * @param _10 Element 10 of this Tuple22 - * @param _11 Element 11 of this Tuple22 - * @param _12 Element 12 of this Tuple22 - * @param _13 Element 13 of this Tuple22 - * @param _14 Element 14 of this Tuple22 - * @param _15 Element 15 of this Tuple22 - * @param _16 Element 16 of this Tuple22 - * @param _17 Element 17 of this Tuple22 - * @param _18 Element 18 of this Tuple22 - * @param _19 Element 19 of this Tuple22 - * @param _20 Element 20 of this Tuple22 - * @param _21 Element 21 of this Tuple22 - * @param _22 Element 22 of this Tuple22 - */ -final case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22) - extends Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + - "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + "," + _22 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple3.scala b/tests/scala2-library/src/library/scala/Tuple3.scala deleted file mode 100644 index 86f8f7e1a4b3..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple3.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 3 elements; the canonical representation of a [[scala.Product3]]. - * - * @constructor Create a new tuple with 3 elements. Note that it is more idiomatic to create a Tuple3 via `(t1, t2, t3)` - * @param _1 Element 1 of this Tuple3 - * @param _2 Element 2 of this Tuple3 - * @param _3 Element 3 of this Tuple3 - */ -final case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3) - extends Product3[T1, T2, T3] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple4.scala b/tests/scala2-library/src/library/scala/Tuple4.scala deleted file mode 100644 index f3266c262c41..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple4.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
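All of the generated `TupleN` classes above share the `ProductN` interface; the hypothetical `ProductDemo` below shows the generic accessors, destructuring, and the generated `toString` format for a `Tuple3`.
{{{
object ProductDemo extends App {
  val t = ("a", 2, true)             // Tuple3[String, Int, Boolean]

  // Generic Product accessors shared by every TupleN:
  assert(t.productArity == 3)
  assert(t.productElement(1) == 2)   // 0-based, statically typed as Any

  // Case-class machinery gives destructuring and the toString seen above:
  val (s, n, flag) = t
  assert(s == "a" && n == 2 && flag)
  assert(t.toString == "(a,2,true)")
}
}}}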
- -package scala - - -/** A tuple of 4 elements; the canonical representation of a [[scala.Product4]]. - * - * @constructor Create a new tuple with 4 elements. Note that it is more idiomatic to create a Tuple4 via `(t1, t2, t3, t4)` - * @param _1 Element 1 of this Tuple4 - * @param _2 Element 2 of this Tuple4 - * @param _3 Element 3 of this Tuple4 - * @param _4 Element 4 of this Tuple4 - */ -final case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4) - extends Product4[T1, T2, T3, T4] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple5.scala b/tests/scala2-library/src/library/scala/Tuple5.scala deleted file mode 100644 index e6ac0a62452c..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple5.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 5 elements; the canonical representation of a [[scala.Product5]]. - * - * @constructor Create a new tuple with 5 elements. Note that it is more idiomatic to create a Tuple5 via `(t1, t2, t3, t4, t5)` - * @param _1 Element 1 of this Tuple5 - * @param _2 Element 2 of this Tuple5 - * @param _3 Element 3 of this Tuple5 - * @param _4 Element 4 of this Tuple5 - * @param _5 Element 5 of this Tuple5 - */ -final case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5) - extends Product5[T1, T2, T3, T4, T5] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple6.scala b/tests/scala2-library/src/library/scala/Tuple6.scala deleted file mode 100644 index cf69b9c10a6a..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple6.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 6 elements; the canonical representation of a [[scala.Product6]]. - * - * @constructor Create a new tuple with 6 elements. 
Note that it is more idiomatic to create a Tuple6 via `(t1, t2, t3, t4, t5, t6)` - * @param _1 Element 1 of this Tuple6 - * @param _2 Element 2 of this Tuple6 - * @param _3 Element 3 of this Tuple6 - * @param _4 Element 4 of this Tuple6 - * @param _5 Element 5 of this Tuple6 - * @param _6 Element 6 of this Tuple6 - */ -final case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6) - extends Product6[T1, T2, T3, T4, T5, T6] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple7.scala b/tests/scala2-library/src/library/scala/Tuple7.scala deleted file mode 100644 index 4f0200fe238f..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple7.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 7 elements; the canonical representation of a [[scala.Product7]]. - * - * @constructor Create a new tuple with 7 elements. Note that it is more idiomatic to create a Tuple7 via `(t1, t2, t3, t4, t5, t6, t7)` - * @param _1 Element 1 of this Tuple7 - * @param _2 Element 2 of this Tuple7 - * @param _3 Element 3 of this Tuple7 - * @param _4 Element 4 of this Tuple7 - * @param _5 Element 5 of this Tuple7 - * @param _6 Element 6 of this Tuple7 - * @param _7 Element 7 of this Tuple7 - */ -final case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7) - extends Product7[T1, T2, T3, T4, T5, T6, T7] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple8.scala b/tests/scala2-library/src/library/scala/Tuple8.scala deleted file mode 100644 index ebd9f7025264..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple8.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 8 elements; the canonical representation of a [[scala.Product8]]. - * - * @constructor Create a new tuple with 8 elements. 
Note that it is more idiomatic to create a Tuple8 via `(t1, t2, t3, t4, t5, t6, t7, t8)` - * @param _1 Element 1 of this Tuple8 - * @param _2 Element 2 of this Tuple8 - * @param _3 Element 3 of this Tuple8 - * @param _4 Element 4 of this Tuple8 - * @param _5 Element 5 of this Tuple8 - * @param _6 Element 6 of this Tuple8 - * @param _7 Element 7 of this Tuple8 - * @param _8 Element 8 of this Tuple8 - */ -final case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8) - extends Product8[T1, T2, T3, T4, T5, T6, T7, T8] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/Tuple9.scala b/tests/scala2-library/src/library/scala/Tuple9.scala deleted file mode 100644 index 854fe97b4401..000000000000 --- a/tests/scala2-library/src/library/scala/Tuple9.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala - - -/** A tuple of 9 elements; the canonical representation of a [[scala.Product9]]. - * - * @constructor Create a new tuple with 9 elements. Note that it is more idiomatic to create a Tuple9 via `(t1, t2, t3, t4, t5, t6, t7, t8, t9)` - * @param _1 Element 1 of this Tuple9 - * @param _2 Element 2 of this Tuple9 - * @param _3 Element 3 of this Tuple9 - * @param _4 Element 4 of this Tuple9 - * @param _5 Element 5 of this Tuple9 - * @param _6 Element 6 of this Tuple9 - * @param _7 Element 7 of this Tuple9 - * @param _8 Element 8 of this Tuple9 - * @param _9 Element 9 of this Tuple9 - */ -final case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9) - extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9] -{ - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")" - -} diff --git a/tests/scala2-library/src/library/scala/UninitializedError.scala b/tests/scala2-library/src/library/scala/UninitializedError.scala deleted file mode 100644 index 0641a6638880..000000000000 --- a/tests/scala2-library/src/library/scala/UninitializedError.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** This class represents uninitialized variable/value errors. 
- * - * @author Martin Odersky - * @since 2.5 - */ -final class UninitializedError extends RuntimeException("uninitialized value") diff --git a/tests/scala2-library/src/library/scala/UninitializedFieldError.scala b/tests/scala2-library/src/library/scala/UninitializedFieldError.scala deleted file mode 100644 index 0dfba2a187a5..000000000000 --- a/tests/scala2-library/src/library/scala/UninitializedFieldError.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** This class implements errors which are thrown whenever a - * field is used before it has been initialized. - * - * Such runtime checks are not emitted by default. - * They can be enabled by the `-Xcheckinit` compiler option. - * - * @since 2.7 - */ -final case class UninitializedFieldError(msg: String) extends RuntimeException(msg) { - def this(obj: Any) = this("" + obj) -} diff --git a/tests/scala2-library/src/library/scala/Unit.scala b/tests/scala2-library/src/library/scala/Unit.scala deleted file mode 100644 index eb6d1d0ddffa..000000000000 --- a/tests/scala2-library/src/library/scala/Unit.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// DO NOT EDIT, CHANGES WILL BE LOST -// This auto-generated code can be modified in "project/GenerateAnyVals.scala". -// Afterwards, running "sbt generateSources" regenerates this source file. - -package scala - - -/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type - * `Unit`, `()`, and it is not represented by any object in the underlying - * runtime system. A method with return type `Unit` is analogous to a Java - * method which is declared `void`. - */ -final abstract class Unit private extends AnyVal { - // Provide a more specific return type for Scaladoc - override def getClass(): Class[Unit] = ??? -} - -object Unit extends AnyValCompanion { - - /** Transform a value type into a boxed reference type. - * - * @param x the Unit to be boxed - * @return a scala.runtime.BoxedUnit offering `x` as its underlying value. - */ - def box(x: Unit): scala.runtime.BoxedUnit = scala.runtime.BoxedUnit.UNIT - - /** Transform a boxed type into a value type. Note that this - * method is not typesafe: it accepts any Object, but will throw - * an exception if the argument is not a scala.runtime.BoxedUnit. - * - * @param x the scala.runtime.BoxedUnit to be unboxed. - * @throws ClassCastException if the argument is not a scala.runtime.BoxedUnit - * @return the Unit value () - */ - def unbox(x: java.lang.Object): Unit = x.asInstanceOf[scala.runtime.BoxedUnit] - - /** The String representation of the scala.Unit companion object. 
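A minimal sketch of the `box`/`unbox` pair documented above, assuming the standard `scala.runtime.BoxedUnit` singleton; `UnitBoxingDemo` is hypothetical.
{{{
object UnitBoxingDemo extends App {
  // Unit has a single value, (); boxing yields the BoxedUnit singleton.
  val boxed: scala.runtime.BoxedUnit = Unit.box(())
  assert(boxed eq scala.runtime.BoxedUnit.UNIT)

  // unbox is the inverse and simply returns the unit value.
  val u: Unit = Unit.unbox(boxed)
  println(u) // ()
}
}}}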
*/ - override def toString = "object scala.Unit" -} - diff --git a/tests/scala2-library/src/library/scala/annotation/Annotation.scala b/tests/scala2-library/src/library/scala/annotation/Annotation.scala deleted file mode 100644 index c821344cfa92..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/Annotation.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** A base class for annotations. Annotations extending this class directly - * are not preserved for the Scala type checker and are also not stored as - * Java annotations in classfiles. To enable either or both of these, one - * needs to inherit from [[scala.annotation.StaticAnnotation]] or/and - * [[scala.annotation.ClassfileAnnotation]]. - * - * @author Martin Odersky - * @version 1.1, 2/02/2007 - * @since 2.4 - */ -abstract class Annotation {} diff --git a/tests/scala2-library/src/library/scala/annotation/ClassfileAnnotation.scala b/tests/scala2-library/src/library/scala/annotation/ClassfileAnnotation.scala deleted file mode 100644 index e32b93a5df97..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/ClassfileAnnotation.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** A base class for classfile annotations. These are stored as - * [[http://docs.oracle.com/javase/7/docs/technotes/guides/language/annotations.html#_top Java annotations]]] - * in classfiles. - * - * @author Martin Odersky - * @version 1.1, 2/02/2007 - * @since 2.4 - */ -trait ClassfileAnnotation extends StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/StaticAnnotation.scala b/tests/scala2-library/src/library/scala/annotation/StaticAnnotation.scala deleted file mode 100644 index 3e7e7f26af62..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/StaticAnnotation.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** A base class for static annotations. These are available - * to the Scala type checker, even across different compilation units. - * - * @author Martin Odersky - * @version 1.1, 2/02/2007 - * @since 2.4 - */ -trait StaticAnnotation extends Annotation diff --git a/tests/scala2-library/src/library/scala/annotation/TypeConstraint.scala b/tests/scala2-library/src/library/scala/annotation/TypeConstraint.scala deleted file mode 100644 index d80569b84580..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/TypeConstraint.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** A marker for annotations that, when applied to a type, should be treated - * as a constraint on the annotated type. 
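To illustrate `StaticAnnotation`, a hypothetical user-defined annotation (`apiNote`) applied to an equally hypothetical `Service`; extending `StaticAnnotation` is what keeps it visible to the Scala type checker across compilation units.
{{{
import scala.annotation.StaticAnnotation

// Hypothetical annotation: extending StaticAnnotation keeps it visible to
// the type checker across compilation units (it is not stored as a Java
// annotation in class files).
final class apiNote(text: String) extends StaticAnnotation

class Service {
  @apiNote("idempotent; safe to retry")
  def ping(): String = "pong"
}
}}}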
- * - * A proper constraint should restrict the type based only on information - * mentioned within the type. A Scala compiler can use this assumption to - * rewrite the contents of the constraint as necessary. To contrast, a type - * annotation whose meaning depends on the context where it is written - * down is not a proper constrained type, and this marker should not be - * applied. A Scala compiler will drop such annotations in cases where it - * would rewrite a type constraint. - * - * @author Lex Spoon - * @version 1.1, 2007-11-5 - * @since 2.6 - */ -trait TypeConstraint extends Annotation diff --git a/tests/scala2-library/src/library/scala/annotation/bridge.scala b/tests/scala2-library/src/library/scala/annotation/bridge.scala deleted file mode 100644 index c0c6dba42439..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/bridge.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** If this annotation is present on a method, it will be treated as a bridge method. - */ -@deprecated("reconsider whether using this annotation will accomplish anything", "2.10.0") -private[scala] class bridge extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/compileTimeOnly.scala b/tests/scala2-library/src/library/scala/annotation/compileTimeOnly.scala deleted file mode 100644 index 942e9cad8c7d..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/compileTimeOnly.scala +++ /dev/null @@ -1,22 +0,0 @@ -package scala.annotation - -import scala.annotation.meta._ - -/** - * An annotation that designates that an annottee should not be referred to after - * type checking (which includes macro expansion). - * - * Examples of potential use: - * 1) The annottee can only appear in the arguments of some other macro - * that will eliminate it from the AST during expansion. - * 2) The annottee is a macro and should have been expanded away, - * so if hasn't, something wrong has happened. - * (Comes in handy to provide better support for new macro flavors, - * e.g. macro annotations, that can't be expanded by the vanilla compiler). - * - * @param message the error message to print during compilation if a reference remains - * after type checking - * @since 2.11.0 - */ -@getter @setter @beanGetter @beanSetter @companionClass @companionMethod -final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/elidable.scala b/tests/scala2-library/src/library/scala/annotation/elidable.scala deleted file mode 100644 index dd0d9b511cb0..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/elidable.scala +++ /dev/null @@ -1,119 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** An annotation for methods whose bodies may be excluded - * from compiler-generated bytecode. - * - * Behavior is influenced by passing `-Xelide-below ` to `scalac`. 
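A hedged sketch of `@compileTimeOnly`: defining the hypothetical `Placeholders.deferred` below compiles, while any reference to it that survives type checking is reported with the given message.
{{{
import scala.annotation.compileTimeOnly

object Placeholders {
  // Defining the annottee is fine; any reference that survives type checking
  // (e.g. one not eliminated by a macro) is reported with the message below.
  @compileTimeOnly("`deferred` must be rewritten away by a macro before typer finishes")
  def deferred[A]: A = sys.error("never called at runtime")
}
}}}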
- * Calls to methods marked elidable (as well as the method body) will - * be omitted from generated code if the priority given the annotation - * is lower than that given on the command line. - * - * {{{ - * @elidable(123) // annotation priority - * scalac -Xelide-below 456 // command line priority - * }}} - * - * The method call will be replaced with an expression which depends on - * the type of the elided expression. In decreasing order of precedence: - * - * {{{ - * Unit () - * Boolean false - * T <: AnyVal 0 - * T >: Null null - * T >: Nothing Predef.??? - * }}} - * - * Complete example: - {{{ - import scala.annotation._, elidable._ - object Test extends App { - def expensiveComputation(): Int = { Thread.sleep(1000) ; 172 } - - @elidable(WARNING) def warning(msg: String) = println(msg) - @elidable(FINE) def debug(msg: String) = println(msg) - @elidable(FINE) def computedValue = expensiveComputation() - - warning("Warning! Danger! Warning!") - debug("Debug! Danger! Debug!") - println("I computed a value: " + computedValue) - } - % scalac example.scala && scala Test - Warning! Danger! Warning! - Debug! Danger! Debug! - I computed a value: 172 - - // INFO lies between WARNING and FINE - % scalac -Xelide-below INFO example.scala && scala Test - Warning! Danger! Warning! - I computed a value: 0 - }}} - * - * @author Paul Phillips - * @since 2.8 - */ -final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation - -/** This useless appearing code was necessary to allow people to use - * named constants for the elidable annotation. This is what it takes - * to convince the compiler to fold the constants: otherwise when it's - * time to check an elision level it's staring at a tree like - * {{{ - * (Select(Level, Select(FINEST, Apply(intValue, Nil)))) - * }}} - * instead of the number `300`. - * - * @since 2.8 - */ -object elidable { - /** The levels `ALL` and `OFF` are confusing in this context because - * the sentiment being expressed when using the annotation is at cross - * purposes with the one being expressed via `-Xelide-below`. This - * confusion reaches its zenith at level `OFF`, where the annotation means - * ''never elide this method'' but `-Xelide-below OFF` is how you would - * say ''elide everything possible''. - * - * With no simple remedy at hand, the issue is now at least documented, - * and aliases `MAXIMUM` and `MINIMUM` are offered. 
- */ - final val ALL = Int.MinValue // Level.ALL.intValue() - final val FINEST = 300 // Level.FINEST.intValue() - final val FINER = 400 // Level.FINER.intValue() - final val FINE = 500 // Level.FINE.intValue() - final val CONFIG = 700 // Level.CONFIG.intValue() - final val INFO = 800 // Level.INFO.intValue() - final val WARNING = 900 // Level.WARNING.intValue() - final val SEVERE = 1000 // Level.SEVERE.intValue() - final val OFF = Int.MaxValue // Level.OFF.intValue() - - // a couple aliases for the confusing ALL and OFF - final val MAXIMUM = OFF - final val MINIMUM = ALL - - // and we can add a few of our own - final val ASSERTION = 2000 // we should make this more granular - - // for command line parsing so we can use names or ints - val byName: Map[String, Int] = Map( - "FINEST" -> FINEST, - "FINER" -> FINER, - "FINE" -> FINE, - "CONFIG" -> CONFIG, - "INFO" -> INFO, - "WARNING" -> WARNING, - "SEVERE" -> SEVERE, - "ASSERTION" -> ASSERTION, - "ALL" -> ALL, - "OFF" -> OFF, - "MAXIMUM" -> MAXIMUM, - "MINIMUM" -> MINIMUM - ) -} diff --git a/tests/scala2-library/src/library/scala/annotation/implicitAmbiguous.scala b/tests/scala2-library/src/library/scala/annotation/implicitAmbiguous.scala deleted file mode 100644 index 44e8d2308591..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/implicitAmbiguous.scala +++ /dev/null @@ -1,32 +0,0 @@ -package scala.annotation - -/** - * To customize the error message that's emitted when an implicit search finds - * multiple ambiguous values, annotate at least one of the implicit values - * `@implicitAmbiguous`. Assuming the implicit value is a method with type - * parameters `X1,..., XN`, the error message will be the result of replacing - * all occurrences of `${Xi}` in the string `msg` with the string representation - * of the corresponding type argument `Ti`. - * - * If more than one `@implicitAmbiguous` annotation is collected, the compiler is - * free to pick any of them to display. - * - * Nice errors can direct users to fix imports or even tell them why code - * intentionally doesn't compile. - * - * {{{ - * trait =!=[C, D] - * - * implicit def neq[E, F] : E =!= F = null - * - * @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}") - * implicit def neqAmbig1[G, H, J] : J =!= J = null - * implicit def neqAmbig2[I] : I =!= I = null - * - * implicitly[Int =!= Int] - * }}} - * - * @author Brian McKenna - * @since 2.12.0 - */ -final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/implicitNotFound.scala b/tests/scala2-library/src/library/scala/annotation/implicitNotFound.scala deleted file mode 100644 index eeedcb014e43..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/implicitNotFound.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** - * To customize the error message that's emitted when an implicit of type - * C[T1,..., TN] cannot be found, annotate the class C with @implicitNotFound. - * Assuming C has type parameters X1,..., XN, the error message will be the - * result of replacing all occurrences of ${Xi} in the string msg with the - * string representation of the corresponding type argument Ti. 
* - * - * @author Adriaan Moors - * @since 2.8.1 - */ -final class implicitNotFound(msg: String) extends scala.annotation.StaticAnnotation {} diff --git a/tests/scala2-library/src/library/scala/annotation/meta/beanGetter.scala b/tests/scala2-library/src/library/scala/annotation/meta/beanGetter.scala deleted file mode 100644 index ce4207e1352c..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/beanGetter.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * Consult the documentation in package [[scala.annotation.meta]]. - */ -final class beanGetter extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/beanSetter.scala b/tests/scala2-library/src/library/scala/annotation/meta/beanSetter.scala deleted file mode 100644 index ad3093240017..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/beanSetter.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * Consult the documentation in package [[scala.annotation.meta]]. - */ -final class beanSetter extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/companionClass.scala b/tests/scala2-library/src/library/scala/annotation/meta/companionClass.scala deleted file mode 100644 index a0be63ed99d1..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/companionClass.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * When defining an implicit class, the Scala compiler creates an implicit - * conversion method for it. Annotations `@companionClass` and `@companionMethod` - * control where an annotation on the implicit class will go. By default, annotations - * on an implicit class end up only on the class. - * - */ -final class companionClass extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/companionMethod.scala b/tests/scala2-library/src/library/scala/annotation/meta/companionMethod.scala deleted file mode 100644 index 74d624002c37..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/companionMethod.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * When defining an implicit class, the Scala compiler creates an implicit - * conversion method for it. Annotations `@companionClass` and `@companionMethod` - * control where an annotation on the implicit class will go. By default, annotations - * on an implicit class end up only on the class. 
- * - */ -final class companionMethod extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/companionObject.scala b/tests/scala2-library/src/library/scala/annotation/meta/companionObject.scala deleted file mode 100644 index 882299371c41..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/companionObject.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * Currently unused; intended as an annotation target for classes such as case classes - * that automatically generate a companion object - */ -final class companionObject extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/field.scala b/tests/scala2-library/src/library/scala/annotation/meta/field.scala deleted file mode 100644 index 84e7fc89f6fd..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/field.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * Consult the documentation in package [[scala.annotation.meta]]. - */ -final class field extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/getter.scala b/tests/scala2-library/src/library/scala/annotation/meta/getter.scala deleted file mode 100644 index 3190aef16384..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/getter.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * Consult the documentation in package [[scala.annotation.meta]]. - */ -final class getter extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/languageFeature.scala b/tests/scala2-library/src/library/scala/annotation/meta/languageFeature.scala deleted file mode 100644 index 5b407121851d..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/languageFeature.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * An annotation giving particulars for a language feature in object `scala.language`. 
- */ -final class languageFeature(feature: String, enableRequired: Boolean) extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/package.scala b/tests/scala2-library/src/library/scala/annotation/meta/package.scala deleted file mode 100644 index 2d18ae5dd714..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/package.scala +++ /dev/null @@ -1,68 +0,0 @@ -package scala.annotation - -/** - * When defining a field, the Scala compiler creates up to four accessors - * for it: a getter, a setter, and if the field is annotated with - * `@BeanProperty`, a bean getter and a bean setter. - * - * For instance in the following class definition - * - * {{{ - * class C(@myAnnot @BeanProperty var c: Int) - * }}} - * - * there are six entities which can carry the annotation `@myAnnot`: the - * constructor parameter, the generated field and the four accessors. - * - * By default, annotations on (`val`-, `var`- or plain) constructor parameters - * end up on the parameter, not on any other entity. Annotations on fields - * by default only end up on the field. - * - * The meta-annotations in package `scala.annotation.meta` are used - * to control where annotations on fields and class parameters are copied. - * This is done by annotating either the annotation type or the annotation - * class with one or several of the meta-annotations in this package. - * - * ==Annotating the annotation type== - * - * The target meta-annotations can be put on the annotation type when - * instantiating the annotation. In the following example, the annotation - * `@Id` will be added only to the bean getter `getX`. - * - * {{{ - * import javax.persistence.Id - * class A { - * @(Id @beanGetter) @BeanProperty val x = 0 - * } - * }}} - * - * In order to annotate the field as well, the meta-annotation `@field` - * would need to be added. - * - * The syntax can be improved using a type alias: - * - * {{{ - * object ScalaJPA { - * type Id = javax.persistence.Id @beanGetter - * } - * import ScalaJPA.Id - * class A { - * @Id @BeanProperty val x = 0 - * } - * }}} - * - * ==Annotating the annotation class== - * - * For annotations defined in Scala, a default target can be specified - * in the annotation class itself, for example - * - * {{{ - * @getter - * class myAnnotation extends Annotation - * }}} - * - * This only changes the default target for the annotation `myAnnotation`. - * When instantiating the annotation, the target can still be specified - * as described in the last section. - */ -package object meta diff --git a/tests/scala2-library/src/library/scala/annotation/meta/param.scala b/tests/scala2-library/src/library/scala/annotation/meta/param.scala deleted file mode 100644 index 1b28e8d27f52..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/param.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * Consult the documentation in package [[scala.annotation.meta]]. 
- */ -final class param extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/meta/setter.scala b/tests/scala2-library/src/library/scala/annotation/meta/setter.scala deleted file mode 100644 index 33be4f0ab8c4..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/meta/setter.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.meta - -/** - * Consult the documentation in package [[scala.annotation.meta]]. - */ -final class setter extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/migration.scala b/tests/scala2-library/src/library/scala/annotation/migration.scala deleted file mode 100644 index e71be00f32fd..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/migration.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** - * An annotation that marks a member as having changed semantics - * between versions. This is intended for methods which for one - * reason or another retain the same name and type signature, - * but some aspect of their behavior is different. An illustrative - * examples is Stack.iterator, which reversed from LIFO to FIFO - * order between Scala 2.7 and 2.8. - * - * @param message A message describing the change, which is emitted - * by the compiler if the flag `-Xmigration` indicates a version - * prior to the changedIn version. - * - * @param changedIn The version, in which the behaviour change was - * introduced. - * - * @since 2.8 - */ - private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/showAsInfix.scala b/tests/scala2-library/src/library/scala/annotation/showAsInfix.scala deleted file mode 100644 index 6c25e08efa5d..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/showAsInfix.scala +++ /dev/null @@ -1,27 +0,0 @@ -package scala.annotation - -/** - * This annotation configures how Scala prints two-parameter generic types. - * - * By default, types with symbolic names are printed infix; while types without - * them are printed using the regular generic type syntax. - * - * Example of usage: - {{{ - scala> class Map[T, U] - defined class Map - - scala> def foo: Int Map Int = ??? - foo: Map[Int,Int] - - scala> @showAsInfix class Map[T, U] - defined class Map - - scala> def foo: Int Map Int = ??? - foo: Int Map Int - }}} - * - * @param enabled whether to show this type as an infix type operator. 
- * @since 2.12.2 - */ -class showAsInfix(enabled: Boolean = true) extends annotation.StaticAnnotation \ No newline at end of file diff --git a/tests/scala2-library/src/library/scala/annotation/strictfp.scala b/tests/scala2-library/src/library/scala/annotation/strictfp.scala deleted file mode 100644 index dd8659aa06e9..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/strictfp.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** If this annotation is present on a method or its enclosing class, - * the strictfp flag will be emitted. - * - * @author Paul Phillips - * @version 2.9 - * @since 2.9 - */ -class strictfp extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/switch.scala b/tests/scala2-library/src/library/scala/annotation/switch.scala deleted file mode 100644 index 00124cf88baf..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/switch.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation - -/** An annotation to be applied to a match expression. If present, - * the compiler will verify that the match has been compiled to a - * [[http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-3.html#jvms-3.10 tableswitch or lookupswitch]] - * and issue an error if it instead compiles into a series of conditional expressions. - * Example usage: -{{{ - val Constant = 'Q' - def tokenMe(ch: Char) = (ch: @switch) match { - case ' ' | '\t' | '\n' => 1 - case 'A' | 'Z' | '$' => 2 - case '5' | Constant => 3 // a non-literal may prevent switch generation: this would not compile - case _ => 4 - } -}}} - * - * Note: for pattern matches with one or two cases, the compiler generates jump instructions. - * Annotating such a match with `@switch` does not issue any warning. - * - * @author Paul Phillips - * @since 2.8 - */ -final class switch extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/tailrec.scala b/tests/scala2-library/src/library/scala/annotation/tailrec.scala deleted file mode 100644 index 03c2b6a166aa..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/tailrec.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** A method annotation which verifies that the method will be compiled - * with tail call optimization. - * - * If it is present, the compiler will issue an error if the method cannot - * be optimized into a loop. 
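As a sketch of the `@tailrec` contract above (the `factorial` helper is illustrative and not part of the library), a method whose recursive call is in tail position compiles to a loop, while a non-tail call is rejected at compile time:

{{{
import scala.annotation.tailrec

def factorial(n: Int): BigInt = {
  @tailrec
  def loop(i: Int, acc: BigInt): BigInt =
    if (i <= 1) acc else loop(i - 1, acc * i)   // self-call in tail position, verified by @tailrec
  loop(n, BigInt(1))
}

// A variant that multiplies after the recursive call, e.g. `i * loop(i - 1, acc)`,
// is not in tail position and would be rejected by the compiler.
}}}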
- * - * @since 2.8 - */ -final class tailrec extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/unchecked/uncheckedStable.scala b/tests/scala2-library/src/library/scala/annotation/unchecked/uncheckedStable.scala deleted file mode 100644 index d1414df06a88..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/unchecked/uncheckedStable.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.unchecked - -/** An annotation for values that are assumed to be stable even though their - * types are volatile. - * - * @since 2.7 - */ -final class uncheckedStable extends scala.annotation.StaticAnnotation {} diff --git a/tests/scala2-library/src/library/scala/annotation/unchecked/uncheckedVariance.scala b/tests/scala2-library/src/library/scala/annotation/unchecked/uncheckedVariance.scala deleted file mode 100644 index 0cd6aac40fa7..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/unchecked/uncheckedVariance.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala.annotation.unchecked - -/** An annotation for type arguments for which one wants to suppress variance checking - * types are volatile. - * - * @since 2.7 - */ -final class uncheckedVariance extends scala.annotation.StaticAnnotation {} diff --git a/tests/scala2-library/src/library/scala/annotation/unspecialized.scala b/tests/scala2-library/src/library/scala/annotation/unspecialized.scala deleted file mode 100644 index 6e77e3a57ec9..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/unspecialized.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** A method annotation which suppresses the creation of - * additional specialized forms based on enclosing specialized - * type parameters. - * - * @since 2.10 - */ -class unspecialized extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/annotation/varargs.scala b/tests/scala2-library/src/library/scala/annotation/varargs.scala deleted file mode 100644 index 46fc790226a1..000000000000 --- a/tests/scala2-library/src/library/scala/annotation/varargs.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** A method annotation which instructs the compiler to generate a - * Java varargs-style forwarder method for interop. This annotation can - * only be applied to methods with repeated parameters. 
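A small sketch of the interop `@varargs` enables (the `Logger` class is illustrative only): the compiler emits an extra Java-varargs forwarder alongside the Scala repeated-parameter method.

{{{
import scala.annotation.varargs

class Logger {
  @varargs
  def log(msgs: String*): Unit = msgs.foreach(println)
}

// Scala call sites are unchanged:   new Logger().log("a", "b")
// Java call sites can now write:    logger.log("a", "b");
// instead of having to construct a Scala Seq by hand.
}}}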
- * - * @since 2.9 - */ -final class varargs extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/beans/BeanDescription.scala b/tests/scala2-library/src/library/scala/beans/BeanDescription.scala deleted file mode 100644 index a9c748dfe75b..000000000000 --- a/tests/scala2-library/src/library/scala/beans/BeanDescription.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.beans - -/** Provides a short description that will be included when generating - * bean information. This annotation can be attached to the bean itself, - * or to any member. - * - * @author Ross Judson (rjudson@managedobjects.com) - */ -class BeanDescription(val description: String) extends scala.annotation.Annotation - diff --git a/tests/scala2-library/src/library/scala/beans/BeanDisplayName.scala b/tests/scala2-library/src/library/scala/beans/BeanDisplayName.scala deleted file mode 100644 index 5937c6517b8c..000000000000 --- a/tests/scala2-library/src/library/scala/beans/BeanDisplayName.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.beans - -/** Provides a display name when generating bean information. This - * annotation can be attached to the bean itself, or to any member. - * - * @author Ross Judson (rjudson@managedobjects.com) - */ -class BeanDisplayName(val name: String) extends scala.annotation.Annotation - diff --git a/tests/scala2-library/src/library/scala/beans/BeanInfo.scala b/tests/scala2-library/src/library/scala/beans/BeanInfo.scala deleted file mode 100644 index d7f0a1618be7..000000000000 --- a/tests/scala2-library/src/library/scala/beans/BeanInfo.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.beans - -/** This annotation indicates that a JavaBean-compliant `BeanInfo` class - * should be generated for this annotated Scala class. - * - * - A `'''val'''` becomes a read-only property. - * - A `'''var'''` becomes a read-write property. - * - A `'''def'''` becomes a method. - * - * @author Ross Judson (rjudson@managedobjects.com) - */ -@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.0") -class BeanInfo extends scala.annotation.Annotation diff --git a/tests/scala2-library/src/library/scala/beans/BeanInfoSkip.scala b/tests/scala2-library/src/library/scala/beans/BeanInfoSkip.scala deleted file mode 100644 index ccbb19385413..000000000000 --- a/tests/scala2-library/src/library/scala/beans/BeanInfoSkip.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.beans - -/** This annotation indicates that bean information should - * not be generated for the val, var, or def that it is - * attached to. 
- * - * @author Ross Judson (rjudson@managedobjects.com) - */ -class BeanInfoSkip extends scala.annotation.Annotation diff --git a/tests/scala2-library/src/library/scala/beans/BeanProperty.scala b/tests/scala2-library/src/library/scala/beans/BeanProperty.scala deleted file mode 100644 index fec469dc7034..000000000000 --- a/tests/scala2-library/src/library/scala/beans/BeanProperty.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.beans - -/** When attached to a field, this annotation adds a setter and a getter - * method following the Java Bean convention. For example: - * {{{ - * @BeanProperty - * var status = "" - * }}} - * adds the following methods to the class: - * {{{ - * def setStatus(s: String) { this.status = s } - * def getStatus: String = this.status - * }}} - * For fields of type `Boolean`, if you need a getter named `isStatus`, - * use the `scala.beans.BooleanBeanProperty` annotation instead. - */ -@scala.annotation.meta.field -class BeanProperty extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/beans/BooleanBeanProperty.scala b/tests/scala2-library/src/library/scala/beans/BooleanBeanProperty.scala deleted file mode 100644 index 775e1ac362aa..000000000000 --- a/tests/scala2-library/src/library/scala/beans/BooleanBeanProperty.scala +++ /dev/null @@ -1,16 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.beans - -/** This annotation has the same functionality as - * `scala.beans.BeanProperty`, but the generated Bean getter will be - * named `isFieldName` instead of `getFieldName`. - */ -@scala.annotation.meta.field -class BooleanBeanProperty extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/beans/ScalaBeanInfo.scala b/tests/scala2-library/src/library/scala/beans/ScalaBeanInfo.scala deleted file mode 100644 index ac8fa263d7f5..000000000000 --- a/tests/scala2-library/src/library/scala/beans/ScalaBeanInfo.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.beans - -/** Provides some simple runtime processing necessary to create - * JavaBean descriptors for Scala entities. The compiler creates - * subclasses of this class automatically when the BeanInfo annotation is - * attached to a class. 
- * - * @author Ross Judson (rjudson@managedobjects.com) - */ -abstract class ScalaBeanInfo(clazz: java.lang.Class[_], - props: Array[String], - methods: Array[String]) extends java.beans.SimpleBeanInfo { - - import java.beans._ - - private val pd = new Array[PropertyDescriptor](props.length / 3) - private val md = - for (m <- clazz.getMethods if methods.exists(_ == m.getName)) - yield new MethodDescriptor(m) - - init() - - override def getPropertyDescriptors() = pd - override def getMethodDescriptors() = md - - // override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass) - - private def init() { - var i = 0 - while (i < props.length) { - pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2)) - i = i + 3 - } - } - -} - diff --git a/tests/scala2-library/src/library/scala/collection/BitSet.scala b/tests/scala2-library/src/library/scala/collection/BitSet.scala deleted file mode 100644 index e255e961408e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/BitSet.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ - -/** A common base class for mutable and immutable bitsets. - * $bitsetinfo - */ -trait BitSet extends SortedSet[Int] - with BitSetLike[BitSet] { - override def empty: BitSet = BitSet.empty -} - -/** $factoryInfo - * @define coll bitset - * @define Coll `BitSet` - */ -object BitSet extends BitSetFactory[BitSet] { - val empty: BitSet = immutable.BitSet.empty - def newBuilder = immutable.BitSet.newBuilder - - /** $canBuildFromInfo */ - implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom -} - diff --git a/tests/scala2-library/src/library/scala/collection/BitSetLike.scala b/tests/scala2-library/src/library/scala/collection/BitSetLike.scala deleted file mode 100644 index ad4d7e137124..000000000000 --- a/tests/scala2-library/src/library/scala/collection/BitSetLike.scala +++ /dev/null @@ -1,264 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import BitSetLike._ -import mutable.StringBuilder - -/** A template trait for bitsets. - * $bitsetinfo - * - * This trait provides most of the operations of a `BitSet` independently of its representation. - * It is inherited by all concrete implementations of bitsets. - * - * @tparam This the type of the bitset itself. - * - * @define bitsetinfo - * Bitsets are sets of non-negative integers which are represented as - * variable-size arrays of bits packed into 64-bit words. The memory footprint of a bitset is - * determined by the largest number stored in it. 
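To make the packed-word representation described above concrete, a short sketch using the concrete `scala.collection.immutable.BitSet` (the numbers are easy to verify by hand):

{{{
import scala.collection.immutable.BitSet

val s = BitSet(1, 64, 65)
s.toBitMask          // Array(2L, 3L): bit 1 set in word 0; bits 0 and 1 set in word 1
s.toBitMask.length   // 2, i.e. one 64-bit word per 64 values of range
s.contains(64)       // true: word(64 >> 6) = word(1), bit 64 % 64 = 0
BitSet.fromBitMask(Array(2L, 3L))   // BitSet(1, 64, 65)
}}}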
- * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define coll bitset - * @define Coll `BitSet` - */ -trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSetLike[Int, This] { self => - - def empty: This - - /** The number of words (each with 64 bits) making up the set */ - protected def nwords: Int - - /** The words at index `idx`, or 0L if outside the range of the set - * '''Note:''' requires `idx >= 0` - */ - protected def word(idx: Int): Long - - /** Creates a new set of this kind from an array of longs - */ - protected def fromBitMaskNoCopy(elems: Array[Long]): This - - /** Creates a bit mask for this set as a new array of longs - */ - def toBitMask: Array[Long] = { - val a = new Array[Long](nwords) - var i = a.length - while(i > 0) { - i -= 1 - a(i) = word(i) - } - a - } - - override def size: Int = { - var s = 0 - var i = nwords - while (i > 0) { - i -= 1 - s += java.lang.Long.bitCount(word(i)) - } - s - } - - override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0) - - implicit def ordering: Ordering[Int] = Ordering.Int - - def rangeImpl(from: Option[Int], until: Option[Int]): This = { - val a = toBitMask - val len = a.length - if (from.isDefined) { - var f = from.get - var pos = 0 - while (f >= 64 && pos < len) { - f -= 64 - a(pos) = 0 - pos += 1 - } - if (f > 0 && pos < len) a(pos) &= ~((1L << f)-1) - } - if (until.isDefined) { - val u = until.get - val w = u / 64 - val b = u % 64 - var clearw = w+1 - while (clearw < len) { - a(clearw) = 0 - clearw += 1 - } - if (w < len) a(w) &= (1L << b)-1 - } - fromBitMaskNoCopy(a) - } - - def iterator: Iterator[Int] = iteratorFrom(0) - - override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] { - private var current = start - private val end = nwords * WordLength - def hasNext: Boolean = { - while (current != end && !self.contains(current)) current += 1 - current != end - } - def next(): Int = - if (hasNext) { val r = current; current += 1; r } - else Iterator.empty.next() - } - - override def foreach[U](f: Int => U): Unit = { - /* NOTE: while loops are significantly faster as of 2.11 and - one major use case of bitsets is performance. Also, there - is nothing to do when all bits are clear, so use that as - the inner loop condition. */ - var i = 0 - while (i < nwords) { - var w = word(i) - var j = i * WordLength - while (w != 0L) { - if ((w&1L) == 1L) f(j) - w = w >>> 1 - j += 1 - } - i += 1 - } - } - - /** Computes the union between this bitset and another bitset by performing - * a bitwise "or". - * - * @param other the bitset to form the union with. - * @return a new bitset consisting of all bits that are in this - * bitset or in the given bitset `other`. - */ - def | (other: BitSet): This = { - val len = this.nwords max other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) | other.word(idx) - fromBitMaskNoCopy(words) - } - - /** Computes the intersection between this bitset and another bitset by performing - * a bitwise "and". - * @param other the bitset to intersect with. - * @return a new bitset consisting of all elements that are both in this - * bitset and in the given bitset `other`. - */ - def & (other: BitSet): This = { - val len = this.nwords min other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & other.word(idx) - fromBitMaskNoCopy(words) - } - - /** Computes the difference of this bitset and another bitset by performing - * a bitwise "and-not". 
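The word-wise operators documented here behave like ordinary set algebra; a quick sketch with small immutable bitsets:

{{{
import scala.collection.immutable.BitSet

val a = BitSet(1, 2, 3)
val b = BitSet(3, 4)
a | b               // BitSet(1, 2, 3, 4)   union        (bitwise OR)
a & b               // BitSet(3)            intersection (bitwise AND)
a &~ b              // BitSet(1, 2)         difference   (bitwise AND-NOT)
a ^ b               // BitSet(1, 2, 4)      symmetric difference (bitwise XOR)
a subsetOf (a | b)  // true
}}}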
- * - * @param other the set of bits to exclude. - * @return a bitset containing those bits of this - * bitset that are not also contained in the given bitset `other`. - */ - def &~ (other: BitSet): This = { - val len = this.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & ~other.word(idx) - fromBitMaskNoCopy(words) - } - - /** Computes the symmetric difference of this bitset and another bitset by performing - * a bitwise "exclusive-or". - * - * @param other the other bitset to take part in the symmetric difference. - * @return a bitset containing those bits of this - * bitset or the other bitset that are not contained in both bitsets. - */ - def ^ (other: BitSet): This = { - val len = this.nwords max other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) ^ other.word(idx) - fromBitMaskNoCopy(words) - } - - def contains(elem: Int): Boolean = - 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L - - /** Tests whether this bitset is a subset of another bitset. - * - * @param other the bitset to test. - * @return `true` if this bitset is a subset of `other`, i.e. if - * every bit of this set is also an element in `other`. - */ - def subsetOf(other: BitSet): Boolean = - (0 until nwords) forall (idx => (this.word(idx) & ~ other.word(idx)) == 0L) - - override def head: Int = { - val n = nwords - var i = 0 - while (i < n) { - val wi = word(i) - if (wi != 0L) return WordLength*i + java.lang.Long.numberOfTrailingZeros(wi) - i += 1 - } - throw new NoSuchElementException("Empty BitSet") - } - - override def last: Int = { - var i = nwords - 1 - while (i >= 0) { - val wi = word(i) - if (wi != 0L) return WordLength*i + 63 - java.lang.Long.numberOfLeadingZeros(wi) - i -= 1 - } - throw new NoSuchElementException("Empty BitSet") - } - - override def addString(sb: StringBuilder, start: String, sep: String, end: String) = { - sb append start - var pre = "" - val max = nwords * WordLength - var i = 0 - while (i != max) { - if (contains(i)) { - sb append pre append i - pre = sep - } - i += 1 - } - sb append end - } - - override def stringPrefix = "BitSet" -} - -/** Companion object for BitSets. 
Contains private data only */ -object BitSetLike { - /* Final vals can sometimes be inlined as constants (faster) */ - private[collection] final val LogWL = 6 - private final val WordLength = 64 - private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 - - private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { - var len = elems.length - while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 - var newlen = len - if (idx >= newlen && w != 0L) newlen = idx + 1 - val newelems = new Array[Long](newlen) - Array.copy(elems, 0, newelems, 0, len) - if (idx < newlen) newelems(idx) = w - else assert(w == 0L) - newelems - } -} diff --git a/tests/scala2-library/src/library/scala/collection/BufferedIterator.scala b/tests/scala2-library/src/library/scala/collection/BufferedIterator.scala deleted file mode 100644 index 1424ef2fd049..000000000000 --- a/tests/scala2-library/src/library/scala/collection/BufferedIterator.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -/** Buffered iterators are iterators which provide a method `head` - * that inspects the next element without discarding it. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -trait BufferedIterator[+A] extends Iterator[A] { - - /** Returns next element of iterator without advancing beyond it. - */ - def head: A - - /** Returns an option of the next element of an iterator without advancing beyond it. - * @return the next element of this iterator if it has a next element - * `None` if it does not - */ - def headOption : Option[A] = if (hasNext) Some(head) else None - - override def buffered: this.type = this -} diff --git a/tests/scala2-library/src/library/scala/collection/CustomParallelizable.scala b/tests/scala2-library/src/library/scala/collection/CustomParallelizable.scala deleted file mode 100644 index cbeb28d643f8..000000000000 --- a/tests/scala2-library/src/library/scala/collection/CustomParallelizable.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import parallel.Combiner - -trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Any with Parallelizable[A, ParRepr] { - override def par: ParRepr - override protected[this] def parCombiner: Combiner[A, ParRepr] = throw new UnsupportedOperationException("") -} - diff --git a/tests/scala2-library/src/library/scala/collection/DefaultMap.scala b/tests/scala2-library/src/library/scala/collection/DefaultMap.scala deleted file mode 100644 index 8afda7cfcfb0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/DefaultMap.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -/** A default map which implements the `+` and `-` methods of maps. 
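As the `DefaultMap` Scaladoc that continues below points out, a concrete subclass only needs to supply `get` and `iterator`; a minimal sketch (the two-entry map is purely illustrative):

{{{
import scala.collection.DefaultMap

class TwoEntries extends DefaultMap[String, Int] {
  def get(key: String): Option[Int] = key match {
    case "a" => Some(1)
    case "b" => Some(2)
    case _   => None
  }
  def iterator: Iterator[(String, Int)] = Iterator("a" -> 1, "b" -> 2)
}

val m = new TwoEntries
m("a")           // 1
m + ("c" -> 3)   // a new immutable Map with the extra binding, via the default `+`
m - "a"          // a new immutable Map without the "a" binding, via the default `-`
}}}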
- * - * Instances that inherit from `DefaultMap[A, B]` still have to define: - * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * }}} - * It refers back to the original map. - * - * It might also be advisable to override `foreach` or `size` if efficient - * implementations can be found. - * - * @since 2.8 - */ -trait DefaultMap[A, +B] extends Map[A, B] { self => - - /** A default implementation which creates a new immutable map. - */ - override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = { - val b = Map.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } - - /** A default implementation which creates a new immutable map. - */ - override def - (key: A): Map[A, B] = { - val b = newBuilder - b ++= this filter (key != _._1) - b.result() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/GenIterable.scala b/tests/scala2-library/src/library/scala/collection/GenIterable.scala deleted file mode 100644 index 1e9e37cc9eb5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenIterable.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - - -import generic._ - - -/** A trait for all iterable collections which may possibly - * have their operations implemented in parallel. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenIterable[+A] -extends GenIterableLike[A, GenIterable[A]] - with GenTraversable[A] - with GenericTraversableTemplate[A, GenIterable] -{ - def seq: Iterable[A] - override def companion: GenericCompanion[GenIterable] = GenIterable -} - - -object GenIterable extends GenTraversableFactory[GenIterable] { - implicit def canBuildFrom[A]: GenericCanBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Iterable.newBuilder -} - diff --git a/tests/scala2-library/src/library/scala/collection/GenIterableLike.scala b/tests/scala2-library/src/library/scala/collection/GenIterableLike.scala deleted file mode 100644 index 1dbb54ddc7c1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenIterableLike.scala +++ /dev/null @@ -1,145 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic.{ CanBuildFrom => CBF } - -/** A template trait for all iterable collections which may possibly - * have their operations implemented in parallel. - * - * This trait contains abstract methods and methods that can be implemented - * directly in terms of other methods. - * - * @define Coll `GenIterable` - * @define coll general iterable collection - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define zipthatinfo the class of the returned collection. Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `(A1, B)` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, B), That]`. - * is found. 
- * @define zipbfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `(A1, B)`. - * @define iterableInfo - * This is a base trait for all Scala collections that define an `iterator` - * method to step through one-by-one the collection's elements. - */ -trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] { - - def iterator: Iterator[A] - - /** Checks if the other iterable collection contains the same elements in the same order as this $coll. - * - * @param that the collection to compare with. - * @tparam A1 the type of the elements of collection `that`. - * @return `true`, if both collections contain the same elements in the same order, `false` otherwise. - * - * @usecase def sameElements(that: GenIterable[A]): Boolean - * @inheritdoc - * - * $orderDependent - * $willNotTerminateInf - * - * @param that the collection to compare with. - * @return `true`, if both collections contain the same elements in the same order, `false` otherwise. - */ - def sameElements[A1 >: A](that: GenIterable[A1]): Boolean - - /** Returns a $coll formed from this $coll and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is longer than the other, its remaining elements are ignored. - * - * @param that The iterable providing the second half of each result pair - * @tparam A1 the type of the first half of the returned pairs (this is always a supertype - * of the collection's element type `A`). - * @tparam B the type of the second half of the returned pairs - * @tparam That $zipthatinfo - * @param bf $zipbfinfo - * @return a new collection of type `That` containing pairs consisting of - * corresponding elements of this $coll and `that`. The length - * of the returned collection is the minimum of the lengths of this $coll and `that`. - * - * @usecase def zip[B](that: GenIterable[B]): $Coll[(A, B)] - * @inheritdoc - * - * $orderDependent - * - * @param that The iterable providing the second half of each result pair - * @tparam B the type of the second half of the returned pairs - * @return a new $coll containing pairs consisting of - * corresponding elements of this $coll and `that`. The length - * of the returned collection is the minimum of the lengths of this $coll and `that`. - */ - def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CBF[Repr, (A1, B), That]): That - - /** Zips this $coll with its indices. - * - * @tparam A1 the type of the first half of the returned pairs (this is always a supertype - * of the collection's element type `A`). - * @tparam That the class of the returned collection. Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `(A1, Int)` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`. - * is found. - * @param bf an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `(A1, Int)`. - * @return A new collection of type `That` containing pairs consisting of all elements of this - * $coll paired with their index. Indices start at `0`. - * - * @usecase def zipWithIndex: $Coll[(A, Int)] - * @inheritdoc - * - * $orderDependent - * - * @return A new $coll containing pairs consisting of all elements of this - * $coll paired with their index. 
Indices start at `0`. - * @example - * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))` - * - */ - def zipWithIndex[A1 >: A, That](implicit bf: CBF[Repr, (A1, Int), That]): That - - /** Returns a $coll formed from this $coll and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is shorter than the other, - * placeholder elements are used to extend the shorter collection to the length of the longer. - * - * @param that the iterable providing the second half of each result pair - * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. - * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. - * @return a new collection of type `That` containing pairs consisting of - * corresponding elements of this $coll and `that`. The length - * of the returned collection is the maximum of the lengths of this $coll and `that`. - * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. - * - * @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)] - * @inheritdoc - * - * $orderDependent - * - * @param that The iterable providing the second half of each result pair - * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. - * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. - * @tparam B the type of the second half of the returned pairs - * @return a new $coll containing pairs consisting of - * corresponding elements of this $coll and `that`. The length - * of the returned collection is the maximum of the lengths of this $coll and `that`. - * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. - */ - def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CBF[Repr, (A1, B), That]): That - -} diff --git a/tests/scala2-library/src/library/scala/collection/GenMap.scala b/tests/scala2-library/src/library/scala/collection/GenMap.scala deleted file mode 100644 index 6bc507ae9319..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenMap.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ - -/** A trait for all traversable collections which may possibly - * have their operations implemented in parallel. 
- * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenMap[K, +V] -extends GenMapLike[K, V, GenMap[K, V]] - with GenIterable[(K, V)] -{ - def seq: Map[K, V] - - def updated [V1 >: V](key: K, value: V1): GenMap[K, V1] -} - -object GenMap extends GenMapFactory[GenMap] { - def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty - - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), GenMap[K, V]] = new MapCanBuildFrom[K, V] -} diff --git a/tests/scala2-library/src/library/scala/collection/GenMapLike.scala b/tests/scala2-library/src/library/scala/collection/GenMapLike.scala deleted file mode 100644 index f6c2d071b510..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenMapLike.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -/** A trait for all maps upon which operations may be - * implemented in parallel. - * - * @define Coll `GenMap` - * @define coll general map - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define mapNote - * - * A map is a collection of bindings from keys to values, where there are - * no duplicate keys. - */ -trait GenMapLike[K, +V, +Repr] extends GenIterableLike[(K, V), Repr] with Equals with Parallelizable[(K, V), parallel.ParMap[K, V]] { - def default(key: K): V - def get(key: K): Option[V] - def apply(key: K): V - def seq: Map[K, V] - def +[V1 >: V](kv: (K, V1)): GenMap[K, V1] - def - (key: K): Repr - - // This hash code must be symmetric in the contents but ought not - // collide trivially. - override def hashCode()= scala.util.hashing.MurmurHash3.mapHash(seq) - - /** Returns the value associated with a key, or a default value if the key is not contained in the map. - * @param key the key. - * @param default a computation that yields a default value in case no binding for `key` is - * found in the map. - * @tparam B1 the result type of the default computation. - * @return the value associated with `key` if it exists, - * otherwise the result of the `default` computation. - * @usecase def getOrElse(key: K, default: => V): V - * @inheritdoc - */ - def getOrElse[V1 >: V](key: K, default: => V1): V1 - - /** Tests whether this map contains a binding for a key. - * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def contains(key: K): Boolean - - /** Tests whether this map contains a binding for a key. This method, - * which implements an abstract method of trait `PartialFunction`, - * is equivalent to `contains`. - * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def isDefinedAt(key: K): Boolean - - def keySet: GenSet[K] - - /** Collects all keys of this map in an iterable collection. - * - * @return the keys of this map as an iterable. - */ - def keys: GenIterable[K] - - /** Collects all values of this map in an iterable collection. - * - * @return the values of this map as an iterable. - */ - def values: GenIterable[V] - - /** Creates an iterator for all keys. - * - * @return an iterator over all keys. - */ - def keysIterator: Iterator[K] - - /** Creates an iterator for all values in this map. 
- * - * @return an iterator over all values that are associated with some key in this map. - */ - def valuesIterator: Iterator[V] - - /** Filters this map by retaining only keys satisfying a predicate. - * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. - */ - def filterKeys(p: K => Boolean): GenMap[K, V] - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. - * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - def mapValues[W](f: V => W): GenMap[K, W] - - /** Compares two maps structurally; i.e., checks if all mappings - * contained in this map are also contained in the other map, - * and vice versa. - * - * @param that the other map - * @return `true` if both maps contain exactly the - * same mappings, `false` otherwise. - */ - override def equals(that: Any): Boolean = that match { - case that: GenMap[b, _] => - (this eq that) || - (that canEqual this) && - (this.size == that.size) && { - try { - this forall { - case (k, v) => that.get(k.asInstanceOf[b]) match { - case Some(`v`) => - true - case _ => false - } - } - } catch { - case ex: ClassCastException => false - }} - case _ => - false - } -} diff --git a/tests/scala2-library/src/library/scala/collection/GenSeq.scala b/tests/scala2-library/src/library/scala/collection/GenSeq.scala deleted file mode 100644 index 50c6474a4ea0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenSeq.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - - -import generic._ - - -/** A trait for all sequences which may possibly - * have their operations implemented in parallel. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenSeq[+A] -extends GenSeqLike[A, GenSeq[A]] - with GenIterable[A] - with Equals - with GenericTraversableTemplate[A, GenSeq] -{ - def seq: Seq[A] - override def companion: GenericCompanion[GenSeq] = GenSeq -} - - -object GenSeq extends GenTraversableFactory[GenSeq] { - implicit def canBuildFrom[A]: GenericCanBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Seq.newBuilder -} diff --git a/tests/scala2-library/src/library/scala/collection/GenSeqLike.scala b/tests/scala2-library/src/library/scala/collection/GenSeqLike.scala deleted file mode 100644 index 405d8d7e57ea..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenSeqLike.scala +++ /dev/null @@ -1,481 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ - -/** A template trait for all sequences which may be traversed - * in parallel. - * - * @define Coll GenSeq - * @define coll general sequence - * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. 
- * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define seqInfo - * Sequences are special cases of iterable collections of class `Iterable`. - * Unlike iterables, sequences always have a defined order of elements. - */ -trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equals with Parallelizable[A, parallel.ParSeq[A]] { - def seq: Seq[A] - - /** Selects an element by its index in the $coll. - * - * Example: - * - * {{{ - * scala> val x = List(1, 2, 3, 4, 5) - * x: List[Int] = List(1, 2, 3, 4, 5) - * - * scala> x(3) - * res1: Int = 4 - * }}} - * - * @param idx The index to select. - * @return the element of this $coll at index `idx`, where `0` indicates the first element. - * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`. - */ - def apply(idx: Int): A - - /** The length of the $coll. - * - * $willNotTerminateInf - * - * Note: `xs.length` and `xs.size` yield the same result. - * - * @return the number of elements in this $coll. - * @throws IllegalArgumentException if the length of the sequence cannot be represented in an `Int`, for example, `(-1 to Int.MaxValue).length`. - */ - def length: Int - - /** Tests whether this $coll contains given index. - * - * The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into - * a `PartialFunction[Int, A]`. - * - * @param idx the index to test - * @return `true` if this $coll contains an element at position `idx`, `false` otherwise. - */ - def isDefinedAt(idx: Int): Boolean = (idx >= 0) && (idx < length) - - /** Computes length of longest segment whose elements all satisfy some predicate. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @param from the index where the search starts. - * @return the length of the longest segment of this $coll starting from index `from` - * such that every element of the segment satisfies the predicate `p`. - */ - def segmentLength(p: A => Boolean, from: Int): Int - - /** Returns the length of the longest prefix whose elements all satisfy some predicate. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return the length of the longest prefix of this $coll - * such that every element of the segment satisfies the predicate `p`. - */ - def prefixLength(p: A => Boolean): Int = segmentLength(p, 0) - - /** Finds index of the first element satisfying some predicate after or at some start index. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @param from the start index - * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def indexWhere(p: A => Boolean, from: Int): Int - - /** Finds index of first element satisfying some predicate. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return the index of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def indexWhere(p: A => Boolean): Int = indexWhere(p, 0) - - /** Finds index of first occurrence of some value in this $coll. - * - * @param elem the element value to search for. - * @tparam B the type of the element `elem`. - * @return the index of the first element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. 
- * - * @usecase def indexOf(elem: A): Int - * @inheritdoc - * - * $mayNotTerminateInf - * - */ - def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) - - /** Finds index of first occurrence of some value in this $coll after or at some start index. - * - * @param elem the element value to search for. - * @tparam B the type of the element `elem`. - * @param from the start index - * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - * - * @usecase def indexOf(elem: A, from: Int): Int - * @inheritdoc - * - * $mayNotTerminateInf - * - */ - def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem == _, from) - - /** Finds index of last occurrence of some value in this $coll. - * - * @param elem the element value to search for. - * @tparam B the type of the element `elem`. - * @return the index of the last element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - * - * @usecase def lastIndexOf(elem: A): Int - * @inheritdoc - * - * $willNotTerminateInf - * - */ - def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem == _) - - /** Finds index of last occurrence of some value in this $coll before or at a given end index. - * - * @param elem the element value to search for. - * @param end the end index. - * @tparam B the type of the element `elem`. - * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - * - * @usecase def lastIndexOf(elem: A, end: Int): Int - * @inheritdoc - */ - def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem == _, end) - - /** Finds index of last element satisfying some predicate. - * - * $willNotTerminateInf - * - * @param p the predicate used to test elements. - * @return the index of the last element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, length - 1) - - /** Finds index of last element satisfying some predicate before or at given end index. - * - * @param p the predicate used to test elements. - * @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def lastIndexWhere(p: A => Boolean, end: Int): Int - - /** Returns new $coll with elements in reversed order. - * - * $willNotTerminateInf - * - * @return A new $coll with all elements of this $coll in reversed order. - */ - def reverse: Repr - - /** - * Builds a new collection by applying a function to all elements of this $coll and - * collecting the results in reversed order. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the given function - * `f` to each element of this $coll and collecting the results in reversed order. - * - * @usecase def reverseMap[B](f: A => B): $Coll[B] - * @inheritdoc - * - * $willNotTerminateInf - * - * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient. - * - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results in reversed order. 
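A minimal sketch (Scala 2.12 REPL, added here purely for illustration): `reverseMap` yields the same elements as `reverse` followed by `map`, but may avoid building the intermediate collection:

    scala> List(1, 2, 3).reverseMap(_ * 10)
    res0: List[Int] = List(30, 20, 10)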
- */ - def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Tests whether this $coll starts with the given sequence. - * - * @param that the sequence to test - * @return `true` if this collection has `that` as a prefix, `false` otherwise. - */ - def startsWith[B](that: GenSeq[B]): Boolean = startsWith(that, 0) - - /** Tests whether this $coll contains the given sequence at a given index. - * - * '''Note''': If the both the receiver object `this` and the argument - * `that` are infinite sequences this method may not terminate. - * - * @param that the sequence to test - * @param offset the index where the sequence is searched. - * @return `true` if the sequence `that` is contained in this $coll at - * index `offset`, otherwise `false`. - */ - def startsWith[B](that: GenSeq[B], offset: Int): Boolean - - /** Tests whether this $coll ends with the given sequence. - * $willNotTerminateInf - * @param that the sequence to test - * @return `true` if this $coll has `that` as a suffix, `false` otherwise. - */ - def endsWith[B](that: GenSeq[B]): Boolean - - /** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence. - * - * @param from the index of the first replaced element - * @param patch the replacement sequence - * @param replaced the number of elements to drop in the original $coll - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new $coll consisting of all elements of this $coll - * except that `replaced` elements starting from `from` are replaced - * by `patch`. - * - * @usecase def patch(from: Int, that: GenSeq[A], replaced: Int): $Coll[A] - * @inheritdoc - * - * @return a new $coll consisting of all elements of this $coll - * except that `replaced` elements starting from `from` are replaced - * by `patch`. - */ - def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of this $coll with one single replaced element. - * @param index the position of the replacement - * @param elem the replacing element - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`. - * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. - * - * @usecase def updated(index: Int, elem: A): $Coll[A] - * @inheritdoc - * - * @return a copy of this $coll with the element at position `index` replaced by `elem`. - */ - def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of the $coll with an element prepended. - * - * @param elem the prepended element - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` consisting of `elem` followed - * by all elements of this $coll. - * - * @usecase def +:(elem: A): $Coll[A] - * @inheritdoc - * - * Note that :-ending operators are right associative (see example). - * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side. - * - * Also, the original $coll is not modified, so you will want to capture the result. 
- * - * Example: - * {{{ - * scala> val x = List(1) - * x: List[Int] = List(1) - * - * scala> val y = 2 +: x - * y: List[Int] = List(2, 1) - * - * scala> println(x) - * List(1) - * }}} - * - * @return a new $coll consisting of `elem` followed - * by all elements of this $coll. - */ - def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of this $coll with an element appended. - * - * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side. - * - * @param elem the appended element - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` consisting of - * all elements of this $coll followed by `elem`. - * - * @usecase def :+(elem: A): $Coll[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * Example: - * {{{ - * scala> val a = List(1) - * a: List[Int] = List(1) - * - * scala> val b = a :+ 2 - * b: List[Int] = List(1, 2) - * - * scala> println(a) - * List(1) - * }}} - * - * @return a new $coll consisting of - * all elements of this $coll followed by `elem`. - */ - def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of this $coll with an element value appended until a given target length is reached. - * - * @param len the target length - * @param elem the padding value - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` consisting of - * all elements of this $coll followed by the minimal number of occurrences of `elem` so - * that the resulting collection has a length of at least `len`. - * @usecase def padTo(len: Int, elem: A): $Coll[A] - * @inheritdoc - * - * @return a new $coll consisting of - * all elements of this $coll followed by the minimal number of occurrences of `elem` so - * that the resulting $coll has a length of at least `len`. - */ - def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Tests whether every element of this $coll relates to the - * corresponding element of another sequence by satisfying a test predicate. - * - * @param that the other sequence - * @param p the test predicate, which relates elements from both sequences - * @tparam B the type of the elements of `that` - * @return `true` if both sequences have the same length and - * `p(x, y)` is `true` for all corresponding elements `x` of this $coll - * and `y` of `that`, otherwise `false`. - */ - def corresponds[B](that: GenSeq[B])(p: (A, B) => Boolean): Boolean - - def toSeq: GenSeq[A] - - /** Produces a new sequence which contains all elements of this $coll and also all elements of - * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. - * - * @param that the sequence to add. - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements of this $coll - * followed by all elements of `that`. - * - * @usecase def union(that: GenSeq[A]): $Coll[A] - * @inheritdoc - * - * Another way to express this - * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`. - * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets. - * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * followed by all elements of `that`. 
- */ - def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = this ++ that - - /** Computes the multiset difference between this $coll and another sequence. - * - * @param that the sequence of elements to remove - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - * - * @usecase def diff(that: GenSeq[A]): $Coll[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - */ - def diff[B >: A](that: GenSeq[B]): Repr - - /** Computes the multiset intersection between this $coll and another sequence. - * - * @param that the sequence of elements to intersect with. - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - * - * @usecase def intersect(that: GenSeq[A]): $Coll[A] - * @inheritdoc - * - * $mayNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[B >: A](that: GenSeq[B]): Repr - - /** Builds a new $coll from this $coll without any duplicate elements. - * $willNotTerminateInf - * - * @return A new $coll which contains the first occurrence of every element of this $coll. - */ - def distinct: Repr - - /** Hashcodes for $Coll produce a value from the hashcodes of all the - * elements of the $coll. - */ - override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) - - /** The equals method for arbitrary sequences. Compares this sequence to - * some other object. 
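To illustrate the multiset semantics of `diff` and `intersect` described above (Scala 2.12 REPL output; not part of the diff):

    scala> List(1, 1, 2, 3) diff List(1, 2)
    res0: List[Int] = List(1, 3)

    scala> List(1, 1, 2, 3) intersect List(1, 2, 2)
    res1: List[Int] = List(1, 2)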
- * @param that The object to compare the sequence to - * @return `true` if `that` is a sequence that has the same elements as - * this sequence in the same order, `false` otherwise - */ - override def equals(that: Any): Boolean = that match { - case that: GenSeq[_] => (that canEqual this) && (this sameElements that) - case _ => false - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/GenSet.scala b/tests/scala2-library/src/library/scala/collection/GenSet.scala deleted file mode 100644 index 6c26eb3cefc3..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenSet.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection - - -import generic._ - - -/** A trait for sets which may possibly - * have their operations implemented in parallel. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenSet[A] -extends GenSetLike[A, GenSet[A]] - with GenIterable[A] - with GenericSetTemplate[A, GenSet] -{ - override def companion: GenericCompanion[GenSet] = GenSet - def seq: Set[A] -} - - -object GenSet extends GenTraversableFactory[GenSet] { - implicit def canBuildFrom[A]: GenericCanBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Set.newBuilder -} - diff --git a/tests/scala2-library/src/library/scala/collection/GenSetLike.scala b/tests/scala2-library/src/library/scala/collection/GenSetLike.scala deleted file mode 100644 index c5355e58ecda..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenSetLike.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - - -/** A template trait for sets which may possibly - * have their operations implemented in parallel. - * - * @define Coll GenSet - * @define coll general set - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define setNote - * - * A set is a collection that contains no duplicate elements. - */ -trait GenSetLike[A, +Repr] -extends GenIterableLike[A, Repr] - with (A => Boolean) - with Equals - with Parallelizable[A, parallel.ParSet[A]] { - - def iterator: Iterator[A] - def contains(elem: A): Boolean - def +(elem: A): Repr - def -(elem: A): Repr - - def seq: Set[A] - - /** Tests if some element is contained in this set. - * - * This method is equivalent to `contains`. It allows sets to be interpreted as predicates. - * @param elem the element to test for membership. - * @return `true` if `elem` is contained in this set, `false` otherwise. - */ - def apply(elem: A): Boolean = this contains elem - - /** Computes the intersection between this set and another set. - * - * @param that the set to intersect with. - * @return a new set consisting of all elements that are both in this - * set and in the given set `that`. - */ - def intersect(that: GenSet[A]): Repr = this filter that - - /** Computes the intersection between this set and another set. - * - * '''Note:''' Same as `intersect`. - * @param that the set to intersect with. - * @return a new set consisting of all elements that are both in this - * set and in the given set `that`. 
- */ - def &(that: GenSet[A]): Repr = this intersect that - - /** Computes the union between of set and another set. - * - * @param that the set to form the union with. - * @return a new set consisting of all elements that are in this - * set or in the given set `that`. - */ - def union(that: GenSet[A]): Repr - - /** Computes the union between this set and another set. - * - * '''Note:''' Same as `union`. - * @param that the set to form the union with. - * @return a new set consisting of all elements that are in this - * set or in the given set `that`. - */ - def | (that: GenSet[A]): Repr = this union that - - /** Computes the difference of this set and another set. - * - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. - */ - def diff(that: GenSet[A]): Repr - - /** The difference of this set and another set. - * - * '''Note:''' Same as `diff`. - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. - */ - def &~(that: GenSet[A]): Repr = this diff that - - /** Tests whether this set is a subset of another set. - * - * @param that the set to test. - * @return `true` if this set is a subset of `that`, i.e. if - * every element of this set is also an element of `that`. - */ - def subsetOf(that: GenSet[A]): Boolean = this forall that - - /** Compares this set with another object for equality. - * - * '''Note:''' This operation contains an unchecked cast: if `that` - * is a set, it will assume with an unchecked cast - * that it has the same element type as this set. - * Any subsequent ClassCastException is treated as a `false` result. - * @param that the other object - * @return `true` if `that` is a set which contains the same elements - * as this set. - */ - override def equals(that: Any): Boolean = that match { - case that: GenSet[_] => - (this eq that) || - (that canEqual this) && - (this.size == that.size) && - (try this subsetOf that.asInstanceOf[GenSet[A]] - catch { case ex: ClassCastException => false }) - case _ => - false - } - - // Careful! Don't write a Set's hashCode like: - // override def hashCode() = this map (_.hashCode) sum - // Calling map on a set drops duplicates: any hashcode collisions would - // then be dropped before they can be added. - // Hash should be symmetric in set entries, but without trivial collisions. - override def hashCode()= scala.util.hashing.MurmurHash3.setHash(seq) -} diff --git a/tests/scala2-library/src/library/scala/collection/GenTraversable.scala b/tests/scala2-library/src/library/scala/collection/GenTraversable.scala deleted file mode 100644 index 80447b1be196..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenTraversable.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ - -/** A trait for all traversable collections which may possibly - * have their operations implemented in parallel. 
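As an aside (illustration only, assuming ordinary Scala 2.12 immutable sets), the GenSetLike operator aliases deleted above behave exactly like their named counterparts:

    scala> Set(1, 2, 3) & Set(2, 3, 4)
    res0: scala.collection.immutable.Set[Int] = Set(2, 3)

    scala> Set(1, 2) subsetOf Set(1, 2, 3)
    res1: Boolean = true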
- * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenTraversable[+A] -extends GenTraversableLike[A, GenTraversable[A]] - with GenTraversableOnce[A] - with GenericTraversableTemplate[A, GenTraversable] -{ - def seq: Traversable[A] - def companion: GenericCompanion[GenTraversable] = GenTraversable -} - -object GenTraversable extends GenTraversableFactory[GenTraversable] { - implicit def canBuildFrom[A]: GenericCanBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Traversable.newBuilder -} diff --git a/tests/scala2-library/src/library/scala/collection/GenTraversableLike.scala b/tests/scala2-library/src/library/scala/collection/GenTraversableLike.scala deleted file mode 100644 index 0ee5542e3070..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenTraversableLike.scala +++ /dev/null @@ -1,378 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - - -import generic._ -import scala.annotation.migration - - -/** A template trait for all traversable collections upon which operations - * may be implemented in parallel. - * - * @define thatinfo the class of the returned collection. Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `B` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]` - * is found. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines - * the result class `That` from the current representation type `Repr` and - * the new element type `B`. - * @define orderDependent - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered. - * @define orderDependentFold - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered. - * or the operator is associative and commutative. - * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - * - * @define traversableInfo - * This is a base trait of all kinds of Scala collections. - * - * @define Coll `GenTraversable` - * @define coll general collection - * @define collectExample - * @tparam A the collection element type. - * @tparam Repr the actual type of the element container. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with Parallelizable[A, parallel.ParIterable[A]] { - - def repr: Repr - - def size: Int - - /** Selects the first element of this $coll. - * $orderDependent - * @return the first element of this $coll. - * @throws NoSuchElementException if the $coll is empty. - */ - def head: A - - /** Optionally selects the first element. - * $orderDependent - * @return the first element of this $coll if it is nonempty, - * `None` if it is empty. - */ - def headOption: Option[A] - - /** Tests whether this $coll can be repeatedly traversed. - * @return `true` - */ - def isTraversableAgain: Boolean - - /** Selects all elements except the first. 
- * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the first one. - * @throws UnsupportedOperationException if the $coll is empty. - */ - def tail: Repr - - /** Selects the last element. - * $orderDependent - * @return The last element of this $coll. - * @throws NoSuchElementException If the $coll is empty. - */ - def last: A - - /** Optionally selects the last element. - * $orderDependent - * @return the last element of this $coll$ if it is nonempty, - * `None` if it is empty. - */ - def lastOption: Option[A] - - /** Selects all elements except the last. - * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the last one. - * @throws UnsupportedOperationException if the $coll is empty. - */ - def init: Repr - - /** Computes a prefix scan of the elements of the collection. - * - * Note: The neutral element `z` may be applied more than once. - * - * @tparam B element type of the resulting collection - * @tparam That type of the resulting collection - * @param z neutral element for the operator `op` - * @param op the associative operator for the scan - * @param cbf combiner factory which provides a combiner - * - * @return a new $coll containing the prefix scan of the elements in this $coll - */ - def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That - - /** Produces a collection containing cumulative results of applying the - * operator going left to right. - * - * $willNotTerminateInf - * $orderDependent - * - * @tparam B the type of the elements in the resulting collection - * @tparam That the actual type of the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @param bf $bfinfo - * @return collection with intermediate results - */ - def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Produces a collection containing cumulative results of applying the operator going right to left. - * The head of the collection is the last cumulative result. - * $willNotTerminateInf - * $orderDependent - * - * Example: - * {{{ - * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0) - * }}} - * - * @tparam B the type of the elements in the resulting collection - * @tparam That the actual type of the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @param bf $bfinfo - * @return collection with intermediate results - */ - @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0") - def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - def foreach[U](f: A => U): Unit - - /** Builds a new collection by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the given function - * `f` to each element of this $coll and collecting the results. - * - * @usecase def map[B](f: A => B): $Coll[B] - * @inheritdoc - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. 
- */ - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Builds a new collection by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - * - * @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B] - * @inheritdoc - * - * $collectExample - * - * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - */ - def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Builds a new collection by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - * - * @usecase def flatMap[B](f: A => GenTraversableOnce[B]): $Coll[B] - * @inheritdoc - * - * For example: - * - * {{{ - * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+") - * }}} - * - * The type of the resulting collection is guided by the static type of $coll. This might - * cause unexpected results sometimes. For example: - * - * {{{ - * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set - * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet) - * - * // lettersOf will return a Set[Char], not a Seq - * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq) - * - * // xs will be an Iterable[Int] - * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2) - * - * // ys will be a Map[Int, Int] - * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2) - * }}} - * - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - */ - def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the - * right hand operand. The element type of the $coll is the most specific superclass encompassing - * the element types of the two operands. - * - * @param that the traversable to append. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements - * of this $coll followed by all elements of `that`. - */ - def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Selects all elements of this $coll which satisfy a predicate. - * - * @param pred the predicate used to test elements. 
- * @return a new $coll consisting of all elements of this $coll that satisfy the given - * predicate `p`. Their order may not be preserved. - */ - def filter(pred: A => Boolean): Repr - - /** Selects all elements of this $coll which do not satisfy a predicate. - * - * @param pred the predicate used to test elements. - * @return a new $coll consisting of all elements of this $coll that do not satisfy the given - * predicate `p`. Their order may not be preserved. - */ - def filterNot(pred: A => Boolean): Repr - - /** Partitions this $coll in two ${coll}s according to a predicate. - * - * @param pred the predicate on which to partition. - * @return a pair of ${coll}s: the first $coll consists of all elements that - * satisfy the predicate `p` and the second $coll consists of all elements - * that don't. The relative order of the elements in the resulting ${coll}s - * may not be preserved. - */ - def partition(pred: A => Boolean): (Repr, Repr) - - /** Partitions this $coll into a map of ${coll}s according to some discriminator function. - * - * Note: this method is not re-implemented by views. This means - * when applied to a view it will always force the view and - * return a new $coll. - * - * @param f the discriminator function. - * @tparam K the type of keys returned by the discriminator function. - * @return A map from keys to ${coll}s such that the following invariant holds: - * {{{ - * (xs groupBy f)(k) = xs filter (x => f(x) == k) - * }}} - * That is, every key `k` is bound to a $coll of those elements `x` - * for which `f(x)` equals `k`. - * - */ - def groupBy[K](f: A => K): GenMap[K, Repr] - - /** Selects first ''n'' elements. - * $orderDependent - * @param n the number of elements to take from this $coll. - * @return a $coll consisting only of the first `n` elements of this $coll, - * or else the whole $coll, if it has less than `n` elements. - */ - def take(n: Int): Repr - - /** Selects all elements except first ''n'' ones. - * $orderDependent - * @param n the number of elements to drop from this $coll. - * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the - * empty $coll, if this $coll has less than `n` elements. - */ - def drop(n: Int): Repr - - /** Selects an interval of elements. The returned collection is made up - * of all elements `x` which satisfy the invariant: - * {{{ - * from <= indexOf(x) < until - * }}} - * $orderDependent - * - * @param unc_from the lowest index to include from this $coll. - * @param unc_until the lowest index to EXCLUDE from this $coll. - * @return a $coll containing the elements greater than or equal to - * index `from` extending up to (but not including) index `until` - * of this $coll. - */ - def slice(unc_from: Int, unc_until: Int): Repr - - /** Splits this $coll into two at a given position. - * Note: `c splitAt n` is equivalent to (but possibly more efficient than) - * `(c take n, c drop n)`. - * $orderDependent - * - * @param n the position at which to split. - * @return a pair of ${coll}s consisting of the first `n` - * elements of this $coll, and the other elements. - */ - def splitAt(n: Int): (Repr, Repr) - - /** Takes longest prefix of elements that satisfy a predicate. - * $orderDependent - * @param pred The predicate used to test elements. - * @return the longest prefix of this $coll whose elements all satisfy - * the predicate `p`. - */ - def takeWhile(pred: A => Boolean): Repr - - /** Splits this $coll into a prefix/suffix pair according to a predicate. 
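A short sketch of the splitting operations documented above (Scala 2.12 REPL output, shown for illustration only):

    scala> List(1, 2, 3, 4) partition (_ % 2 == 0)
    res0: (List[Int], List[Int]) = (List(2, 4),List(1, 3))

    scala> List(1, 2, 3, 4) splitAt 2
    res1: (List[Int], List[Int]) = (List(1, 2),List(3, 4))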
- * - * Note: `c span p` is equivalent to (but possibly more efficient than) - * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the - * predicate `p` does not cause any side-effects. - * $orderDependent - * - * @param pred the test predicate - * @return a pair consisting of the longest prefix of this $coll whose - * elements all satisfy `p`, and the rest of this $coll. - */ - def span(pred: A => Boolean): (Repr, Repr) - - /** Drops longest prefix of elements that satisfy a predicate. - * $orderDependent - * @param pred The predicate used to test elements. - * @return the longest suffix of this $coll whose first element - * does not satisfy the predicate `p`. - */ - def dropWhile(pred: A => Boolean): Repr - - /** Defines the prefix of this object's `toString` representation. - * - * @return a string representation which starts the result of `toString` - * applied to this $coll. By default the string prefix is the - * simple name of the collection class $coll. - */ - def stringPrefix: String - -} diff --git a/tests/scala2-library/src/library/scala/collection/GenTraversableOnce.scala b/tests/scala2-library/src/library/scala/collection/GenTraversableOnce.scala deleted file mode 100644 index f87f7654bca5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/GenTraversableOnce.scala +++ /dev/null @@ -1,666 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import scala.reflect.ClassTag -import scala.collection.generic.CanBuildFrom -import scala.annotation.unchecked.{ uncheckedVariance => uV } -import scala.language.higherKinds - -/** A template trait for all traversable-once objects which may be - * traversed in parallel. - * - * Methods in this trait are either abstract or can be implemented in terms - * of other methods. - * - * @define Coll `GenTraversableOnce` - * @define coll collection or iterator - * @define possiblyparinfo - * This trait may possibly have operations implemented in parallel. - * @define undefinedorder - * The order in which operations are performed on elements is unspecified - * and may be nondeterministic. - * @define orderDependent - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered. - * @define orderDependentFold - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered or the operator is associative - * and commutative. - * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenTraversableOnce[+A] extends Any { - - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - * - * Note: this method underlies the implementation of most other bulk operations. - * It's important to implement this method in an efficient way. 
- * - */ - def foreach[U](f: A => U): Unit - - /** Tests whether this $coll is known to have a finite size. - * All strict collections are known to have finite size. For a non-strict - * collection such as `Stream`, the predicate returns `'''true'''` if all - * elements have been computed. It returns `'''false'''` if the stream is - * not yet evaluated to the end. Non-empty Iterators usually return - * `'''false'''` even if they were created from a collection with a known - * finite size. - * - * Note: many collection methods will not work on collections of infinite sizes. - * The typical failure mode is an infinite loop. These methods always attempt a - * traversal without checking first that `hasDefiniteSize` returns `'''true'''`. - * However, checking `hasDefiniteSize` can provide an assurance that size is - * well-defined and non-termination is not a concern. - * - * @return `'''true'''` if this collection is known to have finite size, - * `'''false'''` otherwise. - */ - def hasDefiniteSize: Boolean - - def seq: TraversableOnce[A] - - /** The size of this $coll. - * - * $willNotTerminateInf - * - * @return the number of elements in this $coll. - */ - def size: Int - - /** The size of this $coll, if it can be cheaply computed - * - * @return the number of elements in this $coll, or -1 if the size cannot be determined cheaply - */ - protected[collection] def sizeHintIfCheap: Int = -1 - - /** Tests whether the $coll is empty. - * - * Note: Implementations in subclasses that are not repeatedly traversable must take - * care not to consume any elements when `isEmpty` is called. - * - * @return `true` if the $coll contains no elements, `false` otherwise. - */ - def isEmpty: Boolean - - /** Tests whether the $coll is not empty. - * - * @return `true` if the $coll contains at least one element, `false` otherwise. - */ - def nonEmpty: Boolean - - /** Tests whether this $coll can be repeatedly traversed. Always - * true for Traversables and false for Iterators unless overridden. - * - * @return `true` if it is repeatedly traversable, `false` otherwise. - */ - def isTraversableAgain: Boolean - - /** Reduces the elements of this $coll using the specified associative binary operator. - * - * $undefinedorder - * - * @tparam A1 A type parameter for the binary operator, a supertype of `A`. - * @param op A binary operator that must be associative. - * @return The result of applying reduce operator `op` between all the elements if the $coll is nonempty. - * @throws UnsupportedOperationException - * if this $coll is empty. - */ - def reduce[A1 >: A](op: (A1, A1) => A1): A1 - - /** Reduces the elements of this $coll, if any, using the specified - * associative binary operator. - * - * $undefinedorder - * - * @tparam A1 A type parameter for the binary operator, a supertype of `A`. - * @param op A binary operator that must be associative. - * @return An option value containing result of applying reduce operator `op` between all - * the elements if the collection is nonempty, and `None` otherwise. - */ - def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1] - - /** Folds the elements of this $coll using the specified associative - * binary operator. - * - * $undefinedorder - * $willNotTerminateInf - * - * @tparam A1 a type parameter for the binary operator, a supertype of `A`. - * @param z a neutral element for the fold operation; may be added to the result - * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, - * 0 for addition, or 1 for multiplication). 
- * @param op a binary operator that must be associative. - * @return the result of applying the fold operator `op` between all the elements and `z`, or `z` if this $coll is empty. - */ - def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 - - /** Applies a binary operator to a start value and all elements of this $coll, - * going left to right. - * - * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as - * `xs foldLeft z`. - * - * Examples: - * - * Note that the folding function used to compute b is equivalent to that used to compute c. - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = (5 /: a)(_+_) - * b: Int = 15 - * - * scala> val c = (5 /: a)((x,y) => x + y) - * c: Int = 15 - * }}} - - * $willNotTerminateInf - * $orderDependentFold - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going left to right with the start value `z` on the left: - * {{{ - * op(...op(op(z, x_1), x_2), ..., x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - */ - def /:[B](z: B)(op: (B, A) => B): B - - /** Applies a binary operator to all elements of this $coll and a start value, - * going right to left. - * - * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as - * `xs foldRight z`. - * $willNotTerminateInf - * $orderDependentFold - * - * Examples: - * - * Note that the folding function used to compute b is equivalent to that used to compute c. - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = (a :\ 5)(_+_) - * b: Int = 15 - * - * scala> val c = (a :\ 5)((x,y) => x + y) - * c: Int = 15 - * - * }}} - * - * @param z the start value - * @param op the binary operator - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going right to left with the start value `z` on the right: - * {{{ - * op(x_1, op(x_2, ... op(x_n, z)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - */ - def :\[B](z: B)(op: (A, B) => B): B - - /** Applies a binary operator to a start value and all elements of this $coll, - * going left to right. - * - * $willNotTerminateInf - * $orderDependentFold - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going left to right with the start value `z` on the left: - * {{{ - * op(...op(z, x_1), x_2, ..., x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * Returns `z` if this $coll is empty. - */ - def foldLeft[B](z: B)(op: (B, A) => B): B - - /** Applies a binary operator to all elements of this $coll and a start value, - * going right to left. - * - * $willNotTerminateInf - * $orderDependentFold - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going right to left with the start value `z` on the right: - * {{{ - * op(x_1, op(x_2, ... op(x_n, z)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * Returns `z` if this $coll is empty. 
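The difference in association between `foldLeft` and `foldRight` is easiest to see with a non-associative operator; a Scala 2.12 REPL sketch (illustration only):

    scala> List(1, 2, 3).foldLeft(0)(_ - _)
    res0: Int = -6

    scala> List(1, 2, 3).foldRight(0)(_ - _)
    res1: Int = 2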
- */ - def foldRight[B](z: B)(op: (A, B) => B): B - - /** Aggregates the results of applying an operator to subsequent elements. - * - * This is a more general form of `fold` and `reduce`. It is similar to - * `foldLeft` in that it doesn't require the result to be a supertype of the - * element type. In addition, it allows parallel collections to be processed - * in chunks, and then combines the intermediate results. - * - * `aggregate` splits the $coll into partitions and processes each - * partition by sequentially applying `seqop`, starting with `z` (like - * `foldLeft`). Those intermediate results are then combined by using - * `combop` (like `fold`). The implementation of this operation may operate - * on an arbitrary number of collection partitions (even 1), so `combop` may - * be invoked an arbitrary number of times (even 0). - * - * As an example, consider summing up the integer values of a list of chars. - * The initial value for the sum is 0. First, `seqop` transforms each input - * character to an Int and adds it to the sum (of the partition). Then, - * `combop` just needs to sum up the intermediate results of the partitions: - * {{{ - * List('a', 'b', 'c').aggregate(0)({ (sum, ch) => sum + ch.toInt }, { (p1, p2) => p1 + p2 }) - * }}} - * - * @tparam B the type of accumulated results - * @param z the initial value for the accumulated result of the partition - this - * will typically be the neutral element for the `seqop` operator (e.g. - * `Nil` for list concatenation or `0` for summation) and may be evaluated - * more than once - * @param seqop an operator used to accumulate results within a partition - * @param combop an associative operator used to combine results from different partitions - */ - def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B - - /** Applies a binary operator to all elements of this $coll, going right to left. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going right to left: - * {{{ - * op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * @throws UnsupportedOperationException if this $coll is empty. - */ - def reduceRight[B >: A](op: (A, B) => B): B - - /** Optionally applies a binary operator to all elements of this $coll, going left to right. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty, - * `None` otherwise. - */ - def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] - - /** Optionally applies a binary operator to all elements of this $coll, going - * right to left. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty, - * `None` otherwise. - */ - def reduceRightOption[B >: A](op: (A, B) => B): Option[B] - - /** Counts the number of elements in the $coll which satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return the number of elements satisfying the predicate `p`. - */ - def count(p: A => Boolean): Int - - /** Sums up the elements of this collection. 
- * - * @param num an implicit parameter defining a set of numeric operations - * which includes the `+` operator to be used in forming the sum. - * @tparam A1 the result type of the `+` operator. - * @return the sum of all elements of this $coll with respect to the `+` operator in `num`. - * - * @usecase def sum: A - * @inheritdoc - * - * @return the sum of all elements in this $coll of numbers of type `Int`. - * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation - * can be used as element type of the $coll and as result type of `sum`. - * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`. - * - */ - def sum[A1 >: A](implicit num: Numeric[A1]): A1 - - /** Multiplies up the elements of this collection. - * - * @param num an implicit parameter defining a set of numeric operations - * which includes the `*` operator to be used in forming the product. - * @tparam A1 the result type of the `*` operator. - * @return the product of all elements of this $coll with respect to the `*` operator in `num`. - * - * @usecase def product: A - * @inheritdoc - * - * @return the product of all elements in this $coll of numbers of type `Int`. - * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation - * can be used as element type of the $coll and as result type of `product`. - * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`. - */ - def product[A1 >: A](implicit num: Numeric[A1]): A1 - - /** Finds the smallest element. - * - * @param ord An ordering to be used for comparing elements. - * @tparam A1 The type over which the ordering is defined. - * @return the smallest element of this $coll with respect to the ordering `ord`. - * - * @usecase def min: A - * @inheritdoc - * - * @return the smallest element of this $coll - */ - def min[A1 >: A](implicit ord: Ordering[A1]): A - - /** Finds the largest element. - * - * @param ord An ordering to be used for comparing elements. - * @tparam A1 The type over which the ordering is defined. - * @return the largest element of this $coll with respect to the ordering `ord`. - * - * @usecase def max: A - * @inheritdoc - * - * @return the largest element of this $coll. - */ - def max[A1 >: A](implicit ord: Ordering[A1]): A - - /** Finds the first element which yields the largest value measured by function f. - * - * @param cmp An ordering to be used for comparing elements. - * @tparam B The result type of the function f. - * @param f The measuring function. - * @return the first element of this $coll with the largest value measured by function f - * with respect to the ordering `cmp`. - * - * @usecase def maxBy[B](f: A => B): A - * @inheritdoc - * - * @return the first element of this $coll with the largest value measured by function f. - */ - def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A - - /** Finds the first element which yields the smallest value measured by function f. - * - * @param cmp An ordering to be used for comparing elements. - * @tparam B The result type of the function f. - * @param f The measuring function. - * @return the first element of this $coll with the smallest value measured by function f - * with respect to the ordering `cmp`. - * - * @usecase def minBy[B](f: A => B): A - * @inheritdoc - * - * @return the first element of this $coll with the smallest value measured by function f. - */ - def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A - - /** Tests whether a predicate holds for all elements of this $coll. 
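For illustration (Scala 2.12 REPL, not part of the deleted sources), `product` and `maxBy` as documented above:

    scala> List(1, 2, 3, 4).product
    res0: Int = 24

    scala> List("apple", "fig", "banana").maxBy(_.length)
    res1: String = banana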
- * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if this $coll is empty or the given predicate `p` - * holds for all elements of this $coll, otherwise `false`. - */ - def forall(@deprecatedName('pred) p: A => Boolean): Boolean - - /** Tests whether a predicate holds for at least one element of this $coll. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` is satisfied by at least one element of this $coll, otherwise `false` - */ - def exists(@deprecatedName('pred) p: A => Boolean): Boolean - - /** Finds the first element of the $coll satisfying a predicate, if any. - * - * $mayNotTerminateInf - * $orderDependent - * - * @param p the predicate used to test elements. - * @return an option value containing the first element in the $coll - * that satisfies `p`, or `None` if none exists. - */ - def find(@deprecatedName('pred) p: A => Boolean): Option[A] - - /** Copies the elements of this $coll to an array. - * Fills the given array `xs` with values of this $coll. - * Copying will stop once either the end of the current $coll is reached, - * or the end of the target array is reached. - * - * @param xs the array to fill. - * @tparam B the type of the elements of the target array. - * - * @usecase def copyToArray(xs: Array[A]): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ - def copyToArray[B >: A](xs: Array[B]): Unit - - /** Copies the elements of this $coll to an array. - * Fills the given array `xs` with values of this $coll, beginning at index `start`. - * Copying will stop once either the end of the current $coll is reached, - * or the end of the target array is reached. - * - * @param xs the array to fill. - * @param start the starting index. - * @tparam B the type of the elements of the target array. - * - * @usecase def copyToArray(xs: Array[A], start: Int): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ - def copyToArray[B >: A](xs: Array[B], start: Int): Unit - - /** Copies the elements of this $coll to an array. - * Fills the given array `xs` with at most `len` elements of - * this $coll, starting at position `start`. - * Copying will stop once either the end of the current $coll is reached, - * or the end of the target array is reached, or `len` elements have been copied. - * - * @param xs the array to fill. - * @param start the starting index. - * @param len the maximal number of elements to copy. - * @tparam B the type of the elements of the target array. - * - * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit - - /** Displays all elements of this $coll in a string using start, end, and - * separator strings. - * - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return a string representation of this $coll. The resulting string - * begins with the string `start` and ends with the string - * `end`. Inside, the string representations (w.r.t. the method - * `toString`) of all elements of this $coll are separated by - * the string `sep`. - * - * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"` - */ - def mkString(start: String, sep: String, end: String): String - - /** Displays all elements of this $coll in a string using a separator string. - * - * @param sep the separator string. - * @return a string representation of this $coll. 
In the resulting string - * the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * @example `List(1, 2, 3).mkString("|") = "1|2|3"` - */ - def mkString(sep: String): String - - /** Displays all elements of this $coll in a string. - * - * @return a string representation of this $coll. In the resulting string - * the string representations (w.r.t. the method `toString`) - * of all elements of this $coll follow each other without any - * separator string. - */ - def mkString: String - - /** Converts this $coll to an array. - * - * @tparam A1 the type of the elements of the array. An `ClassTag` for - * this type must be available. - * @return an array containing all elements of this $coll. - * - * @usecase def toArray: Array[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * @return an array containing all elements of this $coll. - * An `ClassTag` must be available for the element type of this $coll. - */ - def toArray[A1 >: A: ClassTag]: Array[A1] - - /** Converts this $coll to a list. - * $willNotTerminateInf - * @return a list containing all elements of this $coll. - */ - def toList: List[A] - - /** Converts this $coll to an indexed sequence. - * $willNotTerminateInf - * @return an indexed sequence containing all elements of this $coll. - */ - def toIndexedSeq: immutable.IndexedSeq[A] - - /** Converts this $coll to a stream. - * @return a stream containing all elements of this $coll. - */ - def toStream: Stream[A] - - /** Returns an Iterator over the elements in this $coll. Will return - * the same Iterator if this instance is already an Iterator. - * $willNotTerminateInf - * @return an Iterator containing all elements of this $coll. - */ - def toIterator: Iterator[A] - - /** Uses the contents of this $coll to create a new mutable buffer. - * $willNotTerminateInf - * @return a buffer containing all elements of this $coll. - */ - def toBuffer[A1 >: A]: scala.collection.mutable.Buffer[A1] - - /** Converts this $coll to an unspecified Traversable. Will return - * the same collection if this instance is already Traversable. - * $willNotTerminateInf - * @return a Traversable containing all elements of this $coll. - */ - def toTraversable: GenTraversable[A] - - /** Converts this $coll to an iterable collection. Note that - * the choice of target `Iterable` is lazy in this default implementation - * as this `TraversableOnce` may be lazy and unevaluated (i.e. it may - * be an iterator which is only traversable once). - * - * $willNotTerminateInf - * @return an `Iterable` containing all elements of this $coll. - */ - def toIterable: GenIterable[A] - - /** Converts this $coll to a sequence. As with `toIterable`, it's lazy - * in this default implementation, as this `TraversableOnce` may be - * lazy and unevaluated. - * - * $willNotTerminateInf - * @return a sequence containing all elements of this $coll. - */ - def toSeq: GenSeq[A] - - /** Converts this $coll to a set. - * $willNotTerminateInf - * @return a set containing all elements of this $coll. - */ - def toSet[A1 >: A]: GenSet[A1] - - /** Converts this $coll to a map. This method is unavailable unless - * the elements are members of Tuple2, each ((T, U)) becoming a key-value - * pair in the map. Duplicate keys will be overwritten by later keys: - * if this is an unordered collection, which key is in the resulting map - * is undefined. - * @return a map containing all elements of this $coll. 
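A quick illustration of the duplicate-key behavior of `toMap` described above (Scala 2.12 REPL; later keys overwrite earlier ones):

    scala> List("a" -> 1, "b" -> 2, "a" -> 3).toMap
    res0: scala.collection.immutable.Map[String,Int] = Map(a -> 3, b -> 2)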
- * - * @usecase def toMap[T, U]: Map[T, U] - * @inheritdoc - * $willNotTerminateInf - * @return a map of type `immutable.Map[T, U]` - * containing all key/value pairs of type `(T, U)` of this $coll. - */ - def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V] - - /** Converts this $coll to a Vector. - * $willNotTerminateInf - * @return a vector containing all elements of this $coll. - */ - def toVector: Vector[A] - - /** Converts this $coll into another by copying all elements. - * @tparam Col The collection type to build. - * @return a new collection containing all elements of this $coll. - * - * @usecase def to[Col[_]]: Col[A] - * @inheritdoc - * $willNotTerminateInf - * @return a new collection containing all elements of this $coll. - */ - def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] -} diff --git a/tests/scala2-library/src/library/scala/collection/IndexedSeq.scala b/tests/scala2-library/src/library/scala/collection/IndexedSeq.scala deleted file mode 100644 index 1a330261014f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IndexedSeq.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.Builder - -/** A base trait for indexed sequences. - * $indexedSeqInfo - */ -trait IndexedSeq[+A] extends Seq[A] - with GenericTraversableTemplate[A, IndexedSeq] - with IndexedSeqLike[A, IndexedSeq[A]] { - override def companion: GenericCompanion[IndexedSeq] = IndexedSeq - override def seq: IndexedSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. - * @define coll indexed sequence - * @define Coll `IndexedSeq` - */ -object IndexedSeq extends IndexedSeqFactory[IndexedSeq] { - // A single CBF which can be checked against to identify - // an indexed collection type. - override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { - override def apply() = newBuilder[Nothing] - } - def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -} diff --git a/tests/scala2-library/src/library/scala/collection/IndexedSeqLike.scala b/tests/scala2-library/src/library/scala/collection/IndexedSeqLike.scala deleted file mode 100644 index f0cede224dfd..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IndexedSeqLike.scala +++ /dev/null @@ -1,97 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -/** A template trait for indexed sequences of type `IndexedSeq[A]`. - * - * $indexedSeqInfo - * - * This trait just implements `iterator` in terms of `apply` and `length`. - * However, see `IndexedSeqOptimized` for an implementation trait that overrides operations - * to make them run faster under the assumption of fast random access with `apply`. - * - * @define Coll IndexedSeq - * @define indexedSeqInfo - * Indexed sequences support constant-time or near constant-time element - * access and length computation. 
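The CanBuildFrom-based `to[Col]` method removed above with `TraversableOnce` is usually called with an explicit target constructor; a hedged Scala 2.12 sketch, with illustrative targets:

import scala.collection.immutable.SortedSet

object ToTargetCollection {
  def main(args: Array[String]): Unit = {
    val xs = List(3, 1, 2)
    // `to[Col]` resolves an implicit CanBuildFrom[Nothing, A, Col[A]] for the target
    val v: Vector[Int]    = xs.to[Vector]
    val s: SortedSet[Int] = xs.to[SortedSet]
    assert(v == Vector(3, 1, 2))
    assert(s.toList == List(1, 2, 3))
  }
}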
They are defined in terms of abstract methods - * `apply` for indexing and `length`. - * - * Indexed sequences do not add any new methods to `Seq`, but promise - * efficient implementations of random access patterns. - * - * @tparam A the element type of the $coll - * @tparam Repr the type of the actual $coll containing the elements. - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { - self => - - def seq: IndexedSeq[A] - override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ? - - override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]] - override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]] - - /** The class of the iterator returned by the `iterator` method. - * multiple `take`, `drop`, and `slice` operations on this iterator are bunched - * together for better efficiency. - */ - // pre: start >= 0, end <= self.length - @SerialVersionUID(1756321872811029277L) - protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable { - private var index = start - private def available = (end - index) max 0 - - def hasNext: Boolean = index < end - - def next(): A = { - if (index >= end) - Iterator.empty.next() - - val x = self(index) - index += 1 - x - } - - def head = { - if (index >= end) - Iterator.empty.next() - - self(index) - } - - override def drop(n: Int): Iterator[A] = - if (n <= 0) new Elements(index, end) - else if (index + n >= end) new Elements(end, end) - else new Elements(index + n, end) - override def take(n: Int): Iterator[A] = - if (n <= 0) Iterator.empty - else if (n <= available) new Elements(index, index + n) - else new Elements(index, end) - override def slice(from: Int, until: Int): Iterator[A] = - this take until drop from - } - - override /*IterableLike*/ - def iterator: Iterator[A] = new Elements(0, length) - - /* Overridden for efficiency */ - override def toBuffer[A1 >: A]: mutable.Buffer[A1] = { - val result = new mutable.ArrayBuffer[A1](size) - copyToBuffer(result) - result - } - - override protected[collection] def sizeHintIfCheap: Int = size -} diff --git a/tests/scala2-library/src/library/scala/collection/IndexedSeqOptimized.scala b/tests/scala2-library/src/library/scala/collection/IndexedSeqOptimized.scala deleted file mode 100644 index cae2e000af95..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IndexedSeqOptimized.scala +++ /dev/null @@ -1,280 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import scala.annotation.tailrec - -/** A template trait for indexed sequences of type `IndexedSeq[A]` which optimizes - * the implementation of several methods under the assumption of fast random access. 
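The removed `IndexedSeqLike` derives `iterator` (via the `Elements` class), `toBuffer`, and the bulk operations from just `length` and `apply`; a small sketch of a custom indexed sequence written in that style (the `Squares` class is illustrative):

// Only `length` and `apply` are defined; iteration, take/drop/slice, etc.
// come from IndexedSeqLike / IndexedSeqOptimized, the Scala 2 traits shown above.
class Squares(val length: Int) extends scala.collection.immutable.IndexedSeq[Int] {
  def apply(i: Int): Int = {
    if (i < 0 || i >= length) throw new IndexOutOfBoundsException(i.toString)
    i * i
  }
}

object SquaresDemo {
  def main(args: Array[String]): Unit = {
    val sq = new Squares(5)
    assert(sq.toList == List(0, 1, 4, 9, 16))
    assert(sq.iterator.drop(2).take(2).toList == List(4, 9)) // Elements-based iterator
  }
}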
- * - * $indexedSeqInfo - * - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { self => - - override /*IterableLike*/ - def isEmpty: Boolean = { length == 0 } - - override /*IterableLike*/ - def foreach[U](f: A => U): Unit = { - var i = 0 - val len = length - while (i < len) { f(this(i)); i += 1 } - } - - private def prefixLengthImpl(p: A => Boolean, expectTrue: Boolean): Int = { - var i = 0 - while (i < length && p(apply(i)) == expectTrue) i += 1 - i - } - - override /*IterableLike*/ - def forall(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = true) == length - - override /*IterableLike*/ - def exists(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = false) != length - - override /*IterableLike*/ - def find(p: A => Boolean): Option[A] = { - val i = prefixLength(!p(_)) - if (i < length) Some(this(i)) else None - } - - @tailrec - private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = - if (start == end) z - else foldl(start + 1, end, op(z, this(start)), op) - - @tailrec - private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = - if (start == end) z - else foldr(start, end - 1, op(this(end - 1), z), op) - - override /*TraversableLike*/ - def foldLeft[B](z: B)(op: (B, A) => B): B = - foldl(0, length, z, op) - - override /*IterableLike*/ - def foldRight[B](z: B)(op: (A, B) => B): B = - foldr(0, length, z, op) - - override /*TraversableLike*/ - def reduceLeft[B >: A](op: (B, A) => B): B = - if (length > 0) foldl(1, length, this(0), op) else super[IndexedSeqLike].reduceLeft(op) - - override /*IterableLike*/ - def reduceRight[B >: A](op: (A, B) => B): B = - if (length > 0) foldr(0, length - 1, this(length - 1), op) else super[IndexedSeqLike].reduceRight(op) - - override /*IterableLike*/ - def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match { - case that: IndexedSeq[_] => - val b = bf(repr) - var i = 0 - val len = this.length min that.length - b.sizeHint(len) - while (i < len) { - b += ((this(i), that(i).asInstanceOf[B])) - i += 1 - } - b.result() - case _ => - super[IndexedSeqLike].zip[A1, B, That](that)(bf) - } - - override /*IterableLike*/ - def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = { - val b = bf(repr) - val len = length - b.sizeHint(len) - var i = 0 - while (i < len) { - b += ((this(i), i)) - i += 1 - } - b.result() - } - - override /*IterableLike*/ - def slice(from: Int, until: Int): Repr = { - val lo = math.max(from, 0) - val hi = math.min(math.max(until, 0), length) - val elems = math.max(hi - lo, 0) - val b = newBuilder - b.sizeHint(elems) - - var i = lo - while (i < hi) { - b += self(i) - i += 1 - } - b.result() - } - - override /*IterableLike*/ - def head: A = if (isEmpty) super[IndexedSeqLike].head else this(0) - - override /*TraversableLike*/ - def tail: Repr = if (isEmpty) super[IndexedSeqLike].tail else slice(1, length) - - override /*TraversableLike*/ - def last: A = if (length > 0) this(length - 1) else super[IndexedSeqLike].last - - override /*IterableLike*/ - def init: Repr = if (length > 0) slice(0, length - 1) else super[IndexedSeqLike].init - - override /*TraversableLike*/ - def take(n: Int): Repr = slice(0, n) - - override /*TraversableLike*/ - def drop(n: Int): Repr = slice(n, length) - - override /*IterableLike*/ - def takeRight(n: Int): Repr = slice(length - math.max(n, 0), length) - - override /*IterableLike*/ - 
def dropRight(n: Int): Repr = slice(0, length - math.max(n, 0)) - - override /*TraversableLike*/ - def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n)) - - override /*IterableLike*/ - def takeWhile(p: A => Boolean): Repr = take(prefixLength(p)) - - override /*TraversableLike*/ - def dropWhile(p: A => Boolean): Repr = drop(prefixLength(p)) - - override /*TraversableLike*/ - def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p)) - - override /*IterableLike*/ - def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { - case that: IndexedSeq[_] => - val len = length - len == that.length && { - var i = 0 - while (i < len && this(i) == that(i)) i += 1 - i == len - } - case _ => - super[IndexedSeqLike].sameElements(that) - } - - override /*IterableLike*/ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - var i = 0 - var j = start - val end = length min len min (xs.length - start) - while (i < end) { - xs(j) = this(i) - i += 1 - j += 1 - } - } - - // Overridden methods from Seq - - override /*SeqLike*/ - def lengthCompare(len: Int): Int = length - len - - override /*SeqLike*/ - def segmentLength(p: A => Boolean, from: Int): Int = { - val len = length - var i = from - while (i < len && p(this(i))) i += 1 - i - from - } - - private def negLength(n: Int) = if (n >= length) -1 else n - - override /*SeqLike*/ - def indexWhere(p: A => Boolean, from: Int): Int = { - val start = math.max(from, 0) - negLength(start + segmentLength(!p(_), start)) - } - - override /*SeqLike*/ - def lastIndexWhere(p: A => Boolean, end: Int): Int = { - var i = math.min(end, length - 1) - while (i >= 0 && !p(this(i))) i -= 1 - i - } - - override /*SeqLike*/ - def reverse: Repr = { - val b = newBuilder - b.sizeHint(length) - var i = length - while (0 < i) { - i -= 1 - b += this(i) - } - b.result() - } - - override /*SeqLike*/ - def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private var i = self.length - def hasNext: Boolean = 0 < i - def next(): A = - if (0 < i) { - i -= 1 - self(i) - } else Iterator.empty.next() - } - - override /*SeqLike*/ - def startsWith[B](that: GenSeq[B], offset: Int): Boolean = that match { - case that: IndexedSeq[_] => - var i = offset - var j = 0 - val thisLen = length - val thatLen = that.length - while (i < thisLen && j < thatLen && this(i) == that(j)) { - i += 1 - j += 1 - } - j == thatLen - case _ => - var i = offset - val thisLen = length - val thatElems = that.iterator - while (i < thisLen && thatElems.hasNext) { - if (this(i) != thatElems.next()) - return false - - i += 1 - } - !thatElems.hasNext - } - - override /*SeqLike*/ - def endsWith[B](that: GenSeq[B]): Boolean = that match { - case that: IndexedSeq[_] => - var i = length - 1 - var j = that.length - 1 - - (j <= i) && { - while (j >= 0) { - if (this(i) != that(j)) - return false - i -= 1 - j -= 1 - } - true - } - case _ => - super[IndexedSeqLike].endsWith(that) - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/Iterable.scala b/tests/scala2-library/src/library/scala/collection/Iterable.scala deleted file mode 100644 index afbffd36c694..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Iterable.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ -import mutable.Builder - -/** A base 
trait for iterable collections. - * $iterableInfo - */ -trait Iterable[+A] extends Traversable[A] - with GenIterable[A] - with GenericTraversableTemplate[A, Iterable] - with IterableLike[A, Iterable[A]] { - override def companion: GenericCompanion[Iterable] = Iterable - - override def seq = this - - /* The following methods are inherited from trait IterableLike - * - override def iterator: Iterator[A] - override def takeRight(n: Int): Iterable[A] - override def dropRight(n: Int): Iterable[A] - override def sameElements[B >: A](that: GenIterable[B]): Boolean - override def view - override def view(from: Int, until: Int) - */ - -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - * @define coll iterable collection - * @define Coll `Iterable` - */ -object Iterable extends TraversableFactory[Iterable] { - - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, Iterable[A]] = immutable.Iterable.newBuilder[A] -} - -/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ -abstract class AbstractIterable[+A] extends AbstractTraversable[A] with Iterable[A] diff --git a/tests/scala2-library/src/library/scala/collection/IterableLike.scala b/tests/scala2-library/src/library/scala/collection/IterableLike.scala deleted file mode 100644 index 876ea0c6545b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IterableLike.scala +++ /dev/null @@ -1,316 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import immutable.Stream - -/** A template trait for iterable collections of type `Iterable[A]`. - * $iterableInfo - * @define iterableInfo - * This is a base trait for all $mutability Scala collections that define an `iterator` - * method to step through one-by-one the collection's elements. - * Implementations of this trait need to provide a concrete method with - * signature: - * {{{ - * def iterator: Iterator[A] - * }}} - * They also need to provide a method `newBuilder` - * which creates a builder for collections of the same kind. - * - * This trait implements `Iterable`'s `foreach` - * method by stepping through all elements using `iterator`. - * Subclasses should re-implement `foreach` with something more efficient, - * if possible. - - * This trait adds methods `iterator`, `sameElements`, - * `takeRight`, `dropRight` to the methods inherited - * from trait
- * `Traversable`. - - * Note: This trait replaces every method that uses `break` in - * `TraversableLike` by an iterator version. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @tparam A the element type of the collection - * @tparam Repr the type of the actual collection containing the elements. - * - * @define Coll Iterable - * @define coll iterable collection - */ -trait IterableLike[+A, +Repr] extends Any with Equals with TraversableLike[A, Repr] with GenIterableLike[A, Repr] { -self => - - override protected[this] def thisCollection: Iterable[A] = this.asInstanceOf[Iterable[A]] - override protected[this] def toCollection(repr: Repr): Iterable[A] = repr.asInstanceOf[Iterable[A]] - - /** Creates a new iterator over all elements contained in this iterable object. - * - * @return the new iterator - */ - def iterator: Iterator[A] - - /** Applies a function `f` to all elements of this $coll. - * - * Note: this method underlies the implementation of most other bulk operations. - * Subclasses should re-implement this method if a more efficient implementation exists. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - */ - def foreach[U](f: A => U): Unit = - iterator.foreach(f) - - override /*TraversableLike*/ def forall(p: A => Boolean): Boolean = - iterator.forall(p) - override /*TraversableLike*/ def exists(p: A => Boolean): Boolean = - iterator.exists(p) - override /*TraversableLike*/ def find(p: A => Boolean): Option[A] = - iterator.find(p) - override /*TraversableLike*/ def isEmpty: Boolean = - !iterator.hasNext - override /*TraversableLike*/ def foldRight[B](z: B)(op: (A, B) => B): B = - iterator.foldRight(z)(op) - override /*TraversableLike*/ def reduceRight[B >: A](op: (A, B) => B): B = - iterator.reduceRight(op) - - - /** Returns this $coll as an iterable collection. - * - * A new collection will not be built; lazy collections will stay lazy. - * - * $willNotTerminateInf - * @return an `Iterable` containing all elements of this $coll. - */ - override /*TraversableLike*/ def toIterable: Iterable[A] = - thisCollection - - /** Returns an Iterator over the elements in this $coll. Produces the same - * result as `iterator`. - * $willNotTerminateInf - * @return an Iterator containing all elements of this $coll. 
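As the removed `IterableLike` Scaladoc notes, a concrete collection only has to supply `iterator` (plus a builder); a minimal sketch of an `Iterable` defined that way (the `Countdown` class is illustrative):

// A minimal Iterable: foreach, forall, exists, find, take, toList, ... are all
// derived from the single abstract `iterator` method described above.
class Countdown(from: Int) extends scala.collection.Iterable[Int] {
  def iterator: Iterator[Int] = Iterator.range(from, 0, -1)
}

object CountdownDemo {
  def main(args: Array[String]): Unit = {
    val c = new Countdown(3)
    assert(c.toList == List(3, 2, 1))
    assert(c.exists(_ == 2) && c.forall(_ > 0))
    assert(c.take(2).toList == List(3, 2)) // take(2) builds a new Iterable via newBuilder
  }
}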
- */ - @deprecatedOverriding("toIterator should stay consistent with iterator for all Iterables: override iterator instead.", "2.11.0") - override def toIterator: Iterator[A] = iterator - - override /*TraversableLike*/ def head: A = - iterator.next() - - override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = { - val lo = math.max(from, 0) - val elems = until - lo - val b = newBuilder - if (elems <= 0) b.result() - else { - b.sizeHintBounded(elems, this) - var i = 0 - val it = iterator drop lo - while (i < elems && it.hasNext) { - b += it.next - i += 1 - } - b.result() - } - } - - override /*TraversableLike*/ def take(n: Int): Repr = { - val b = newBuilder - - if (n <= 0) b.result() - else { - b.sizeHintBounded(n, this) - var i = 0 - val it = iterator - while (i < n && it.hasNext) { - b += it.next - i += 1 - } - b.result() - } - } - - override /*TraversableLike*/ def drop(n: Int): Repr = { - val b = newBuilder - val lo = math.max(0, n) - b.sizeHint(this, -lo) - var i = 0 - val it = iterator - while (i < n && it.hasNext) { - it.next() - i += 1 - } - (b ++= it).result() - } - - override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = { - val b = newBuilder - val it = iterator - while (it.hasNext) { - val x = it.next() - if (!p(x)) return b.result() - b += x - } - b.result() - } - - /** Partitions elements in fixed size ${coll}s. - * @see [[scala.collection.Iterator]], method `grouped` - * - * @param size the number of elements per group - * @return An iterator producing ${coll}s of size `size`, except the - * last will be less than size `size` if the elements don't divide evenly. - */ - def grouped(size: Int): Iterator[Repr] = - for (xs <- iterator grouped size) yield { - val b = newBuilder - b ++= xs - b.result() - } - - /** Groups elements in fixed size blocks by passing a "sliding window" - * over them (as opposed to partitioning them, as is done in `grouped`.) - * The "sliding window" step is set to one. - * @see [[scala.collection.Iterator]], method `sliding` - * - * @param size the number of elements per group - * @return An iterator producing ${coll}s of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - */ - def sliding(size: Int): Iterator[Repr] = sliding(size, 1) - - /** Groups elements in fixed size blocks by passing a "sliding window" - * over them (as opposed to partitioning them, as is done in grouped.) - * @see [[scala.collection.Iterator]], method `sliding` - * - * @param size the number of elements per group - * @param step the distance between the first elements of successive - * groups - * @return An iterator producing ${coll}s of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - */ - def sliding(size: Int, step: Int): Iterator[Repr] = - for (xs <- iterator.sliding(size, step)) yield { - val b = newBuilder - b ++= xs - b.result() - } - - /** Selects last ''n'' elements. - * $orderDependent - * - * @param n the number of elements to take - * @return a $coll consisting only of the last `n` elements of this $coll, or else the - * whole $coll, if it has less than `n` elements. 
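A quick sketch of the difference between `grouped` and `sliding` as specified above (sample values are illustrative):

object GroupedVsSliding {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3, 4, 5)
    // grouped: disjoint blocks, the last one may be shorter
    assert(xs.grouped(2).toList == List(List(1, 2), List(3, 4), List(5)))
    // sliding: overlapping windows with step 1, truncated at the end
    assert(xs.sliding(2).toList == List(List(1, 2), List(2, 3), List(3, 4), List(4, 5)))
    // sliding with an explicit step
    assert(xs.sliding(3, 2).toList == List(List(1, 2, 3), List(3, 4, 5)))
  }
}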
- */ - def takeRight(n: Int): Repr = { - val b = newBuilder - b.sizeHintBounded(n, this) - val lead = this.iterator drop n - val it = this.iterator - while (lead.hasNext) { - lead.next() - it.next() - } - while (it.hasNext) b += it.next() - b.result() - } - - /** Selects all elements except last ''n'' ones. - * $orderDependent - * - * @param n The number of elements to take - * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the - * empty $coll, if this $coll has less than `n` elements. - */ - def dropRight(n: Int): Repr = { - val b = newBuilder - if (n >= 0) b.sizeHint(this, -n) - val lead = iterator drop n - val it = iterator - while (lead.hasNext) { - b += it.next - lead.next() - } - b.result() - } - - override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - var i = start - val end = (start + len) min xs.length - val it = iterator - while (i < end && it.hasNext) { - xs(i) = it.next() - i += 1 - } - } - - def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = { - val b = bf(repr) - val these = this.iterator - val those = that.iterator - while (these.hasNext && those.hasNext) - b += ((these.next(), those.next())) - b.result() - } - - def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = { - val b = bf(repr) - val these = this.iterator - val those = that.iterator - while (these.hasNext && those.hasNext) - b += ((these.next(), those.next())) - while (these.hasNext) - b += ((these.next(), thatElem)) - while (those.hasNext) - b += ((thisElem, those.next())) - b.result() - } - - def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = { - val b = bf(repr) - var i = 0 - for (x <- this) { - b += ((x, i)) - i += 1 - } - b.result() - } - - def sameElements[B >: A](that: GenIterable[B]): Boolean = { - val these = this.iterator - val those = that.iterator - while (these.hasNext && those.hasNext) - if (these.next != those.next) - return false - - !these.hasNext && !those.hasNext - } - - override /*TraversableLike*/ def toStream: Stream[A] = iterator.toStream - - /** Method called from equality methods, so that user-defined subclasses can - * refuse to be equal to other collections of the same kind. - * @param that The object with which this $coll should be compared - * @return `true`, if this $coll can possibly equal `that`, `false` otherwise. The test - * takes into consideration only the run-time types of objects but ignores their elements. 
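A short sketch of `zip`, `zipAll`, `zipWithIndex`, and `sameElements` as documented above:

object ZipExamples {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3)
    val ys = List("a", "b")
    // zip stops at the shorter collection
    assert((xs zip ys) == List((1, "a"), (2, "b")))
    // zipAll pads the shorter side with the given defaults
    assert(xs.zipAll(ys, 0, "?") == List((1, "a"), (2, "b"), (3, "?")))
    // zipWithIndex pairs each element with its position
    assert(ys.zipWithIndex == List(("a", 0), ("b", 1)))
    // sameElements compares element by element across collection kinds
    assert(xs sameElements Vector(1, 2, 3))
  }
}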
- */ - override /*TraversableLike*/ def canEqual(that: Any) = true - - override /*TraversableLike*/ def view: IterableView[A, Repr] = new IterableView[A, Repr] { - protected lazy val underlying = self.repr - override def iterator = self.iterator - } - - override /*TraversableLike*/ def view(from: Int, until: Int) = view.slice(from, until) -} diff --git a/tests/scala2-library/src/library/scala/collection/IterableProxy.scala b/tests/scala2-library/src/library/scala/collection/IterableProxy.scala deleted file mode 100644 index 5f4d69c4117c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IterableProxy.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -/** This trait implements a proxy for iterable objects. It forwards all calls - * to a different iterable object. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait IterableProxy[+A] extends Iterable[A] with IterableProxyLike[A, Iterable[A]] diff --git a/tests/scala2-library/src/library/scala/collection/IterableProxyLike.scala b/tests/scala2-library/src/library/scala/collection/IterableProxyLike.scala deleted file mode 100644 index f87089cba832..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IterableProxyLike.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ - -// Methods could be printed by cat IterableLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for Iterable objects. It forwards - * all calls to a different Iterable object. 
- * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]] - extends IterableLike[A, Repr] - with TraversableProxyLike[A, Repr] { - override def iterator: Iterator[A] = self.iterator - override def grouped(size: Int): Iterator[Repr] = self.grouped(size) - override def sliding(size: Int): Iterator[Repr] = self.sliding(size) - override def sliding(size: Int, step: Int): Iterator[Repr] = self.sliding(size, step) - override def takeRight(n: Int): Repr = self.takeRight(n) - override def dropRight(n: Int): Repr = self.dropRight(n) - override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf) - override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf) - override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = self.zipWithIndex(bf) - override def sameElements[B >: A](that: GenIterable[B]): Boolean = self.sameElements(that) - override def view = self.view - override def view(from: Int, until: Int) = self.view(from, until) -} diff --git a/tests/scala2-library/src/library/scala/collection/IterableView.scala b/tests/scala2-library/src/library/scala/collection/IterableView.scala deleted file mode 100644 index 1964e9b19372..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IterableView.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ -import TraversableView.NoBuilder - -/** A base trait for non-strict views of `Iterable`s. - * $iterableViewInfo - */ -trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]] - -/** An object containing the necessary implicit definitions to make - * `IterableView`s work. Its definitions are generally not accessed directly by clients. - */ -object IterableView { - type Coll = TraversableView[_, _ <: Traversable[_]] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IterableView[A, Iterable[_]]] = - new CanBuildFrom[Coll, A, IterableView[A, Iterable[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder - } -} diff --git a/tests/scala2-library/src/library/scala/collection/IterableViewLike.scala b/tests/scala2-library/src/library/scala/collection/IterableViewLike.scala deleted file mode 100644 index db0bb7a4c44b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/IterableViewLike.scala +++ /dev/null @@ -1,164 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import immutable.Stream -import scala.language.implicitConversions - -/** A template trait for non-strict views of iterable collections. - * $iterableViewInfo - * - * @define iterableViewInfo - * $viewInfo - * All views for iterable collections are defined by re-interpreting the `iterator` method. 
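The removed `IterableViewLike` re-implements the transformers non-strictly; a small sketch of the laziness this buys with Scala 2.12 views (the counter is illustrative):

object ViewLaziness {
  def main(args: Array[String]): Unit = {
    var evaluated = 0
    val v = (1 to 100).view.map { x => evaluated += 1; x * 2 }
    // Nothing has been mapped yet: the view only records the transformation
    assert(evaluated == 0)
    // Forcing the first three elements maps only those three
    assert(v.take(3).toList == List(2, 4, 6))
    assert(evaluated == 3)
  }
}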
- * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - * @tparam This the type of the view itself - */ -trait IterableViewLike[+A, - +Coll, - +This <: IterableView[A, Coll] with IterableViewLike[A, Coll, This]] - extends Iterable[A] - with IterableLike[A, This] - with TraversableView[A, Coll] - with TraversableViewLike[A, Coll, This] -{ self => - - /** Explicit instantiation of the `TransformedI` trait to reduce class file size in subclasses. */ - private[collection] abstract class AbstractTransformedI[+B] extends Iterable[B] with super[TraversableViewLike].TransformedT[B] with TransformedI[B] - - trait TransformedI[+B] extends IterableView[B, Coll] with super.TransformedT[B] { - def iterator: Iterator[B] - override def foreach[U](f: B => U): Unit = iterator foreach f - override def toString = viewToString - override def isEmpty = !iterator.hasNext - } - - trait EmptyViewI extends TransformedI[Nothing] with super.EmptyViewT { - final def iterator: Iterator[Nothing] = Iterator.empty - } - - trait ForcedI[B] extends super.ForcedT[B] with TransformedI[B] { - def iterator = forced.iterator - } - - trait SlicedI extends super.SlicedT with TransformedI[A] { - def iterator: Iterator[A] = self.iterator.slice(from, until) - } - - trait MappedI[B] extends super.MappedT[B] with TransformedI[B] { - def iterator = self.iterator map mapping - } - - trait FlatMappedI[B] extends super.FlatMappedT[B] with TransformedI[B] { - def iterator: Iterator[B] = self.iterator flatMap mapping - } - - trait AppendedI[B >: A] extends super.AppendedT[B] with TransformedI[B] { - def iterator = self.iterator ++ rest - } - - trait PrependedI[B >: A] extends super.PrependedT[B] with TransformedI[B] { - def iterator = fst.toIterator ++ self - } - - trait FilteredI extends super.FilteredT with TransformedI[A] { - def iterator = self.iterator filter pred - } - - trait TakenWhileI extends super.TakenWhileT with TransformedI[A] { - def iterator = self.iterator takeWhile pred - } - - trait DroppedWhileI extends super.DroppedWhileT with TransformedI[A] { - def iterator = self.iterator dropWhile pred - } - - trait ZippedI[B] extends TransformedI[(A, B)] { - protected[this] lazy val other: GenIterable[B] - def iterator: Iterator[(A, B)] = self.iterator zip other.iterator - final override protected[this] def viewIdentifier = "Z" - } - - trait ZippedAllI[A1 >: A, B] extends TransformedI[(A1, B)] { - protected[this] lazy val other: GenIterable[B] - protected[this] lazy val thisElem: A1 - protected[this] lazy val thatElem: B - final override protected[this] def viewIdentifier = "Z" - def iterator: Iterator[(A1, B)] = - self.iterator.zipAll(other.iterator, thisElem, thatElem) - } - - private[this] implicit def asThis(xs: TransformedI[A]): This = xs.asInstanceOf[This] - - /** Boilerplate method, to override in each subclass - * This method could be eliminated if Scala had virtual classes - */ - protected def newZipped[B](that: GenIterable[B]): TransformedI[(A, B)] = new AbstractTransformedI[(A, B)] with ZippedI[B] { lazy val other = that } - protected def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): TransformedI[(A1, B)] = new AbstractTransformedI[(A1, B)] with ZippedAllI[A1, B] { - lazy val other: GenIterable[B] = that - lazy val thisElem = _thisElem - lazy val thatElem = _thatElem - } - protected override def newForced[B](xs: => GenSeq[B]): TransformedI[B] = new 
AbstractTransformedI[B] with ForcedI[B] { lazy val forced = xs } - protected override def newAppended[B >: A](that: GenTraversable[B]): TransformedI[B] = new AbstractTransformedI[B] with AppendedI[B] { lazy val rest = that } - protected override def newMapped[B](f: A => B): TransformedI[B] = new AbstractTransformedI[B] with MappedI[B] { lazy val mapping = f } - protected override def newPrepended[B >: A](that: GenTraversable[B]): TransformedI[B] = new AbstractTransformedI[B] with PrependedI[B] { lazy val fst = that } - protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): TransformedI[B] = new AbstractTransformedI[B] with FlatMappedI[B] { lazy val mapping = f } - protected override def newFiltered(p: A => Boolean): TransformedI[A] = new AbstractTransformedI[A] with FilteredI { lazy val pred = p } - protected override def newSliced(_endpoints: SliceInterval): TransformedI[A] = new AbstractTransformedI[A] with SlicedI { lazy val endpoints = _endpoints } - protected override def newDroppedWhile(p: A => Boolean): TransformedI[A] = new AbstractTransformedI[A] with DroppedWhileI { lazy val pred = p } - protected override def newTakenWhile(p: A => Boolean): TransformedI[A] = new AbstractTransformedI[A] with TakenWhileI { lazy val pred = p } - - // After adding take and drop overrides to IterableLike, these overrides (which do nothing - // but duplicate the implementation in TraversableViewLike) had to be added to prevent the - // overrides in IterableLike from besting the overrides in TraversableViewLike when mixed - // together in e.g. SeqViewLike. This is a suboptimal situation. Examples of failing tests - // are run/bug2876 and run/viewtest. - protected override def newTaken(n: Int): TransformedI[A] = newSliced(SliceInterval(0, n)) - protected override def newDropped(n: Int): TransformedI[A] = newSliced(SliceInterval(n, Int.MaxValue)) - override def drop(n: Int): This = newDropped(n) - override def take(n: Int): This = newTaken(n) - - override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = { - newZipped(that).asInstanceOf[That] -// was: val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newZipped(that).asInstanceOf[That] -// else super.zip[A1, B, That](that)(bf) - } - - override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That = - zip[A1, Int, That](Stream from 0)(bf) - - override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That = - newZippedAll(that, thisElem, thatElem).asInstanceOf[That] - - override def grouped(size: Int): Iterator[This] = - self.iterator grouped size map (x => newForced(x).asInstanceOf[This]) - - override def sliding(size: Int, step: Int): Iterator[This] = - self.iterator.sliding(size, step) map (x => newForced(x).asInstanceOf[This]) - - override def sliding(size: Int): Iterator[This] = - sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented. 
- - override def dropRight(n: Int): This = - take(thisSeq.length - math.max(n, 0)) - - override def takeRight(n: Int): This = - drop(thisSeq.length - math.max(n, 0)) - - override def stringPrefix = "IterableView" -} diff --git a/tests/scala2-library/src/library/scala/collection/Iterator.scala b/tests/scala2-library/src/library/scala/collection/Iterator.scala deleted file mode 100644 index 7510f315aa24..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Iterator.scala +++ /dev/null @@ -1,1417 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import mutable.ArrayBuffer -import scala.annotation.{tailrec, migration} -import scala.annotation.unchecked.{uncheckedVariance => uV} -import immutable.Stream - -/** The `Iterator` object provides various functions for creating specialized iterators. - * - * @author Martin Odersky - * @author Matthias Zenger - * @version 2.8 - * @since 2.8 - */ -object Iterator { - - /** With the advent of `TraversableOnce` and `Iterator`, it can be useful to have a builder which - * operates on `Iterator`s so they can be treated uniformly along with the collections. - * See `scala.util.Random.shuffle` for an example. - */ - implicit def IteratorCanBuildFrom[A]: TraversableOnce.BufferedCanBuildFrom[A, Iterator] = new { - def bufferToColl[B](coll: ArrayBuffer[B]) = coll.iterator - def traversableToColl[B](t: GenTraversable[B]) = t.toIterator - } - - /** The iterator which produces no values. */ - val empty: Iterator[Nothing] = new AbstractIterator[Nothing] { - def hasNext: Boolean = false - def next(): Nothing = throw new NoSuchElementException("next on empty iterator") - } - - /** Creates an iterator which produces a single element. - * '''Note:''' Equivalent, but more efficient than Iterator(elem) - * - * @param elem the element - * @return An iterator which produces `elem` on the first call to `next`, - * and which has no further elements. - */ - def single[A](elem: A): Iterator[A] = new AbstractIterator[A] { - private var hasnext = true - def hasNext: Boolean = hasnext - def next(): A = - if (hasnext) { hasnext = false; elem } - else empty.next() - } - - /** Creates an iterator with given elements. - * - * @param elems The elements returned one-by-one from the iterator - * @return An iterator which produces the given elements on the - * first calls to `next`, and which has no further elements. - */ - def apply[A](elems: A*): Iterator[A] = elems.iterator - - /** Creates iterator that produces the results of some element computation a number of times. - * - * @param len the number of elements returned by the iterator. - * @param elem the element computation - * @return An iterator that produces the results of `n` evaluations of `elem`. - */ - def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { - private var i = 0 - def hasNext: Boolean = i < len - def next(): A = - if (hasNext) { i += 1; elem } - else empty.next() - } - - /** Creates an iterator producing the values of a given function over a range of integer values starting from 0. - * - * @param end The number of elements returned by the iterator - * @param f The function computing element values - * @return An iterator that produces the values `f(0), ..., f(n -1)`. 
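A minimal sketch of the `Iterator` factory methods documented above (`single`, `apply`, `fill`):

object IteratorFactories {
  def main(args: Array[String]): Unit = {
    assert(Iterator.single(42).toList == List(42))
    assert(Iterator(1, 2).toList == List(1, 2))       // apply(elems: A*)
    // fill re-evaluates the element expression for every produced value
    var calls = 0
    val filled = Iterator.fill(3) { calls += 1; calls }
    assert(filled.toList == List(1, 2, 3) && calls == 3)
  }
}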
- */ - def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { - private var i = 0 - def hasNext: Boolean = i < end - def next(): A = - if (hasNext) { val result = f(i); i += 1; result } - else empty.next() - } - - /** Creates nn iterator returning successive values in some integer interval. - * - * @param start the start value of the iterator - * @param end the end value of the iterator (the first value NOT returned) - * @return the iterator producing values `start, start + 1, ..., end - 1` - */ - def range(start: Int, end: Int): Iterator[Int] = range(start, end, 1) - - /** An iterator producing equally spaced values in some integer interval. - * - * @param start the start value of the iterator - * @param end the end value of the iterator (the first value NOT returned) - * @param step the increment value of the iterator (must be positive or negative) - * @return the iterator producing values `start, start + step, ...` up to, but excluding `end` - */ - def range(start: Int, end: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { - if (step == 0) throw new IllegalArgumentException("zero step") - private var i = start - def hasNext: Boolean = (step <= 0 || i < end) && (step >= 0 || i > end) - def next(): Int = - if (hasNext) { val result = i; i += step; result } - else empty.next() - } - - /** Creates an infinite iterator that repeatedly applies a given function to the previous result. - * - * @param start the start value of the iterator - * @param f the function that's repeatedly applied - * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { - private[this] var first = true - private[this] var acc = start - def hasNext: Boolean = true - def next(): T = { - if (first) first = false - else acc = f(acc) - - acc - } - } - - /** Creates an infinite-length iterator which returns successive values from some start value. - - * @param start the start value of the iterator - * @return the iterator producing the infinite sequence of values `start, start + 1, start + 2, ...` - */ - def from(start: Int): Iterator[Int] = from(start, 1) - - /** Creates an infinite-length iterator returning values equally spaced apart. - * - * @param start the start value of the iterator - * @param step the increment between successive values - * @return the iterator producing the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...` - */ - def from(start: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { - private var i = start - def hasNext: Boolean = true - def next(): Int = { val result = i; i += step; result } - } - - /** Creates an infinite-length iterator returning the results of evaluating an expression. - * The expression is recomputed for every element. - * - * @param elem the element computation. - * @return the iterator containing an infinite number of results of evaluating `elem`. - */ - def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { - def hasNext = true - def next = elem - } - - /** Creates an iterator to which other iterators can be appended efficiently. - * Nested ConcatIterators are merged to avoid blowing the stack. 
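A short sketch of the remaining factories described above (`range`, `tabulate`, `iterate`, `from`, `continually`); the unbounded ones are trimmed with `take` before being forced:

object MoreIteratorFactories {
  def main(args: Array[String]): Unit = {
    assert(Iterator.range(1, 10, 3).toList == List(1, 4, 7))           // end is exclusive
    assert(Iterator.tabulate(4)(i => i * i).toList == List(0, 1, 4, 9))
    assert(Iterator.iterate(1)(_ * 2).take(5).toList == List(1, 2, 4, 8, 16))
    assert(Iterator.from(10, -2).take(3).toList == List(10, 8, 6))
    assert(Iterator.continually("x").take(3).mkString == "xxx")
  }
}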
- */ - private final class ConcatIterator[+A](private var current: Iterator[A @uV]) extends Iterator[A] { - private var tail: ConcatIteratorCell[A @uV] = null - private var last: ConcatIteratorCell[A @uV] = null - private var currentHasNextChecked = false - - // Advance current to the next non-empty iterator - // current is set to null when all iterators are exhausted - @tailrec - private[this] def advance(): Boolean = { - if (tail eq null) { - current = null - last = null - false - } - else { - current = tail.headIterator - tail = tail.tail - merge() - if (currentHasNextChecked) true - else if (current.hasNext) { - currentHasNextChecked = true - true - } else advance() - } - } - - // If the current iterator is a ConcatIterator, merge it into this one - @tailrec - private[this] def merge(): Unit = - if (current.isInstanceOf[ConcatIterator[_]]) { - val c = current.asInstanceOf[ConcatIterator[A]] - current = c.current - currentHasNextChecked = c.currentHasNextChecked - if (c.tail ne null) { - c.last.tail = tail - tail = c.tail - } - merge() - } - - def hasNext = - if (currentHasNextChecked) true - else if (current eq null) false - else if (current.hasNext) { - currentHasNextChecked = true - true - } else advance() - - def next() = - if (hasNext) { - currentHasNextChecked = false - current.next() - } else Iterator.empty.next() - - override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = { - val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]] - if(tail eq null) { - tail = c - last = c - } else { - last.tail = c - last = c - } - if(current eq null) current = Iterator.empty - this - } - } - - private[this] final class ConcatIteratorCell[A](head: => GenTraversableOnce[A], var tail: ConcatIteratorCell[A]) { - def headIterator: Iterator[A] = head.toIterator - } - - /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. - * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. - */ - private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { - private var remaining = limit - private var dropping = start - @inline private def unbounded = remaining < 0 - private def skip(): Unit = - while (dropping > 0) { - if (underlying.hasNext) { - underlying.next() - dropping -= 1 - } else - dropping = 0 - } - def hasNext = { skip(); remaining != 0 && underlying.hasNext } - def next() = { - skip() - if (remaining > 0) { - remaining -= 1 - underlying.next() - } - else if (unbounded) underlying.next() - else empty.next() - } - override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { - val lo = from max 0 - def adjustedBound = - if (unbounded) -1 - else 0 max (remaining - lo) - val rest = - if (until < 0) adjustedBound // respect current bound, if any - else if (until <= lo) 0 // empty - else if (unbounded) until - lo // now finite - else adjustedBound min (until - lo) // keep lesser bound - if (rest == 0) empty - else { - dropping += lo - remaining = rest - this - } - } - } -} - -import Iterator.empty - -/** Iterators are data structures that allow to iterate over a sequence - * of elements. They have a `hasNext` method for checking - * if there is a next element available, and a `next` method - * which returns the next element and advances the iterator. - * - * An iterator is mutable: most operations on it change its state. 
While it is often used - * to iterate through the elements of a collection, it can also be used without - * being backed by any collection (see constructors on the companion object). - * - * It is of particular importance to note that, unless stated otherwise, ''one should never - * use an iterator after calling a method on it''. The two most important exceptions - * are also the sole abstract methods: `next` and `hasNext`. - * - * Both these methods can be called any number of times without having to discard the - * iterator. Note that even `hasNext` may cause mutation -- such as when iterating - * from an input stream, where it will block until the stream is closed or some - * input becomes available. - * - * Consider this example for safe and unsafe use: - * - * {{{ - * def f[A](it: Iterator[A]) = { - * if (it.hasNext) { // Safe to reuse "it" after "hasNext" - * it.next // Safe to reuse "it" after "next" - * val remainder = it.drop(2) // it is *not* safe to use "it" again after this line! - * remainder.take(2) // it is *not* safe to use "remainder" after this line! - * } else it - * } - * }}} - * - * @author Martin Odersky, Matthias Zenger - * @version 2.8 - * @since 1 - * @define willNotTerminateInf - * Note: will not terminate for infinite iterators. - * @define mayNotTerminateInf - * Note: may not terminate for infinite iterators. - * @define preservesIterator - * The iterator remains valid for further use whatever result is returned. - * @define consumesIterator - * After calling this method, one should discard the iterator it was called - * on. Using it is undefined and subject to change. - * @define consumesAndProducesIterator - * After calling this method, one should discard the iterator it was called - * on, and use only the iterator that was returned. Using the old iterator - * is undefined, subject to change, and may result in changes to the new - * iterator as well. - * @define consumesTwoAndProducesOneIterator - * After calling this method, one should discard the iterator it was called - * on, as well as the one passed as a parameter, and use only the iterator - * that was returned. Using the old iterators is undefined, subject to change, - * and may result in changes to the new iterator as well. - * @define consumesOneAndProducesTwoIterators - * After calling this method, one should discard the iterator it was called - * on, and use only the iterators that were returned. Using the old iterator - * is undefined, subject to change, and may result in changes to the new - * iterators as well. - * @define consumesTwoIterators - * After calling this method, one should discard the iterator it was called - * on, as well as the one passed as parameter. Using the old iterators is - * undefined and subject to change. - */ -trait Iterator[+A] extends TraversableOnce[A] { - self => - - def seq: Iterator[A] = this - - /** Tests whether this iterator can provide another element. - * - * @return `true` if a subsequent call to `next` will yield an element, - * `false` otherwise. - * @note Reuse: $preservesIterator - */ - def hasNext: Boolean - - /** Produces the next element of this iterator. - * - * @return the next element of this iterator, if `hasNext` is `true`, - * undefined behavior otherwise. - * @note Reuse: $preservesIterator - */ - def next(): A - - /** Tests whether this iterator is empty. - * - * @return `true` if hasNext is false, `false` otherwise. 
- * @note Reuse: $preservesIterator - */ - def isEmpty: Boolean = !hasNext - - /** Tests whether this Iterator can be repeatedly traversed. - * - * @return `false` - * @note Reuse: $preservesIterator - */ - def isTraversableAgain = false - - /** Tests whether this Iterator has a known size. - * - * @return `true` for empty Iterators, `false` otherwise. - * @note Reuse: $preservesIterator - */ - def hasDefiniteSize = isEmpty - - /** Selects first ''n'' values of this iterator. - * - * @param n the number of values to take - * @return an iterator producing only the first `n` values of this iterator, or else the - * whole iterator, if it produces fewer than `n` values. - * @note Reuse: $consumesAndProducesIterator - */ - def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) - - /** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller. - * - * @param n the number of elements to drop - * @return an iterator which produces all values of the current iterator, except - * it omits the first `n` values. - * @note Reuse: $consumesAndProducesIterator - */ - def drop(n: Int): Iterator[A] = { - var j = 0 - while (j < n && hasNext) { - next() - j += 1 - } - this - } - - /** Creates an iterator returning an interval of the values produced by this iterator. - * - * @param from the index of the first element in this iterator which forms part of the slice. - * If negative, the slice starts at zero. - * @param until the index of the first element following the slice. If negative, the slice is empty. - * @return an iterator which advances this iterator past the first `from` elements using `drop`, - * and then takes `until - from` elements, using `take`. - * @note Reuse: $consumesAndProducesIterator - */ - def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) - - /** Creates an optionally bounded slice, unbounded if `until` is negative. */ - protected def sliceIterator(from: Int, until: Int): Iterator[A] = { - val lo = from max 0 - val rest = - if (until < 0) -1 // unbounded - else if (until <= lo) 0 // empty - else until - lo // finite - - if (rest == 0) empty - else new Iterator.SliceIterator(this, lo, rest) - } - - /** Creates a new iterator that maps all produced values of this iterator - * to new values using a transformation function. - * - * @param f the transformation function - * @return a new iterator which transforms every value produced by this - * iterator by applying the function `f` to it. - * @note Reuse: $consumesAndProducesIterator - */ - def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { - def hasNext = self.hasNext - def next() = f(self.next()) - } - - /** Concatenates this iterator with another. - * - * @param that the other iterator - * @return a new iterator that first yields the values produced by this - * iterator followed by the values produced by iterator `that`. - * @note Reuse: $consumesTwoAndProducesOneIterator - * - * @usecase def ++(that: => Iterator[A]): Iterator[A] - * @inheritdoc - */ - def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator.ConcatIterator(self) ++ that - - /** Creates a new iterator by applying a function to all values produced by this iterator - * and concatenating the results. - * - * @param f the function to apply on each element. - * @return the iterator resulting from applying the given iterator-valued function - * `f` to each value produced by this iterator and concatenating the results. 
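The Scaladoc above stresses that most `Iterator` operations consume the receiver and return a new iterator; a sketch of `map`, `flatMap`, `drop`, and `slice` under that rule (the `fresh` helper is illustrative):

object IteratorTransforms {
  def main(args: Array[String]): Unit = {
    def fresh = Iterator(1, 2, 3, 4)          // build a new iterator per use
    assert(fresh.map(_ * 10).toList == List(10, 20, 30, 40))
    assert(fresh.flatMap(x => Iterator(x, -x)).take(4).toList == List(1, -1, 2, -2))
    assert(fresh.drop(2).toList == List(3, 4))
    assert(fresh.slice(1, 3).toList == List(2, 3))
    // After a transformation, only the returned iterator should be used:
    val it = fresh
    val dropped = it.drop(1)                  // `it` must be considered consumed now
    assert(dropped.toList == List(2, 3, 4))
  }
}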
- * @note Reuse: $consumesAndProducesIterator - */ - def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] { - private var cur: Iterator[B] = empty - private def nextCur() { cur = f(self.next()).toIterator } - def hasNext: Boolean = { - // Equivalent to cur.hasNext || self.hasNext && { nextCur(); hasNext } - // but slightly shorter bytecode (better JVM inlining!) - while (!cur.hasNext) { - if (!self.hasNext) return false - nextCur() - } - true - } - def next(): B = (if (hasNext) cur else empty).next() - } - - /** Returns an iterator over all the elements of this iterator that satisfy the predicate `p`. - * The order of the elements is preserved. - * - * @param p the predicate used to test values. - * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. - * @note Reuse: $consumesAndProducesIterator - */ - def filter(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { - // TODO 2.12 - Make a full-fledged FilterImpl that will reverse sense of p - private var hd: A = _ - private var hdDefined: Boolean = false - - def hasNext: Boolean = hdDefined || { - do { - if (!self.hasNext) return false - hd = self.next() - } while (!p(hd)) - hdDefined = true - true - } - - def next() = if (hasNext) { hdDefined = false; hd } else empty.next() - } - - /** Tests whether every element of this iterator relates to the - * corresponding element of another collection by satisfying a test predicate. - * - * @param that the other collection - * @param p the test predicate, which relates elements from both collections - * @tparam B the type of the elements of `that` - * @return `true` if both collections have the same length and - * `p(x, y)` is `true` for all corresponding elements `x` of this iterator - * and `y` of `that`, otherwise `false` - */ - def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = { - val that0 = that.toIterator - while (hasNext && that0.hasNext) - if (!p(next(), that0.next())) return false - - hasNext == that0.hasNext - } - - /** Creates an iterator over all the elements of this iterator that - * satisfy the predicate `p`. The order of the elements - * is preserved. - * - * '''Note:''' `withFilter` is the same as `filter` on iterators. It exists so that - * for-expressions with filters work over iterators. - * - * @param p the predicate used to test values. - * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. - * @note Reuse: $consumesAndProducesIterator - */ - def withFilter(p: A => Boolean): Iterator[A] = filter(p) - - /** Creates an iterator over all the elements of this iterator which - * do not satisfy a predicate p. - * - * @param p the predicate used to test values. - * @return an iterator which produces those values of this iterator which do not satisfy the predicate `p`. - * @note Reuse: $consumesAndProducesIterator - */ - def filterNot(p: A => Boolean): Iterator[A] = filter(!p(_)) - - /** Creates an iterator by transforming values - * produced by this iterator with a partial function, dropping those - * values for which the partial function is not defined. - * - * @param pf the partial function which filters and maps the iterator. - * @return a new iterator which yields each value `x` produced by this iterator for - * which `pf` is defined the image `pf(x)`. - * @note Reuse: $consumesAndProducesIterator - */ - @migration("`collect` has changed. 
The previous behavior can be reproduced with `toSeq`.", "2.8.0") - def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] { - // Manually buffer to avoid extra layer of wrapping with buffered - private[this] var hd: A = _ - - // Little state machine to keep track of where we are - // Seek = 0; Found = 1; Empty = -1 - // Not in vals because scalac won't make them static (@inline def only works with -optimize) - // BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC! - private[this] var status = 0/*Seek*/ - - def hasNext = { - while (status == 0/*Seek*/) { - if (self.hasNext) { - hd = self.next() - if (pf.isDefinedAt(hd)) status = 1/*Found*/ - } - else status = -1/*Empty*/ - } - status == 1/*Found*/ - } - def next() = if (hasNext) { status = 0/*Seek*/; pf(hd) } else Iterator.empty.next() - } - - /** Produces a collection containing cumulative results of applying the - * operator going left to right. - * - * $willNotTerminateInf - * $orderDependent - * - * @tparam B the type of the elements in the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @return iterator with intermediate results - * @note Reuse: $consumesAndProducesIterator - */ - def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { - var hasNext = true - var elem = z - def next() = if (hasNext) { - val res = elem - if (self.hasNext) elem = op(elem, self.next()) - else hasNext = false - res - } else Iterator.empty.next() - } - - /** Produces a collection containing cumulative results of applying the operator going right to left. - * The head of the collection is the last cumulative result. - * - * $willNotTerminateInf - * $orderDependent - * - * @tparam B the type of the elements in the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @return iterator with intermediate results - * @example {{{ - * Iterator(1, 2, 3, 4).scanRight(0)(_ + _).toList == List(10, 9, 7, 4, 0) - * }}} - * @note Reuse: $consumesAndProducesIterator - */ - def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = toBuffer.scanRight(z)(op).iterator - - /** Takes longest prefix of values produced by this iterator that satisfy a predicate. - * - * @param p The predicate used to test elements. - * @return An iterator returning the values produced by this iterator, until - * this iterator produces a value that does not satisfy - * the predicate `p`. - * @note Reuse: $consumesAndProducesIterator - */ - def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { - private var hd: A = _ - private var hdDefined: Boolean = false - private var tail: Iterator[A] = self - - def hasNext = hdDefined || tail.hasNext && { - hd = tail.next() - if (p(hd)) hdDefined = true - else tail = Iterator.empty - hdDefined - } - def next() = if (hasNext) { hdDefined = false; hd } else empty.next() - } - - /** Partitions this iterator in two iterators according to a predicate. - * - * @param p the predicate on which to partition - * @return a pair of iterators: the iterator that satisfies the predicate - * `p` and the iterator that does not. - * The relative order of the elements in the resulting iterators - * is the same as in the original iterator. 
- * @note Reuse: $consumesOneAndProducesTwoIterators - */ - def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { - val self = buffered - class PartitionIterator(p: A => Boolean) extends AbstractIterator[A] { - var other: PartitionIterator = _ - val lookahead = new mutable.Queue[A] - def skip() = - while (self.hasNext && !p(self.head)) { - other.lookahead += self.next - } - def hasNext = !lookahead.isEmpty || { skip(); self.hasNext } - def next() = if (!lookahead.isEmpty) lookahead.dequeue() - else { skip(); self.next() } - } - val l = new PartitionIterator(p) - val r = new PartitionIterator(!p(_)) - l.other = r - r.other = l - (l, r) - } - - /** Splits this Iterator into a prefix/suffix pair according to a predicate. - * - * @param p the test predicate - * @return a pair of Iterators consisting of the longest prefix of this - * whose elements all satisfy `p`, and the rest of the Iterator. - * @note Reuse: $consumesOneAndProducesTwoIterators - */ - def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { - /* - * Giving a name to following iterator (as opposed to trailing) because - * anonymous class is represented as a structural type that trailing - * iterator is referring (the finish() method) and thus triggering - * handling of structural calls. It's not what's intended here. - */ - class Leading extends AbstractIterator[A] { - private[this] var lookahead: mutable.Queue[A] = null - private[this] var hd: A = _ - /* Status is kept with magic numbers - * 1 means next element is in hd and we're still reading into this iterator - * 0 means we're still reading but haven't found a next element - * -1 means we are done reading into the iterator, so we must rely on lookahead - * -2 means we are done but have saved hd for the other iterator to use as its first element - */ - private[this] var status = 0 - private def store(a: A) { - if (lookahead == null) lookahead = new mutable.Queue[A] - lookahead += a - } - def hasNext = { - if (status < 0) (lookahead ne null) && lookahead.nonEmpty - else if (status > 0) true - else { - if (self.hasNext) { - hd = self.next() - status = if (p(hd)) 1 else -2 - } - else status = -1 - status > 0 - } - } - def next() = { - if (hasNext) { - if (status == 1) { status = 0; hd } - else lookahead.dequeue() - } - else empty.next() - } - def finish(): Boolean = status match { - case -2 => status = -1 ; true - case -1 => false - case 1 => store(hd) ; status = 0 ; finish() - case 0 => - status = -1 - while (self.hasNext) { - val a = self.next() - if (p(a)) store(a) - else { - hd = a - return true - } - } - false - } - def trailer: A = hd - } - - val leading = new Leading - - val trailing = new AbstractIterator[A] { - private[this] var myLeading = leading - /* Status flag meanings: - * -1 not yet accessed - * 0 single element waiting in leading - * 1 defer to self - */ - private[this] var status = -1 - def hasNext = { - if (status > 0) self.hasNext - else { - if (status == 0) true - else if (myLeading.finish()) { - status = 0 - true - } - else { - status = 1 - myLeading = null - self.hasNext - } - } - } - def next() = { - if (hasNext) { - if (status > 0) self.next() - else { - status = 1 - val ans = myLeading.trailer - myLeading = null - ans - } - } - else Iterator.empty.next() - } - - override def toString = "unknown-if-empty iterator" - } - - (leading, trailing) - } - - /** Skips longest sequence of elements of this iterator which satisfy given - * predicate `p`, and returns an iterator of the remaining elements. 
- * - * @param p the predicate used to skip elements. - * @return an iterator consisting of the remaining elements - * @note Reuse: $consumesAndProducesIterator - */ - def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { - // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator - private[this] var status = -1 - // Local buffering to avoid double-wrap with .buffered - private[this] var fst: A = _ - def hasNext: Boolean = - if (status == 1) self.hasNext - else if (status == 0) true - else { - while (self.hasNext) { - val a = self.next() - if (!p(a)) { - fst = a - status = 0 - return true - } - } - status = 1 - false - } - def next() = - if (hasNext) { - if (status == 1) self.next() - else { - status = 1 - fst - } - } - else Iterator.empty.next() - } - - /** Creates an iterator formed from this iterator and another iterator - * by combining corresponding values in pairs. - * If one of the two iterators is longer than the other, its remaining - * elements are ignored. - * - * @param that The iterator providing the second half of each result pair - * @return a new iterator containing pairs consisting of - * corresponding elements of this iterator and `that`. The number - * of elements returned by the new iterator is the - * minimum of the number of elements returned by this - * iterator and `that`. - * @note Reuse: $consumesTwoAndProducesOneIterator - */ - def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { - def hasNext = self.hasNext && that.hasNext - def next = (self.next(), that.next()) - } - - /** Appends an element value to this iterator until a given target length is reached. - * - * @param len the target length - * @param elem the padding value - * @return a new iterator consisting of producing all values of this iterator, - * followed by the minimal number of occurrences of `elem` so - * that the number of produced values is at least `len`. - * @note Reuse: $consumesAndProducesIterator - * - * @usecase def padTo(len: Int, elem: A): Iterator[A] - * @inheritdoc - */ - def padTo[A1 >: A](len: Int, elem: A1): Iterator[A1] = new AbstractIterator[A1] { - private var count = 0 - def hasNext = self.hasNext || count < len - def next = { - count += 1 - if (self.hasNext) self.next() - else if (count <= len) elem - else empty.next() - } - } - - /** Creates an iterator that pairs each element produced by this iterator - * with its index, counting from 0. - * - * @return a new iterator containing pairs consisting of - * corresponding elements of this iterator and their indices. - * @note Reuse: $consumesAndProducesIterator - */ - def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] { - var idx = 0 - def hasNext = self.hasNext - def next = { - val ret = (self.next(), idx) - idx += 1 - ret - } - } - - /** Creates an iterator formed from this iterator and another iterator - * by combining corresponding elements in pairs. - * If one of the two iterators is shorter than the other, - * placeholder elements are used to extend the shorter iterator to the length of the longer. - * - * @param that iterator `that` may have a different length - * as the self iterator. 
- * @param thisElem element `thisElem` is used to fill up the - * resulting iterator if the self iterator is shorter than - * `that` - * @param thatElem element `thatElem` is used to fill up the - * resulting iterator if `that` is shorter than - * the self iterator - * @return a new iterator containing pairs consisting of - * corresponding values of this iterator and `that`. The length - * of the returned iterator is the maximum of the lengths of this iterator and `that`. - * If this iterator is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this iterator, `thatElem` values are used to pad the result. - * @note Reuse: $consumesTwoAndProducesOneIterator - * - * @usecase def zipAll[B](that: Iterator[B], thisElem: A, thatElem: B): Iterator[(A, B)] - * @inheritdoc - */ - def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1): Iterator[(A1, B1)] = new AbstractIterator[(A1, B1)] { - def hasNext = self.hasNext || that.hasNext - def next(): (A1, B1) = - if (self.hasNext) { - if (that.hasNext) (self.next(), that.next()) - else (self.next(), thatElem) - } else { - if (that.hasNext) (thisElem, that.next()) - else empty.next() - } - } - - /** Applies a function `f` to all values produced by this iterator. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @note Reuse: $consumesIterator - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - */ - def foreach[U](f: A => U) { while (hasNext) f(next()) } - - /** Tests whether a predicate holds for all values produced by this iterator. - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` holds for all values - * produced by this iterator, otherwise `false`. - * @note Reuse: $consumesIterator - */ - def forall(p: A => Boolean): Boolean = { - var res = true - while (res && hasNext) res = p(next()) - res - } - - /** Tests whether a predicate holds for some of the values produced by this iterator. - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` holds for some of the values - * produced by this iterator, otherwise `false`. - * @note Reuse: $consumesIterator - */ - def exists(p: A => Boolean): Boolean = { - var res = false - while (!res && hasNext) res = p(next()) - res - } - - /** Tests whether this iterator contains a given value as an element. - * $mayNotTerminateInf - * - * @param elem the element to test. - * @return `true` if this iterator produces some value that is - * equal (as determined by `==`) to `elem`, `false` otherwise. - * @note Reuse: $consumesIterator - */ - def contains(elem: Any): Boolean = exists(_ == elem) // Note--this seems faster than manual inlining! - - /** Finds the first value produced by the iterator satisfying a - * predicate, if any. - * $mayNotTerminateInf - * - * @param p the predicate used to test values. - * @return an option value containing the first value produced by the iterator that satisfies - * predicate `p`, or `None` if none exists. 
- * @note Reuse: $consumesIterator - */ - def find(p: A => Boolean): Option[A] = { - while (hasNext) { - val a = next() - if (p(a)) return Some(a) - } - None - } - - /** Returns the index of the first produced value satisfying a predicate, or -1. - * $mayNotTerminateInf - * - * @param p the predicate to test values - * @return the index of the first produced value satisfying `p`, - * or -1 if such an element does not exist until the end of the iterator is reached. - * @note Reuse: $consumesIterator - */ - def indexWhere(p: A => Boolean): Int = indexWhere(p, 0) - - /** Returns the index of the first produced value satisfying a predicate, or -1, after or at - * some start index. - * $mayNotTerminateInf - * - * @param p the predicate to test values - * @param from the start index - * @return the index `>= from` of the first produced value satisfying `p`, - * or -1 if such an element does not exist until the end of the iterator is reached. - * @note Reuse: $consumesIterator - */ - def indexWhere(p: A => Boolean, from: Int): Int = { - var i = 0 - while (i < from && hasNext) { - next() - i += 1 - } - - while (hasNext) { - if (p(next())) return i - i += 1 - } - -1 - } - - /** Returns the index of the first occurrence of the specified - * object in this iterable object. - * $mayNotTerminateInf - * - * @param elem element to search for. - * @return the index of the first occurrence of `elem` in the values produced by this iterator, - * or -1 if such an element does not exist until the end of the iterator is reached. - * @note Reuse: $consumesIterator - */ - def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) - - /** Returns the index of the first occurrence of the specified object in this iterable object - * after or at some start index. - * $mayNotTerminateInf - * - * @param elem element to search for. - * @param from the start index - * @return the index `>= from` of the first occurrence of `elem` in the values produced by this - * iterator, or -1 if such an element does not exist until the end of the iterator is - * reached. - * @note Reuse: $consumesIterator - */ - def indexOf[B >: A](elem: B, from: Int): Int = { - var i = 0 - while (i < from && hasNext) { - next() - i += 1 - } - - while (hasNext) { - if (next() == elem) return i - i += 1 - } - -1 - } - - /** Creates a buffered iterator from this iterator. - * - * @see [[scala.collection.BufferedIterator]] - * @return a buffered iterator producing the same values as this iterator. - * @note Reuse: $consumesAndProducesIterator - */ - def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { - private var hd: A = _ - private var hdDefined: Boolean = false - - def head: A = { - if (!hdDefined) { - hd = next() - hdDefined = true - } - hd - } - - def hasNext = - hdDefined || self.hasNext - - def next() = - if (hdDefined) { - hdDefined = false - hd - } else self.next() - } - - /** A flexible iterator for transforming an `Iterator[A]` into an - * Iterator[Seq[A]], with configurable sequence size, step, and - * strategy for dealing with elements which don't fit evenly. - * - * Typical uses can be achieved via methods `grouped` and `sliding`. 
- */ - class GroupedIterator[B >: A](self: Iterator[A], size: Int, step: Int) - extends AbstractIterator[Seq[B]] - with Iterator[Seq[B]] { - - require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - - private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer - private[this] var filled = false // whether the buffer is "hot" - private[this] var _partial = true // whether we deliver short sequences - private[this] var pad: Option[() => B] = None // what to pad short sequences with - - /** Public functions which can be used to configure the iterator before use. - * - * Pads the last segment if necessary so that all segments will - * have the same size. - * - * @param x The element that will be appended to the last segment, if necessary. - * @return The same iterator, and ''not'' a new iterator. - * @note This method mutates the iterator it is called on, which can be safely used afterwards. - * @note This method is mutually exclusive with `withPartial(true)`. - */ - def withPadding(x: => B): this.type = { - pad = Some(() => x) - this - } - /** Public functions which can be used to configure the iterator before use. - * - * Select whether the last segment may be returned with less than `size` - * elements. If not, some elements of the original iterator may not be - * returned at all. - * - * @param x `true` if partial segments may be returned, `false` otherwise. - * @return The same iterator, and ''not'' a new iterator. - * @note This method mutates the iterator it is called on, which can be safely used afterwards. - * @note This method is mutually exclusive with `withPadding`. - */ - def withPartial(x: Boolean): this.type = { - _partial = x - if (_partial == true) // reset pad since otherwise it will take precedence - pad = None - - this - } - - /** For reasons which remain to be determined, calling - * self.take(n).toSeq cause an infinite loop, so we have - * a slight variation on take for local usage. - * NB: self.take.toSeq is slice.toStream, lazily built on self, - * so a subsequent self.hasNext would not test self after the - * group was consumed. - */ - private def takeDestructively(size: Int): Seq[A] = { - val buf = new ArrayBuffer[A] - var i = 0 - // The order of terms in the following condition is important - // here as self.hasNext could be blocking - while (i < size && self.hasNext) { - buf += self.next - i += 1 - } - buf - } - - private def padding(x: Int) = List.fill(x)(pad.get()) - private def gap = (step - size) max 0 - - private def go(count: Int) = { - val prevSize = buffer.size - def isFirst = prevSize == 0 - // If there is padding defined we insert it immediately - // so the rest of the code can be oblivious - val xs: Seq[B] = { - val res = takeDestructively(count) - // was: extra checks so we don't calculate length unless there's reason - // but since we took the group eagerly, just use the fast length - val shortBy = count - res.length - if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res - } - lazy val len = xs.length - lazy val incomplete = len < count - - // if 0 elements are requested, or if the number of newly obtained - // elements is less than the gap between sequences, we are done. 
- def deliver(howMany: Int) = { - (howMany > 0 && (isFirst || len > gap)) && { - if (!isFirst) - buffer trimStart (step min prevSize) - - val available = - if (isFirst) len - else howMany min (len - gap) - - buffer ++= (xs takeRight available) - filled = true - true - } - } - - if (xs.isEmpty) false // self ran out of elements - else if (_partial) deliver(len min size) // if _partial is true, we deliver regardless - else if (incomplete) false // !_partial && incomplete means no more seqs - else if (isFirst) deliver(len) // first element - else deliver(step min size) // the typical case - } - - // fill() returns false if no more sequences can be produced - private def fill(): Boolean = { - if (!self.hasNext) false - // the first time we grab size, but after that we grab step - else if (buffer.isEmpty) go(size) - else go(step) - } - - def hasNext = filled || fill() - def next = { - if (!filled) - fill() - - if (!filled) - throw new NoSuchElementException("next on empty iterator") - filled = false - buffer.toList - } - } - - /** Returns an iterator which groups this iterator into fixed size - * blocks. Example usages: - * {{{ - * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7))) - * (1 to 7).iterator grouped 3 toList - * // Returns List(List(1, 2, 3), List(4, 5, 6)) - * (1 to 7).iterator grouped 3 withPartial false toList - * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7, 20, 25) - * // Illustrating that withPadding's argument is by-name. - * val it2 = Iterator.iterate(20)(_ + 5) - * (1 to 7).iterator grouped 3 withPadding it2.next toList - * }}} - * - * @note Reuse: $consumesAndProducesIterator - */ - def grouped[B >: A](size: Int): GroupedIterator[B] = - new GroupedIterator[B](self, size, size) - - /** Returns an iterator which presents a "sliding window" view of - * this iterator. The first argument is the window size, and - * the second argument `step` is how far to advance the window - * on each iteration. The `step` defaults to `1`. - * - * The default `GroupedIterator` can be configured to either - * pad a partial result to size `size` or suppress the partial - * result entirely. - * - * Example usages: - * {{{ - * // Returns List(List(1, 2, 3), List(2, 3, 4), List(3, 4, 5)) - * (1 to 5).iterator.sliding(3).toList - * // Returns List(List(1, 2, 3, 4), List(4, 5)) - * (1 to 5).iterator.sliding(4, 3).toList - * // Returns List(List(1, 2, 3, 4)) - * (1 to 5).iterator.sliding(4, 3).withPartial(false).toList - * // Returns List(List(1, 2, 3, 4), List(4, 5, 20, 25)) - * // Illustrating that withPadding's argument is by-name. - * val it2 = Iterator.iterate(20)(_ + 5) - * (1 to 5).iterator.sliding(4, 3).withPadding(it2.next).toList - * }}} - * - * @return An iterator producing `Seq[B]`s of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - * This behavior can be configured. - * - * @note Reuse: $consumesAndProducesIterator - */ - def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] = - new GroupedIterator[B](self, size, step) - - /** Returns the number of elements in this iterator. - * $willNotTerminateInf - * - * @note Reuse: $consumesIterator - */ - def length: Int = this.size - - /** Creates two new iterators that both iterate over the same elements - * as this iterator (in the same order). The duplicate iterators are - * considered equal if they are positioned at the same element. 
- * - * Given that most methods on iterators will make the original iterator - * unfit for further use, this methods provides a reliable way of calling - * multiple such methods on an iterator. - * - * @return a pair of iterators - * @note The implementation may allocate temporary storage for elements - * iterated by one iterator but not yet by the other. - * @note Reuse: $consumesOneAndProducesTwoIterators - */ - def duplicate: (Iterator[A], Iterator[A]) = { - val gap = new scala.collection.mutable.Queue[A] - var ahead: Iterator[A] = null - class Partner extends AbstractIterator[A] { - def hasNext: Boolean = self.synchronized { - (this ne ahead) && !gap.isEmpty || self.hasNext - } - def next(): A = self.synchronized { - if (gap.isEmpty) ahead = this - if (this eq ahead) { - val e = self.next() - gap enqueue e - e - } else gap.dequeue() - } - // to verify partnerhood we use reference equality on gap because - // type testing does not discriminate based on origin. - private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue - override def hashCode = gap.hashCode() - override def equals(other: Any) = other match { - case x: Partner => x.compareGap(gap) && gap.isEmpty - case _ => super.equals(other) - } - } - (new Partner, new Partner) - } - - /** Returns this iterator with patched values. - * Patching at negative indices is the same as patching starting at 0. - * Patching at indices at or larger than the length of the original iterator appends the patch to the end. - * If more values are replaced than actually exist, the excess is ignored. - * - * @param from The start index from which to patch - * @param patchElems The iterator of patch values - * @param replaced The number of values in the original iterator that are replaced by the patch. - * @note Reuse: $consumesTwoAndProducesOneIterator - */ - def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { - private var origElems = self - private var i = (if (from > 0) from else 0) // Counts down, switch to patch on 0, -1 means use patch first - def hasNext: Boolean = { - if (i == 0) { - origElems = origElems drop replaced - i = -1 - } - origElems.hasNext || patchElems.hasNext - } - def next(): B = { - if (i == 0) { - origElems = origElems drop replaced - i = -1 - } - if (i < 0) { - if (patchElems.hasNext) patchElems.next() - else origElems.next() - } - else { - if (origElems.hasNext) { - i -= 1 - origElems.next() - } - else { - i = -1 - patchElems.next() - } - } - } - } - - /** Copies selected values produced by this iterator to an array. - * Fills the given array `xs` starting at index `start` with at most - * `len` values produced by this iterator. - * Copying will stop once either the end of the current iterator is reached, - * or the end of the array is reached, or `len` elements have been copied. - * - * @param xs the array to fill. - * @param start the starting index. - * @param len the maximal number of elements to copy. - * @tparam B the type of the elements of the array. - * - * @note Reuse: $consumesIterator - * - * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = { - var i = start - val end = start + math.min(len, xs.length - start) - while (i < end && hasNext) { - xs(i) = next() - i += 1 - } - // TODO: return i - start so the caller knows how many values read? 
- } - - /** Tests if another iterator produces the same values as this one. - * - * $willNotTerminateInf - * - * @param that the other iterator - * @return `true`, if both iterators produce the same elements in the same order, `false` otherwise. - * - * @note Reuse: $consumesTwoIterators - */ - def sameElements(that: Iterator[_]): Boolean = { - while (hasNext && that.hasNext) - if (next != that.next) - return false - - !hasNext && !that.hasNext - } - - def toTraversable: Traversable[A] = toStream - def toIterator: Iterator[A] = self - def toStream: Stream[A] = - if (self.hasNext) Stream.cons(self.next(), self.toStream) - else Stream.empty[A] - - - /** Converts this iterator to a string. - * - * @return `"empty iterator"` or `"non-empty iterator"`, depending on - * whether or not the iterator is empty. - * @note Reuse: $preservesIterator - */ - override def toString = (if (hasNext) "non-empty" else "empty")+" iterator" -} - -/** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */ -abstract class AbstractIterator[+A] extends Iterator[A] diff --git a/tests/scala2-library/src/library/scala/collection/JavaConversions.scala b/tests/scala2-library/src/library/scala/collection/JavaConversions.scala deleted file mode 100644 index 93994d80bf01..000000000000 --- a/tests/scala2-library/src/library/scala/collection/JavaConversions.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import convert._ - -/** A variety of implicit conversions supporting interoperability between - * Scala and Java collections. - * - * The following conversions are supported: - *{{{ - * scala.collection.Iterable <=> java.lang.Iterable - * scala.collection.Iterable <=> java.util.Collection - * scala.collection.Iterator <=> java.util.{ Iterator, Enumeration } - * scala.collection.mutable.Buffer <=> java.util.List - * scala.collection.mutable.Set <=> java.util.Set - * scala.collection.mutable.Map <=> java.util.{ Map, Dictionary } - * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap - *}}} - * In all cases, converting from a source type to a target type and back - * again will return the original source object: - * - *{{{ - * import scala.collection.JavaConversions._ - * - * val sl = new scala.collection.mutable.ListBuffer[Int] - * val jl : java.util.List[Int] = sl - * val sl2 : scala.collection.mutable.Buffer[Int] = jl - * assert(sl eq sl2) - *}}} - * In addition, the following one way conversions are provided: - * - *{{{ - * scala.collection.Seq => java.util.List - * scala.collection.mutable.Seq => java.util.List - * scala.collection.Set => java.util.Set - * scala.collection.Map => java.util.Map - * java.util.Properties => scala.collection.mutable.Map[String, String] - *}}} - * - * The transparent conversions provided here are considered - * fragile because they can result in unexpected behavior and performance. - * - * Therefore, this API has been deprecated and `JavaConverters` should be - * used instead. `JavaConverters` provides the same conversions, but through - * extension methods. 
- * - * @author Miles Sabin - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("use JavaConverters", since="2.12.0") -object JavaConversions extends WrapAsScala with WrapAsJava diff --git a/tests/scala2-library/src/library/scala/collection/JavaConverters.scala b/tests/scala2-library/src/library/scala/collection/JavaConverters.scala deleted file mode 100644 index 2337f0ef8424..000000000000 --- a/tests/scala2-library/src/library/scala/collection/JavaConverters.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import convert._ - -/** A variety of decorators that enable converting between - * Scala and Java collections using extension methods, `asScala` and `asJava`. - * - * The extension methods return adapters for the corresponding API. - * - * The following conversions are supported via `asScala` and `asJava`: - *{{{ - * scala.collection.Iterable <=> java.lang.Iterable - * scala.collection.Iterator <=> java.util.Iterator - * scala.collection.mutable.Buffer <=> java.util.List - * scala.collection.mutable.Set <=> java.util.Set - * scala.collection.mutable.Map <=> java.util.Map - * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap - *}}} - * The following conversions are supported via `asScala` and through - * specially-named extension methods to convert to Java collections, as shown: - *{{{ - * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection) - * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration) - * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary) - *}}} - * In addition, the following one-way conversions are provided via `asJava`: - *{{{ - * scala.collection.Seq => java.util.List - * scala.collection.mutable.Seq => java.util.List - * scala.collection.Set => java.util.Set - * scala.collection.Map => java.util.Map - *}}} - * The following one way conversion is provided via `asScala`: - *{{{ - * java.util.Properties => scala.collection.mutable.Map - *}}} - * In all cases, converting from a source type to a target type and back - * again will return the original source object. For example: - * {{{ - * import scala.collection.JavaConverters._ - * - * val source = new scala.collection.mutable.ListBuffer[Int] - * val target: java.util.List[Int] = source.asJava - * val other: scala.collection.mutable.Buffer[Int] = target.asScala - * assert(source eq other) - * }}} - * Alternatively, the conversion methods have descriptive names and can be invoked explicitly. 
- * {{{ - * scala> val vs = java.util.Arrays.asList("hi", "bye") - * vs: java.util.List[String] = [hi, bye] - * - * scala> val ss = asScalaIterator(vs.iterator) - * ss: Iterator[String] = non-empty iterator - * - * scala> .toList - * res0: List[String] = List(hi, bye) - * - * scala> val ss = asScalaBuffer(vs) - * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye) - * }}} - * - * @since 2.8.1 - */ -object JavaConverters extends DecorateAsJava with DecorateAsScala diff --git a/tests/scala2-library/src/library/scala/collection/LinearSeq.scala b/tests/scala2-library/src/library/scala/collection/LinearSeq.scala deleted file mode 100644 index 5a7bb5891e07..000000000000 --- a/tests/scala2-library/src/library/scala/collection/LinearSeq.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ -import mutable.Builder - -/** A base trait for linear sequences. - * - * $linearSeqInfo - * - * @define linearSeqInfo - * Linear sequences have reasonably efficient `head`, `tail`, and `isEmpty` methods. - * If these methods provide the fastest way to traverse the collection, a - * collection `Coll` that extends this trait should also extend - * `LinearSeqOptimized[A, Coll[A]]`. - */ -trait LinearSeq[+A] extends Seq[A] - with GenericTraversableTemplate[A, LinearSeq] - with LinearSeqLike[A, LinearSeq[A]] { - override def companion: GenericCompanion[LinearSeq] = LinearSeq - override def seq: LinearSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - * @define coll linear sequence - * @define Coll `LinearSeq` - */ -object LinearSeq extends SeqFactory[LinearSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, LinearSeq[A]] = immutable.LinearSeq.newBuilder[A] -} diff --git a/tests/scala2-library/src/library/scala/collection/LinearSeqLike.scala b/tests/scala2-library/src/library/scala/collection/LinearSeqLike.scala deleted file mode 100644 index 4dba52dc743c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/LinearSeqLike.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import immutable.List -import scala.annotation.tailrec - -/** A template trait for linear sequences of type `LinearSeq[A]`. - * - * This trait just implements `iterator` and `corresponds` in terms of `isEmpty, ``head`, and `tail`. - * However, see `LinearSeqOptimized` for an implementation trait that overrides many more operations - * to make them run faster under the assumption of fast linear access with `head` and `tail`. - * - * Linear sequences do not add any new methods to `Seq`, but promise efficient implementations - * of linear access patterns. - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * - * @tparam A the element type of the $coll - * @tparam Repr the type of the actual $coll containing the elements. 
- */ -trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr] { - self: Repr => - - override protected[this] def thisCollection: LinearSeq[A] = this.asInstanceOf[LinearSeq[A]] - override protected[this] def toCollection(repr: Repr): LinearSeq[A] = repr.asInstanceOf[LinearSeq[A]] - - def seq: LinearSeq[A] - - override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ? - - override /*IterableLike*/ - def iterator: Iterator[A] = new AbstractIterator[A] { - var these = self - def hasNext: Boolean = !these.isEmpty - def next(): A = - if (hasNext) { - val result = these.head; these = these.tail; result - } else Iterator.empty.next() - - override def toList: List[A] = { - /* Have to clear `these` so the iterator is exhausted like - * it would be without the optimization. - * - * Calling "newBuilder.result()" in toList method - * prevents original seq from garbage collection, - * so we use these.take(0) here. - * - * Check scala/bug#8924 for details - */ - val xs = these.toList - these = these.take(0) - xs - } - } - - @tailrec override final def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = { - if (this.isEmpty) that.isEmpty - else that.nonEmpty && p(head, that.head) && (tail corresponds that.tail)(p) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/LinearSeqOptimized.scala b/tests/scala2-library/src/library/scala/collection/LinearSeqOptimized.scala deleted file mode 100644 index 68b85dcfe508..000000000000 --- a/tests/scala2-library/src/library/scala/collection/LinearSeqOptimized.scala +++ /dev/null @@ -1,318 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import scala.annotation.tailrec - -/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes - * the implementation of various methods under the assumption of fast linear access. - * - * $linearSeqOptim - * - * @define linearSeqOptim - * Linear-optimized sequences implement most operations in in terms of three methods, - * which are assumed to have efficient implementations. These are: - * {{{ - * def isEmpty: Boolean - * def head: A - * def tail: Repr - * }}} - * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself. - * Note that default implementations are provided via inheritance, but these - * should be overridden for performance. - * - * - */ -trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr => - - def isEmpty: Boolean - - def head: A - - def tail: Repr - - /** The length of the $coll. - * - * $willNotTerminateInf - * - * Note: the execution of `length` may take time proportional to the length of the sequence. - */ - def length: Int = { - var these = self - var len = 0 - while (!these.isEmpty) { - len += 1 - these = these.tail - } - len - } - - /** Selects an element by its index in the $coll. - * Note: the execution of `apply` may take time proportional to the index value. - * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`. 
- */ - def apply(n: Int): A = { - val rest = drop(n) - if (n < 0 || rest.isEmpty) throw new IndexOutOfBoundsException("" + n) - rest.head - } - - override /*IterableLike*/ - def foreach[U](f: A => U) { - var these = this - while (!these.isEmpty) { - f(these.head) - these = these.tail - } - } - - - override /*IterableLike*/ - def forall(p: A => Boolean): Boolean = { - var these = this - while (!these.isEmpty) { - if (!p(these.head)) return false - these = these.tail - } - true - } - - override /*IterableLike*/ - def exists(p: A => Boolean): Boolean = { - var these = this - while (!these.isEmpty) { - if (p(these.head)) return true - these = these.tail - } - false - } - - override /*SeqLike*/ - def contains[A1 >: A](elem: A1): Boolean = { - var these = this - while (!these.isEmpty) { - if (these.head == elem) return true - these = these.tail - } - false - } - - override /*IterableLike*/ - def find(p: A => Boolean): Option[A] = { - var these = this - while (!these.isEmpty) { - if (p(these.head)) return Some(these.head) - these = these.tail - } - None - } - - override /*TraversableLike*/ - def foldLeft[B](z: B)(@deprecatedName('f) op: (B, A) => B): B = { - var acc = z - var these = this - while (!these.isEmpty) { - acc = op(acc, these.head) - these = these.tail - } - acc - } - - override /*IterableLike*/ - def foldRight[B](z: B)(@deprecatedName('f) op: (A, B) => B): B = - if (this.isEmpty) z - else op(head, tail.foldRight(z)(op)) - - override /*TraversableLike*/ - def reduceLeft[B >: A](@deprecatedName('f) op: (B, A) => B): B = - if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else tail.foldLeft[B](head)(op) - - override /*IterableLike*/ - def reduceRight[B >: A](op: (A, B) => B): B = - if (isEmpty) throw new UnsupportedOperationException("Nil.reduceRight") - else if (tail.isEmpty) head - else op(head, tail.reduceRight(op)) - - override /*TraversableLike*/ - def last: A = { - if (isEmpty) throw new NoSuchElementException - var these = this - var nx = these.tail - while (!nx.isEmpty) { - these = nx - nx = nx.tail - } - these.head - } - - override /*IterableLike*/ - def take(n: Int): Repr = { - val b = newBuilder - var i = 0 - var these = repr - while (!these.isEmpty && i < n) { - i += 1 - b += these.head - these = these.tail - } - b.result() - } - - override /*TraversableLike*/ - def drop(n: Int): Repr = { - var these: Repr = repr - var count = n - while (!these.isEmpty && count > 0) { - these = these.tail - count -= 1 - } - // !!! This line should actually be something like: - // newBuilder ++= these result - // since we are in collection.*, not immutable.*. - // However making that change will pessimize all the - // immutable linear seqs (like list) which surely expect - // drop to share. (Or at least it would penalize List if - // it didn't override drop. It would be a lot better if - // the leaf collections didn't override so many methods.) - // - // Upshot: MutableList is broken and passes part of the - // original list as the result of drop. 
- these - } - - override /*IterableLike*/ - def dropRight(n: Int): Repr = { - val b = newBuilder - var these = this - var lead = this drop n - while (!lead.isEmpty) { - b += these.head - these = these.tail - lead = lead.tail - } - b.result() - } - - override /*IterableLike*/ - def slice(from: Int, until: Int): Repr = { - var these: Repr = repr - var count = from max 0 - if (until <= count) - return newBuilder.result() - - val b = newBuilder - var sliceElems = until - count - while (these.nonEmpty && count > 0) { - these = these.tail - count -= 1 - } - while (these.nonEmpty && sliceElems > 0) { - sliceElems -= 1 - b += these.head - these = these.tail - } - b.result() - } - - override /*IterableLike*/ - def takeWhile(p: A => Boolean): Repr = { - val b = newBuilder - var these = this - while (!these.isEmpty && p(these.head)) { - b += these.head - these = these.tail - } - b.result() - } - - override /*TraversableLike*/ - def span(p: A => Boolean): (Repr, Repr) = { - var these: Repr = repr - val b = newBuilder - while (!these.isEmpty && p(these.head)) { - b += these.head - these = these.tail - } - (b.result(), these) - } - - override /*IterableLike*/ - def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { - case that1: LinearSeq[_] => - // Probably immutable, so check reference identity first (it's quick anyway) - (this eq that1) || { - var these = this - var those = that1 - while (!these.isEmpty && !those.isEmpty && these.head == those.head) { - these = these.tail - those = those.tail - } - these.isEmpty && those.isEmpty - } - case _ => - super.sameElements(that) - } - - override /*SeqLike*/ - def lengthCompare(len: Int): Int = { - @tailrec def loop(i: Int, xs: Repr): Int = { - if (i == len) - if (xs.isEmpty) 0 else 1 - else if (xs.isEmpty) - -1 - else - loop(i + 1, xs.tail) - } - if (len < 0) 1 - else loop(0, this) - } - - override /*SeqLike*/ - def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0 - - override /*SeqLike*/ - def segmentLength(p: A => Boolean, from: Int): Int = { - var i = 0 - var these = this drop from - while (!these.isEmpty && p(these.head)) { - i += 1 - these = these.tail - } - i - } - - override /*SeqLike*/ - def indexWhere(p: A => Boolean, from: Int): Int = { - var i = math.max(from, 0) - var these = this drop from - while (these.nonEmpty) { - if (p(these.head)) - return i - - i += 1 - these = these.tail - } - -1 - } - - override /*SeqLike*/ - def lastIndexWhere(p: A => Boolean, end: Int): Int = { - var i = 0 - var these = this - var last = -1 - while (!these.isEmpty && i <= end) { - if (p(these.head)) last = i - these = these.tail - i += 1 - } - last - } -} diff --git a/tests/scala2-library/src/library/scala/collection/Map.scala b/tests/scala2-library/src/library/scala/collection/Map.scala deleted file mode 100644 index c9a943f1f724..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Map.scala +++ /dev/null @@ -1,59 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ - -/** - * A map from keys of type `K` to values of type `V`. - * - * $mapNote - * - * '''Note:''' If you do not have specific implementations for `add` and `-` in mind, - * you might consider inheriting from `DefaultMap` instead. 
- * - * '''Note:''' If your additions and mutations return the same kind of map as the map - * you are defining, you should inherit from `MapLike` as well. - * - * @tparam K the type of the keys in this map. - * @tparam V the type of the values associated with keys. - * - * @since 1.0 - */ -trait Map[K, +V] extends Iterable[(K, V)] with GenMap[K, V] with MapLike[K, V, Map[K, V]] { - def empty: Map[K, V] = Map.empty - - override def seq: Map[K, V] = this -} - -/** $factoryInfo - * @define Coll `Map` - * @define coll map - */ -object Map extends MapFactory[Map] { - def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty - - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V] - - /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map - * because of variance issues. - */ - abstract class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends AbstractMap[K, V] with Map[K, V] with Serializable { - override def size = underlying.size - def get(key: K) = underlying.get(key) // removed in 2.9: orElse Some(default(key)) - def iterator = underlying.iterator - override def default(key: K): V = d(key) - } - -} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ -abstract class AbstractMap[K, +V] extends AbstractIterable[(K, V)] with Map[K, V] diff --git a/tests/scala2-library/src/library/scala/collection/MapLike.scala b/tests/scala2-library/src/library/scala/collection/MapLike.scala deleted file mode 100644 index a087cb0f4542..000000000000 --- a/tests/scala2-library/src/library/scala/collection/MapLike.scala +++ /dev/null @@ -1,372 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.{ Builder, MapBuilder } -import scala.annotation.migration -import parallel.ParMap - -/** A template trait for maps, which associate keys with values. - * - * $mapNote - * $mapTags - * @since 2.8 - * - * @define mapNote - * '''Implementation note:''' - * This trait provides most of the operations of a `Map` independently of its representation. - * It is typically inherited by concrete implementations of maps. - * - * To implement a concrete map, you need to provide implementations of the - * following methods: - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def + [V1 >: V](kv: (K, V1)): This - * def -(key: K): This - * }}} - * If you wish that methods like `take`, `drop`, `filter` also return the same kind of map - * you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * - * @define mapTags - * @tparam K the type of the keys. - * @tparam V the type of associated values. - * @tparam This the type of the map itself. - * - * @author Martin Odersky - * @version 2.8 - * - * @define coll map - * @define Coll Map - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]] - extends PartialFunction[K, V] - with IterableLike[(K, V), This] - with GenMapLike[K, V, This] - with Subtractable[K, This] - with Parallelizable[(K, V), ParMap[K, V]] -{ -self => - - /** The empty map of the same type as this map - * @return an empty map of type `This`. 
- */ - def empty: This - - /** A common implementation of `newBuilder` for all maps in terms of `empty`. - * Overridden for mutable maps in `mutable.MapLike`. - */ - override protected[this] def newBuilder: Builder[(K, V), This] = new MapBuilder[K, V, This](empty) - - /** Optionally returns the value associated with a key. - * - * @param key the key value - * @return an option value containing the value associated with `key` in this map, - * or `None` if none exists. - */ - def get(key: K): Option[V] - - /** Creates a new iterator over all key/value pairs of this map - * - * @return the new iterator - */ - def iterator: Iterator[(K, V)] - - /** Adds a key/value pair to this map, returning a new map. - * @param kv the key/value pair - * @tparam V1 the type of the value in the key/value pair. - * @return a new map with the new binding added to this map - * - * @usecase def + (kv: (K, V)): Map[K, V] - * @inheritdoc - */ - def + [V1 >: V] (kv: (K, V1)): Map[K, V1] - - /** Removes a key from this map, returning a new map. - * @param key the key to be removed - * @return a new map without a binding for `key` - * - * @usecase def - (key: K): Map[K, V] - * @inheritdoc - */ - def - (key: K): This - - /** Tests whether the map is empty. - * - * @return `true` if the map does not contain any key/value binding, `false` otherwise. - */ - override def isEmpty: Boolean = size == 0 - - /** Returns the value associated with a key, or a default value if the key is not contained in the map. - * @param key the key. - * @param default a computation that yields a default value in case no binding for `key` is - * found in the map. - * @tparam V1 the result type of the default computation. - * @return the value associated with `key` if it exists, - * otherwise the result of the `default` computation. - * - * @usecase def getOrElse(key: K, default: => V): V - * @inheritdoc - */ - def getOrElse[V1 >: V](key: K, default: => V1): V1 = get(key) match { - case Some(v) => v - case None => default - } - - /** Retrieves the value which is associated with the given key. This - * method invokes the `default` method of the map if there is no mapping - * from the given key to a value. Unless overridden, the `default` method throws a - * `NoSuchElementException`. - * - * @param key the key - * @return the value associated with the given key, or the result of the - * map's `default` method, if none exists. - */ - def apply(key: K): V = get(key) match { - case None => default(key) - case Some(value) => value - } - - /** Tests whether this map contains a binding for a key. - * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def contains(key: K): Boolean = get(key).isDefined - - /** Tests whether this map contains a binding for a key. This method, - * which implements an abstract method of trait `PartialFunction`, - * is equivalent to `contains`. - * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def isDefinedAt(key: K) = contains(key) - - override /*PartialFunction*/ - def applyOrElse[K1 <: K, V1 >: V](x: K1, default: K1 => V1): V1 = - getOrElse(x, default(x)) - - /** Collects all keys of this map in a set. - * @return a set containing all keys of this map. - */ - def keySet: Set[K] = new DefaultKeySet - - /** The implementation class of the set returned by `keySet`. 
- */ - protected class DefaultKeySet extends AbstractSet[K] with Set[K] with Serializable { - def contains(key : K) = self.contains(key) - def iterator = keysIterator - def + (elem: K): Set[K] = (Set[K]() ++ this + elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem - def - (elem: K): Set[K] = (Set[K]() ++ this - elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem - override def size = self.size - override def foreach[U](f: K => U) = self.keysIterator foreach f - } - - /** Creates an iterator for all keys. - * - * @return an iterator over all keys. - */ - def keysIterator: Iterator[K] = new AbstractIterator[K] { - val iter = self.iterator - def hasNext = iter.hasNext - def next() = iter.next()._1 - } - - /** Collects all keys of this map in an iterable collection. - * - * @return the keys of this map as an iterable. - */ - @migration("`keys` returns `Iterable[K]` rather than `Iterator[K]`.", "2.8.0") - def keys: Iterable[K] = keySet - - /** Collects all values of this map in an iterable collection. - * - * @return the values of this map as an iterable. - */ - @migration("`values` returns `Iterable[V]` rather than `Iterator[V]`.", "2.8.0") - def values: Iterable[V] = new DefaultValuesIterable - - /** The implementation class of the iterable returned by `values`. - */ - protected class DefaultValuesIterable extends AbstractIterable[V] with Iterable[V] with Serializable { - def iterator = valuesIterator - override def size = self.size - override def foreach[U](f: V => U) = self.valuesIterator foreach f - } - - /** Creates an iterator for all values in this map. - * - * @return an iterator over all values that are associated with some key in this map. - */ - def valuesIterator: Iterator[V] = new AbstractIterator[V] { - val iter = self.iterator - def hasNext = iter.hasNext - def next() = iter.next()._2 - } - - /** Defines the default value computation for the map, - * returned when a key is not found - * The method implemented here throws an exception, - * but it might be overridden in subclasses. - * - * @param key the given key value for which a binding is missing. - * @throws NoSuchElementException - */ - def default(key: K): V = - throw new NoSuchElementException("key not found: " + key) - - protected class FilteredKeys(p: K => Boolean) extends AbstractMap[K, V] with DefaultMap[K, V] { - override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) - def iterator = self.iterator.filter(kv => p(kv._1)) - override def contains(key: K) = p(key) && self.contains(key) - def get(key: K) = if (!p(key)) None else self.get(key) - } - - /** Filters this map by retaining only keys satisfying a predicate. - * - * '''Note''': the predicate must accept any key of type `K`, not just those already - * present in the map, as the predicate is tested before the underlying map is queried. - * - * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. 
- */ - def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p) - - protected class MappedValues[W](f: V => W) extends AbstractMap[K, W] with DefaultMap[K, W] { - override def foreach[U](g: ((K, W)) => U): Unit = for ((k, v) <- self) g((k, f(v))) - def iterator = for ((k, v) <- self.iterator) yield (k, f(v)) - override def size = self.size - override def contains(key: K) = self.contains(key) - def get(key: K) = self.get(key).map(f) - } - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. - * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f) - - // The following 5 operations (updated, two times +, two times ++) should really be - // generic, returning This[V]. We need better covariance support to express that though. - // So right now we do the brute force approach of code duplication. - - /** Creates a new map obtained by updating this map with a given key/value pair. - * @param key the key - * @param value the value - * @tparam V1 the type of the added value - * @return A new map with the new key/value mapping added to this map. - * - * @usecase def updated(key: K, value: V): Map[K, V] - * @inheritdoc - */ - def updated [V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value)) - - /** Adds key/value pairs to this map, returning a new map. - * - * This method takes two or more key/value pairs. Another overloaded - * variant of this method handles the case where a single key/value pair is - * added. - * @param kv1 the first key/value pair - * @param kv2 the second key/value pair - * @param kvs the remaining key/value pairs - * @tparam V1 the type of the added values - * @return a new map with the given bindings added to this map - * - * @usecase def + (kvs: (K, V)*): Map[K, V] - * @inheritdoc - * @param kvs the key/value pairs - */ - def + [V1 >: V] (kv1: (K, V1), kv2: (K, V1), kvs: (K, V1) *): Map[K, V1] = - this + kv1 + kv2 ++ kvs - - /** Adds all key/value pairs in a traversable collection to this map, returning a new map. - * - * @param xs the collection containing the added key/value pairs - * @tparam V1 the type of the added values - * @return a new map with the given bindings added to this map - * - * @usecase def ++ (xs: Traversable[(K, V)]): Map[K, V] - * @inheritdoc - */ - def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = - ((repr: Map[K, V1]) /: xs.seq) (_ + _) - - /** Returns a new map obtained by removing all key/value pairs for which the predicate - * `p` returns `true`. - * - * '''Note:''' This method works by successively removing elements for which the - * predicate is true from this set. - * If removal is slow, or you expect that most elements of the set - * will be removed, you might consider using `filter` - * with a negated predicate instead. - * @param p A predicate over key-value pairs - * @return A new map containing elements not satisfying the predicate. - */ - override def filterNot(p: ((K, V)) => Boolean): This = { - var res: This = repr - for (kv <- this) - if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! 
concrete overrides abstract problem - res - } - - override def toSeq: Seq[(K, V)] = { - if (isEmpty) Vector.empty[(K, V)] - else { - // Default appropriate for immutable collections; mutable collections override this - val vb = Vector.newBuilder[(K, V)] - foreach(vb += _) - vb.result - } - } - - override def toBuffer[E >: (K, V)]: mutable.Buffer[E] = { - val result = new mutable.ArrayBuffer[E](size) - // Faster to let the map iterate itself than to defer through copyToBuffer - foreach(result += _) - result - } - - protected[this] override def parCombiner = ParMap.newCombiner[K, V] - - /** Appends all bindings of this map to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string - * `end`. Inside, the string representations of all bindings of this map - * in the form of `key -> value` are separated by the string `sep`. - * - * @param b the builder to which strings are appended. - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return the string builder `b` to which elements were appended. - */ - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = - this.iterator.map { case (k, v) => k+" -> "+v }.addString(b, start, sep, end) - - /** Defines the prefix of this object's `toString` representation. - * @return a string representation which starts the result of `toString` applied to this $coll. - * Unless overridden in subclasses, the string prefix of every map is `"Map"`. - */ - override def stringPrefix: String = "Map" - - override /*PartialFunction*/ - def toString = super[IterableLike].toString - -} diff --git a/tests/scala2-library/src/library/scala/collection/MapProxy.scala b/tests/scala2-library/src/library/scala/collection/MapProxy.scala deleted file mode 100644 index 2faf6899734f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/MapProxy.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -/** This is a simple wrapper class for [[scala.collection.Map]]. - * It is most useful for assembling customized map abstractions - * dynamically using object composition and forwarding. - * - * @author Matthias Zenger - * @version 1.0, 21/07/2003 - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] diff --git a/tests/scala2-library/src/library/scala/collection/MapProxyLike.scala b/tests/scala2-library/src/library/scala/collection/MapProxyLike.scala deleted file mode 100644 index 73a69357882d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/MapProxyLike.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -// Methods could be printed by cat MapLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for Map objects. It forwards - * all calls to a different Map object. 
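A minimal sketch of the MapLike members removed above (`mapValues`, the multi-pair `+`, `updated`, `filterNot`, `addString`), under the same pre-2.13 API; `scores` is an invented value.

  val scores = Map("ann" -> 1, "bob" -> 2)
  val doubled = scores.mapValues(_ * 2)        // wraps scores, f applied on each access
  doubled("bob")                               // 4
  scores.updated("cy", 3)                      // Map(ann -> 1, bob -> 2, cy -> 3)
  scores + ("cy" -> 3, "dee" -> 4)             // the two-or-more-pair + overload
  scores.filterNot { case (_, v) => v > 1 }    // Map(ann -> 1)
  val sb = new StringBuilder
  scores.addString(sb, "{", ", ", "}")         // sb.toString == "{ann -> 1, bob -> 2}"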
- * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] - extends MapLike[A, B, This] - with IterableProxyLike[(A, B), This] -{ - override def get(key: A): Option[B] = self.get(key) - override def iterator: Iterator[(A, B)] = self.iterator - override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = self.+(kv) - override def - (key: A): This = self.-(key) - override def isEmpty: Boolean = self.isEmpty - override def getOrElse[B1 >: B](key: A, default: => B1): B1 = self.getOrElse(key, default) - override def apply(key: A): B = self.apply(key) - override def contains(key: A): Boolean = self.contains(key) - override def isDefinedAt(key: A) = self.isDefinedAt(key) - override def keySet: Set[A] = self.keySet - override def keysIterator: Iterator[A] = self.keysIterator - override def keys: Iterable[A] = self.keys - override def values: Iterable[B] = self.values - override def valuesIterator: Iterator[B] = self.valuesIterator - override def default(key: A): B = self.default(key) - override def filterKeys(p: A => Boolean) = self.filterKeys(p) - override def mapValues[C](f: B => C) = self.mapValues(f) - override def updated [B1 >: B](key: A, value: B1): Map[A, B1] = self.updated(key, value) - override def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] = self.+(kv1, kv2, kvs: _*) - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] = self.++(xs) - override def filterNot(p: ((A, B)) => Boolean) = self filterNot p - - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = - self.addString(b, start, sep, end) -} diff --git a/tests/scala2-library/src/library/scala/collection/Parallel.scala b/tests/scala2-library/src/library/scala/collection/Parallel.scala deleted file mode 100644 index 174e3ab75e57..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Parallel.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -/** A marker trait for collections which have their operations parallelised. - * - * @since 2.9 - * @author Aleksandar Prokopec - */ -trait Parallel diff --git a/tests/scala2-library/src/library/scala/collection/Parallelizable.scala b/tests/scala2-library/src/library/scala/collection/Parallelizable.scala deleted file mode 100644 index c13155638854..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Parallelizable.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import parallel.Combiner - -/** This trait describes collections which can be turned into parallel collections - * by invoking the method `par`. Parallelizable collections may be parameterized with - * a target type different than their own. 
- * - * @tparam A the type of the elements in the collection - * @tparam ParRepr the actual type of the collection, which has to be parallel - */ -trait Parallelizable[+A, +ParRepr <: Parallel] extends Any { - - def seq: TraversableOnce[A] - - /** Returns a parallel implementation of this collection. - * - * For most collection types, this method creates a new parallel collection by copying - * all the elements. For these collection, `par` takes linear time. Mutable collections - * in this category do not produce a mutable parallel collection that has the same - * underlying dataset, so changes in one collection will not be reflected in the other one. - * - * Specific collections (e.g. `ParArray` or `mutable.ParHashMap`) override this default - * behaviour by creating a parallel collection which shares the same underlying dataset. - * For these collections, `par` takes constant or sublinear time. - * - * All parallel collections return a reference to themselves. - * - * @return a parallel implementation of this collection - */ - def par: ParRepr = { - val cb = parCombiner - for (x <- seq) cb += x - cb.result() - } - - /** The default `par` implementation uses the combiner provided by this method - * to create a new parallel collection. - * - * @return a combiner for the parallel collection of type `ParRepr` - */ - protected[this] def parCombiner: Combiner[A, ParRepr] -} - diff --git a/tests/scala2-library/src/library/scala/collection/Searching.scala b/tests/scala2-library/src/library/scala/collection/Searching.scala deleted file mode 100644 index 25e8b5e253d5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Searching.scala +++ /dev/null @@ -1,118 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import scala.language.implicitConversions -import scala.annotation.tailrec -import scala.collection.generic.IsSeqLike -import scala.math.Ordering - -/** A collection of wrappers that provide sequence classes with search functionality. - * - * Example usage: - * {{{ - * import scala.collection.Searching._ - * val l = List(1, 2, 3, 4, 5) - * l.search(3) - * // == Found(2) - * }}} - */ -object Searching { - sealed abstract class SearchResult { - def insertionPoint: Int - } - - case class Found(foundIndex: Int) extends SearchResult { - override def insertionPoint = foundIndex - } - case class InsertionPoint(insertionPoint: Int) extends SearchResult - - class SearchImpl[A, Repr](val coll: SeqLike[A, Repr]) { - /** Search the sorted sequence for a specific element. If the sequence is an - * `IndexedSeqLike`, a binary search is used. Otherwise, a linear search is used. - * - * The sequence should be sorted with the same `Ordering` before calling; otherwise, - * the results are undefined. - * - * @see [[scala.collection.IndexedSeqLike]] - * @see [[scala.math.Ordering]] - * @see [[scala.collection.SeqLike]], method `sorted` - * - * @param elem the element to find. - * @param ord the ordering to be used to compare elements. - * - * @return a `Found` value containing the index corresponding to the element in the - * sequence, or the `InsertionPoint` where the element would be inserted if - * the element is not in the sequence. 
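A minimal sketch of the `par` contract documented above, assuming the parallel collections that ship with the 2.12 standard library these sources belong to; `xs` is an invented value.

  import scala.collection.parallel.ParSeq

  val xs = List(1, 2, 3, 4)
  val pxs: ParSeq[Int] = xs.par    // default path: copies all elements through parCombiner
  pxs.map(_ * 2).sum               // 20, the transformation may run in parallel
  // Collections such as ParArray override par to share their underlying data instead.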
- */ - final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = - coll match { - case _: IndexedSeqLike[A, Repr] => binarySearch(elem, 0, coll.length)(ord) - case _ => linearSearch(coll.view, elem, 0)(ord) - } - - /** Search within an interval in the sorted sequence for a specific element. If the - * sequence is an `IndexedSeqLike`, a binary search is used. Otherwise, a linear search - * is used. - * - * The sequence should be sorted with the same `Ordering` before calling; otherwise, - * the results are undefined. - * - * @see [[scala.collection.IndexedSeqLike]] - * @see [[scala.math.Ordering]] - * @see [[scala.collection.SeqLike]], method `sorted` - * - * @param elem the element to find. - * @param from the index where the search starts. - * @param to the index following where the search ends. - * @param ord the ordering to be used to compare elements. - * - * @return a `Found` value containing the index corresponding to the element in the - * sequence, or the `InsertionPoint` where the element would be inserted if - * the element is not in the sequence. - */ - final def search[B >: A](elem: B, from: Int, to: Int) - (implicit ord: Ordering[B]): SearchResult = - coll match { - case _: IndexedSeqLike[A, Repr] => binarySearch(elem, from, to)(ord) - case _ => linearSearch(coll.view(from, to), elem, from)(ord) - } - - @tailrec - private def binarySearch[B >: A](elem: B, from: Int, to: Int) - (implicit ord: Ordering[B]): SearchResult = { - if (to == from) InsertionPoint(from) else { - val idx = from+(to-from-1)/2 - math.signum(ord.compare(elem, coll(idx))) match { - case -1 => binarySearch(elem, from, idx)(ord) - case 1 => binarySearch(elem, idx + 1, to)(ord) - case _ => Found(idx) - } - } - } - - private def linearSearch[B >: A](c: SeqView[A, Repr], elem: B, offset: Int) - (implicit ord: Ordering[B]): SearchResult = { - var idx = offset - val it = c.iterator - while (it.hasNext) { - val cur = it.next() - if (ord.equiv(elem, cur)) return Found(idx) - else if (ord.lt(elem, cur)) return InsertionPoint(idx) - idx += 1 - } - InsertionPoint(idx) - } - - } - - implicit def search[Repr, A](coll: Repr) - (implicit fr: IsSeqLike[Repr]): SearchImpl[fr.A, Repr] = new SearchImpl(fr.conversion(coll)) -} diff --git a/tests/scala2-library/src/library/scala/collection/Seq.scala b/tests/scala2-library/src/library/scala/collection/Seq.scala deleted file mode 100644 index 2f4b3e5f8a09..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Seq.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.Builder - -/** A base trait for sequences. - * $seqInfo - */ -trait Seq[+A] extends PartialFunction[Int, A] - with Iterable[A] - with GenSeq[A] - with GenericTraversableTemplate[A, Seq] - with SeqLike[A, Seq[A]] { - override def companion: GenericCompanion[Seq] = Seq - - override def seq: Seq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. 
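A minimal sketch of the Searching API deleted above, on an already-sorted sequence as its doc comment requires:

  import scala.collection.Searching._

  val xs = Vector(1, 3, 5, 7)
  xs.search(5)                  // Found(2) -- binary search, Vector is indexed
  xs.search(4)                  // InsertionPoint(2)
  List(1, 3, 5, 7).search(5)    // Found(2), reached by linear search instead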
- * @define coll sequence - * @define Coll `Seq` - */ -object Seq extends SeqFactory[Seq] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, Seq[A]] = immutable.Seq.newBuilder[A] -} - -/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */ -abstract class AbstractSeq[+A] extends AbstractIterable[A] with Seq[A] diff --git a/tests/scala2-library/src/library/scala/collection/SeqExtractors.scala b/tests/scala2-library/src/library/scala/collection/SeqExtractors.scala deleted file mode 100644 index 888b3e20f629..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SeqExtractors.scala +++ /dev/null @@ -1,24 +0,0 @@ -package scala -package collection - -/** An extractor used to head/tail deconstruct sequences. */ -object +: { - def unapply[T,Coll <: SeqLike[T, Coll]]( - t: Coll with SeqLike[T, Coll]): Option[(T, Coll)] = - if(t.isEmpty) None - else Some(t.head -> t.tail) -} - -/** An extractor used to init/last deconstruct sequences. */ -object :+ { - /** Splits a sequence into init :+ last. - * @return Some((init, last)) if sequence is non-empty. None otherwise. - */ - def unapply[T,Coll <: SeqLike[T, Coll]]( - t: Coll with SeqLike[T, Coll]): Option[(Coll, T)] = - if(t.isEmpty) None - else Some(t.init -> t.last) -} - -// Dummy to fool ant -private abstract class SeqExtractors diff --git a/tests/scala2-library/src/library/scala/collection/SeqLike.scala b/tests/scala2-library/src/library/scala/collection/SeqLike.scala deleted file mode 100644 index cef0fbdf86a1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SeqLike.scala +++ /dev/null @@ -1,930 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import immutable.{ List, Range } -import generic._ -import parallel.ParSeq -import scala.math.Ordering - -/** A template trait for sequences of type `Seq[A]` - * $seqInfo - * - * @define seqInfo - * Sequences are special cases of iterable collections of class `Iterable`. - * Unlike iterables, sequences always have a defined order of elements. - * Sequences provide a method `apply` for indexing. Indices range from `0` up to the `length` of - * a sequence. Sequences support a number of methods to find occurrences of elements or subsequences, including - * `segmentLength`, `prefixLength`, `indexWhere`, `indexOf`, `lastIndexWhere`, `lastIndexOf`, - * `startsWith`, `endsWith`, `indexOfSlice`. - * - * Another way to see a sequence is as a `PartialFunction` from `Int` values - * to the element type of the sequence. The `isDefinedAt` method of a sequence - * returns `true` for the interval from `0` until `length`. - * - * Sequences can be accessed in reverse order of their elements, using methods - * `reverse` and `reverseIterator`. - * - * Sequences have two principal subtraits, `IndexedSeq` and `LinearSeq`, which give different guarantees for performance. - * An `IndexedSeq` provides fast random-access of elements and a fast `length` operation. - * A `LinearSeq` provides fast access only to the first element via `head`, but also - * has a fast `tail` operation. - * - * @tparam A the element type of the collection - * @tparam Repr the type of the actual collection containing the elements. 
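A minimal sketch of the `+:` and `:+` extractors removed above; `describe` is an invented helper.

  def describe(xs: List[Int]): String = xs match {
    case head +: tail => s"starts with $head, then $tail"
    case _            => "empty"
  }
  describe(List(1, 2, 3))                                     // "starts with 1, then List(2, 3)"
  List(1, 2, 3) match { case init :+ last => (init, last) }  // (List(1, 2), 3)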
- * - * @author Martin Odersky - * @author Matthias Zenger - * @version 1.0, 16/07/2003 - * @since 2.8 - * - * @define Coll `Seq` - * @define coll sequence - * @define thatinfo the class of the returned collection. Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `B` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]` - * is found. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. - * @define orderDependent - * @define orderDependentFold - */ -trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self => - - override protected[this] def thisCollection: Seq[A] = this.asInstanceOf[Seq[A]] - override protected[this] def toCollection(repr: Repr): Seq[A] = repr.asInstanceOf[Seq[A]] - - def length: Int - - def apply(idx: Int): A - - protected[this] override def parCombiner = ParSeq.newCombiner[A] - - /** Compares the length of this $coll to a test value. - * - * @param len the test value that gets compared with the length. - * @return A value `x` where - * {{{ - * x < 0 if this.length < len - * x == 0 if this.length == len - * x > 0 if this.length > len - * }}} - * The method as implemented here does not call `length` directly; its running time - * is `O(length min len)` instead of `O(length)`. The method should be overwritten - * if computing `length` is cheap. - */ - def lengthCompare(len: Int): Int = { - if (len < 0) 1 - else { - var i = 0 - val it = iterator - while (it.hasNext) { - if (i == len) return if (it.hasNext) 1 else 0 - it.next() - i += 1 - } - i - len - } - } - - override /*IterableLike*/ def isEmpty: Boolean = lengthCompare(0) == 0 - - /** The size of this $coll, equivalent to `length`. - * - * $willNotTerminateInf - */ - override def size = length - - def segmentLength(p: A => Boolean, from: Int): Int = { - var i = 0 - val it = iterator.drop(from) - while (it.hasNext && p(it.next())) - i += 1 - i - } - - def indexWhere(p: A => Boolean, from: Int): Int = { - var i = math.max(from, 0) - val it = iterator.drop(from) - while (it.hasNext) { - if (p(it.next())) return i - else i += 1 - } - -1 - } - - def lastIndexWhere(p: A => Boolean, end: Int): Int = { - var i = length - 1 - val it = reverseIterator - while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1 - i - } - - /** Iterates over distinct permutations. - * - * @return An Iterator which traverses the distinct permutations of this $coll. - * @example `"abb".permutations = Iterator(abb, bab, bba)` - */ - def permutations: Iterator[Repr] = - if (isEmpty) Iterator(repr) - else new PermutationsItr - - /** Iterates over combinations. A _combination_ of length `n` is a subsequence of - * the original sequence, with the elements taken in order. Thus, `"xy"` and `"yy"` - * are both length-2 combinations of `"xyy"`, but `"yx"` is not. If there is - * more than one way to generate the same subsequence, only one will be returned. - * - * For example, `"xyyy"` has three different ways to generate `"xy"` depending on - * whether the first, second, or third `"y"` is selected. However, since all are - * identical, only one will be chosen. Which of the three will be taken is an - * implementation detail that is not defined. 
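A minimal sketch of the sequence queries shown above; `xs` is an invented value.

  val xs = List(3, 1, 4, 1, 5)
  xs.lengthCompare(3) > 0       // true, without forcing a full length computation
  xs.segmentLength(_ < 5, 0)    // 4 -- longest run of elements < 5 starting at index 0
  xs.indexWhere(_ > 3, 1)       // 2 -- first index >= 1 whose element is > 3
  xs.lastIndexWhere(_ == 1, 3)  // 3 -- last index <= 3 satisfying the predicate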
- * - * @return An Iterator which traverses the possible n-element combinations of this $coll. - * @example `"abbbc".combinations(2) = Iterator(ab, ac, bb, bc)` - */ - def combinations(n: Int): Iterator[Repr] = - if (n < 0 || n > size) Iterator.empty - else new CombinationsItr(n) - - private class PermutationsItr extends AbstractIterator[Repr] { - private[this] val (elms, idxs) = init() - private var _hasNext = true - - def hasNext = _hasNext - def next(): Repr = { - if (!hasNext) - Iterator.empty.next() - - val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms - val result = (self.newBuilder ++= forcedElms).result() - var i = idxs.length - 2 - while(i >= 0 && idxs(i) >= idxs(i+1)) - i -= 1 - - if (i < 0) - _hasNext = false - else { - var j = idxs.length - 1 - while(idxs(j) <= idxs(i)) j -= 1 - swap(i,j) - - val len = (idxs.length - i) / 2 - var k = 1 - while (k <= len) { - swap(i+k, idxs.length - k) - k += 1 - } - } - result - } - private def swap(i: Int, j: Int) { - val tmpI = idxs(i) - idxs(i) = idxs(j) - idxs(j) = tmpI - val tmpE = elms(i) - elms(i) = elms(j) - elms(j) = tmpE - } - - private[this] def init() = { - val m = mutable.HashMap[A, Int]() - val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip - - (es.toBuffer, is.toArray) - } - } - - private class CombinationsItr(n: Int) extends AbstractIterator[Repr] { - // generating all nums such that: - // (1) nums(0) + .. + nums(length-1) = n - // (2) 0 <= nums(i) <= cnts(i), where 0 <= i <= cnts.length-1 - private val (elms, cnts, nums) = init() - private val offs = cnts.scanLeft(0)(_ + _) - private var _hasNext = true - - def hasNext = _hasNext - def next(): Repr = { - if (!hasNext) - Iterator.empty.next() - - /* Calculate this result. */ - val buf = self.newBuilder - for(k <- 0 until nums.length; j <- 0 until nums(k)) - buf += elms(offs(k)+j) - val res = buf.result() - - /* Prepare for the next call to next. */ - var idx = nums.length - 1 - while (idx >= 0 && nums(idx) == cnts(idx)) - idx -= 1 - - idx = nums.lastIndexWhere(_ > 0, idx - 1) - - if (idx < 0) - _hasNext = false - else { - // OPT: hand rolled version of `sum = nums.view(idx + 1, nums.length).sum + 1` - var sum = 1 - var i = idx + 1 - while (i < nums.length) { - sum += nums(i) - i += 1 - } - nums(idx) -= 1 - for (k <- (idx+1) until nums.length) { - nums(k) = sum min cnts(k) - sum -= nums(k) - } - } - - res - } - - /** Rearrange seq to newSeq a0a0..a0a1..a1...ak..ak such that - * seq.count(_ == aj) == cnts(j) - * - * @return (newSeq,cnts,nums) - */ - private def init(): (IndexedSeq[A], Array[Int], Array[Int]) = { - val m = mutable.HashMap[A, Int]() - - // e => (e, weight(e)) - val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip - val cs = new Array[Int](m.size) - is foreach (i => cs(i) += 1) - val ns = new Array[Int](cs.length) - - var r = n - 0 until ns.length foreach { k => - ns(k) = r min cs(k) - r -= ns(k) - } - (es.toIndexedSeq, cs, ns) - } - } - - def reverse: Repr = { - var xs: List[A] = List() - for (x <- this) - xs = x :: xs - val b = newBuilder - b.sizeHint(this) - for (x <- xs) - b += x - b.result() - } - - def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - var xs: List[A] = List() - for (x <- this) - xs = x :: xs - val b = bf(repr) - for (x <- xs) - b += f(x) - - b.result() - } - - /** An iterator yielding elements in reversed order. 
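A minimal sketch of the `permutations` and `combinations` iterators implemented above, matching the examples given in their doc comments:

  "abb".permutations.toList             // List("abb", "bab", "bba") -- duplicates appear once
  "abbbc".combinations(2).toList        // List("ab", "ac", "bb", "bc")
  List(1, 2, 2).combinations(2).toList  // List(List(1, 2), List(2, 2))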
- * - * $willNotTerminateInf - * - * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but might be more efficient. - * - * @return an iterator yielding the elements of this $coll in reversed order - */ - def reverseIterator: Iterator[A] = toCollection(reverse).iterator - - def startsWith[B](that: GenSeq[B], offset: Int): Boolean = { - val i = this.iterator drop offset - val j = that.iterator - while (j.hasNext && i.hasNext) - if (i.next != j.next) - return false - - !j.hasNext - } - - def endsWith[B](that: GenSeq[B]): Boolean = { - val i = this.iterator.drop(length - that.length) - val j = that.iterator - while (i.hasNext && j.hasNext) - if (i.next != j.next) - return false - - !j.hasNext - } - - /** Finds first index where this $coll contains a given sequence as a slice. - * $mayNotTerminateInf - * @param that the sequence to test - * @return the first index such that the elements of this $coll starting at this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. - */ - def indexOfSlice[B >: A](that: GenSeq[B]): Int = indexOfSlice(that, 0) - - /** Finds first index after or at a start index where this $coll contains a given sequence as a slice. - * $mayNotTerminateInf - * @param that the sequence to test - * @param from the start index - * @return the first index `>= from` such that the elements of this $coll starting at this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. - */ - def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = - if (this.hasDefiniteSize && that.hasDefiniteSize) { - val l = length - val tl = that.length - val clippedFrom = math.max(0, from) - if (from > l) -1 - else if (tl < 1) clippedFrom - else if (l < tl) -1 - else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, forward = true) - } - else { - var i = from - var s: Seq[A] = thisCollection drop i - while (!s.isEmpty) { - if (s startsWith that) - return i - - i += 1 - s = s.tail - } - -1 - } - - /** Finds last index where this $coll contains a given sequence as a slice. - * $willNotTerminateInf - * @param that the sequence to test - * @return the last index such that the elements of this $coll starting a this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. - */ - def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = lastIndexOfSlice(that, length) - - /** Finds last index before or at a given end index where this $coll contains a given sequence as a slice. - * @param that the sequence to test - * @param end the end index - * @return the last index `<= end` such that the elements of this $coll starting at this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. - */ - def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = { - val l = length - val tl = that.length - val clippedL = math.min(l-tl, end) - - if (end < 0) -1 - else if (tl < 1) clippedL - else if (l < tl) -1 - else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, forward = false) - } - - /** Tests whether this $coll contains a given sequence as a slice. - * $mayNotTerminateInf - * @param that the sequence to test - * @return `true` if this $coll contains a slice with the same elements - * as `that`, otherwise `false`. - */ - def containsSlice[B](that: GenSeq[B]): Boolean = indexOfSlice(that) != -1 - - /** Tests whether this $coll contains a given value as an element. - * $mayNotTerminateInf - * - * @param elem the element to test. 
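A minimal sketch of the slice-search methods shown above; the values are invented.

  val xs = List(1, 2, 3, 2, 3, 4)
  xs.startsWith(List(1, 2))        // true
  xs.endsWith(List(3, 4))          // true
  xs.indexOfSlice(List(2, 3))      // 1
  xs.lastIndexOfSlice(List(2, 3))  // 3
  xs.containsSlice(List(3, 2))     // true
  xs.indexOfSlice(List(4, 5))      // -1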
- * @return `true` if this $coll has an element that is equal (as - * determined by `==`) to `elem`, `false` otherwise. - */ - def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem) - - /** Produces a new sequence which contains all elements of this $coll and also all elements of - * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. - * - * @param that the sequence to add. - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements of this $coll - * followed by all elements of `that`. - * @usecase def union(that: Seq[A]): $Coll[A] - * @inheritdoc - * - * Another way to express this - * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`. - * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets. - * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * followed by all elements of `that`. - */ - override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = - this ++ that - - /** Computes the multiset difference between this $coll and another sequence. - * - * @param that the sequence of elements to remove - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - * @usecase def diff(that: Seq[A]): $Coll[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - */ - def diff[B >: A](that: GenSeq[B]): Repr = { - val occ = occCounts(that.seq) - val b = newBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) b += x - else occ(x) = ox - 1 - } - b.result() - } - - /** Computes the multiset intersection between this $coll and another sequence. - * - * @param that the sequence of elements to intersect with. - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - * @usecase def intersect(that: Seq[A]): $Coll[A] - * @inheritdoc - * - * $mayNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. 
- */ - def intersect[B >: A](that: GenSeq[B]): Repr = { - val occ = occCounts(that.seq) - val b = newBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - b += x - occ(x) = ox - 1 - } - } - b.result() - } - - private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { - val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 } - for (y <- sq) occ(y) += 1 - occ - } - - /** Builds a new $coll from this $coll without any duplicate elements. - * $willNotTerminateInf - * - * @return A new $coll which contains the first occurrence of every element of this $coll. - */ - def distinct: Repr = { - val b = newBuilder - val seen = mutable.HashSet[A]() - for (x <- this) { - if (!seen(x)) { - b += x - seen += x - } - } - b.result() - } - - def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - var i = 0 - val it = this.iterator - while (i < from && it.hasNext) { - b += it.next() - i += 1 - } - b ++= patch.seq - i = replaced - while (i > 0 && it.hasNext) { - it.next() - i -= 1 - } - while (it.hasNext) b += it.next() - b.result() - } - - def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - if (index < 0) throw new IndexOutOfBoundsException(index.toString) - val b = bf(repr) - var i = 0 - val it = this.iterator - while (i < index && it.hasNext) { - b += it.next() - i += 1 - } - if (!it.hasNext) throw new IndexOutOfBoundsException(index.toString) - b += elem - it.next() - while (it.hasNext) b += it.next() - b.result() - } - - def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - b += elem - b ++= thisCollection - b.result() - } - - def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - b ++= thisCollection - b += elem - b.result() - } - - def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - val L = length - b.sizeHint(math.max(L, len)) - var diff = len - L - b ++= thisCollection - while (diff > 0) { - b += elem - diff -= 1 - } - b.result() - } - - def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = { - val i = this.iterator - val j = that.iterator - while (i.hasNext && j.hasNext) - if (!p(i.next(), j.next())) - return false - - !i.hasNext && !j.hasNext - } - - /** Sorts this $coll according to a comparison function. - * $willNotTerminateInf - * - * The sort is stable. That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. - * - * @param lt the comparison function which tests whether - * its first argument precedes its second argument in - * the desired ordering. - * @return a $coll consisting of the elements of this $coll - * sorted according to the comparison function `lt`. - * @example {{{ - * List("Steve", "Tom", "John", "Bob").sortWith(_.compareTo(_) < 0) = - * List("Bob", "John", "Steve", "Tom") - * }}} - */ - def sortWith(lt: (A, A) => Boolean): Repr = sorted(Ordering fromLessThan lt) - - /** Sorts this $Coll according to the Ordering which results from transforming - * an implicitly given Ordering with a transformation function. - * @see [[scala.math.Ordering]] - * $willNotTerminateInf - * @param f the transformation function mapping elements - * to some other domain `B`. - * @param ord the ordering assumed on domain `B`. 
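A minimal sketch of the multiset semantics of `diff`, `intersect` and `distinct` documented above:

  val xs = List(1, 1, 2, 3, 3, 3)
  xs.diff(List(1, 3))          // List(1, 2, 3, 3) -- one occurrence of 1 and of 3 removed
  xs.intersect(List(3, 3, 5))  // List(3, 3)
  xs.distinct                  // List(1, 2, 3)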
- * @tparam B the target type of the transformation `f`, and the type where - * the ordering `ord` is defined. - * @return a $coll consisting of the elements of this $coll - * sorted according to the ordering where `x < y` if - * `ord.lt(f(x), f(y))`. - * - * @example {{{ - * val words = "The quick brown fox jumped over the lazy dog".split(' ') - * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]] - * words.sortBy(x => (x.length, x.head)) - * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped) - * }}} - */ - def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = sorted(ord on f) - - /** Sorts this $coll according to an Ordering. - * - * The sort is stable. That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. - * - * @see [[scala.math.Ordering]] - * - * @param ord the ordering to be used to compare elements. - * @return a $coll consisting of the elements of this $coll - * sorted according to the ordering `ord`. - */ - def sorted[B >: A](implicit ord: Ordering[B]): Repr = { - val len = this.length - val b = newBuilder - if (len == 1) b ++= this - else if (len > 1) { - b.sizeHint(len) - val arr = new Array[AnyRef](len) // Previously used ArraySeq for more compact but slower code - var i = 0 - for (x <- this) { - arr(i) = x.asInstanceOf[AnyRef] - i += 1 - } - java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) - i = 0 - while (i < arr.length) { - b += arr(i).asInstanceOf[A] - i += 1 - } - } - b.result() - } - - /** Converts this $coll to a sequence. - * $willNotTerminateInf - * - * A new collection will not be built; in particular, lazy sequences will stay lazy. - */ - override def toSeq: Seq[A] = thisCollection - - /** Produces the range of all indices of this sequence. - * - * @return a `Range` value from `0` to one less than the length of this $coll. - */ - def indices: Range = 0 until length - - override def view: SeqView[A, Repr] = new SeqView[A, Repr] { - protected lazy val underlying = self.repr - override def iterator = self.iterator - override def length = self.length - override def apply(idx: Int) = self.apply(idx) - } - - override def view(from: Int, until: Int): SeqView[A, Repr] = view.slice(from, until) - - /* Need to override string, so that it's not the Function1's string that gets mixed in. - */ - override def toString = super[IterableLike].toString -} - -/** The companion object for trait `SeqLike`. - */ -object SeqLike { - // KMP search utilities - - /** Make sure a target sequence has fast, correctly-ordered indexing for KMP. 
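A minimal sketch of the sorting members above; the first two calls restate the doc-comment examples, and the sort is stable as documented.

  val words = "The quick brown fox".split(' ')
  words.sortBy(w => (w.length, w.head)).toList                      // List(The, fox, brown, quick)
  List("Steve", "Tom", "John", "Bob").sortWith(_.compareTo(_) < 0)  // List(Bob, John, Steve, Tom)
  List(3, 1, 2).sorted                                              // List(1, 2, 3)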
- * - * @author Rex Kerr - * @since 2.10 - * @param W The target sequence - * @param n0 The first element in the target sequence that we should use - * @param n1 The far end of the target sequence that we should use (exclusive) - * @return Target packed in an IndexedSeq (taken from iterator unless W already is an IndexedSeq) - */ - private def kmpOptimizeWord[B](W: Seq[B], n0: Int, n1: Int, forward: Boolean) = W match { - case iso: IndexedSeq[_] => - // Already optimized for indexing--use original (or custom view of original) - if (forward && n0==0 && n1==W.length) iso.asInstanceOf[IndexedSeq[B]] - else if (forward) new AbstractSeq[B] with IndexedSeq[B] { - val length = n1 - n0 - def apply(x: Int) = iso(n0 + x).asInstanceOf[B] - } - else new AbstractSeq[B] with IndexedSeq[B] { - def length = n1 - n0 - def apply(x: Int) = iso(n1 - 1 - x).asInstanceOf[B] - } - case _ => - // W is probably bad at indexing. Pack in array (in correct orientation) - // Would be marginally faster to special-case each direction - new AbstractSeq[B] with IndexedSeq[B] { - private[this] val Warr = new Array[AnyRef](n1-n0) - private[this] val delta = if (forward) 1 else -1 - private[this] val done = if (forward) n1-n0 else -1 - val wit = W.iterator.drop(n0) - var i = if (forward) 0 else (n1-n0-1) - while (i != done) { - Warr(i) = wit.next().asInstanceOf[AnyRef] - i += delta - } - - val length = n1 - n0 - def apply(x: Int) = Warr(x).asInstanceOf[B] - } - } - - /** Make a jump table for KMP search. - * - * @author paulp, Rex Kerr - * @since 2.10 - * @param Wopt The target sequence, as at least an IndexedSeq - * @param wlen Just in case we're only IndexedSeq and not IndexedSeqOptimized - * @return KMP jump table for target sequence - */ - private def kmpJumpTable[B](Wopt: IndexedSeq[B], wlen: Int) = { - val arr = new Array[Int](wlen) - var pos = 2 - var cnd = 0 - arr(0) = -1 - arr(1) = 0 - while (pos < wlen) { - if (Wopt(pos-1) == Wopt(cnd)) { - arr(pos) = cnd + 1 - pos += 1 - cnd += 1 - } - else if (cnd > 0) { - cnd = arr(cnd) - } - else { - arr(pos) = 0 - pos += 1 - } - } - arr - } - - /** A KMP implementation, based on the undoubtedly reliable wikipedia entry. - * Note: I made this private to keep it from entering the API. That can be reviewed. - * - * @author paulp, Rex Kerr - * @since 2.10 - * @param S Sequence that may contain target - * @param m0 First index of S to consider - * @param m1 Last index of S to consider (exclusive) - * @param W Target sequence - * @param n0 First index of W to match - * @param n1 Last index of W to match (exclusive) - * @param forward Direction of search (from beginning==true, from end==false) - * @return Index of start of sequence if found, -1 if not (relative to beginning of S, not m0). - */ - private def kmpSearch[B](S: Seq[B], m0: Int, m1: Int, W: Seq[B], n0: Int, n1: Int, forward: Boolean): Int = { - // Check for redundant case when target has single valid element - def clipR(x: Int, y: Int) = if (x < y) x else -1 - def clipL(x: Int, y: Int) = if (x > y) x else -1 - - if (n1 == n0+1) { - if (forward) - clipR(S.indexOf(W(n0), m0), m1) - else - clipL(S.lastIndexOf(W(n0), m1-1), m0-1) - } - - // Check for redundant case when both sequences are same size - else if (m1-m0 == n1-n0) { - // Accepting a little slowness for the uncommon case. 
- if (S.view.slice(m0, m1) == W.view.slice(n0, n1)) m0 - else -1 - } - // Now we know we actually need KMP search, so do it - else S match { - case xs: IndexedSeq[_] => - // We can index into S directly; it should be adequately fast - val Wopt = kmpOptimizeWord(W, n0, n1, forward) - val T = kmpJumpTable(Wopt, n1-n0) - var i, m = 0 - val zero = if (forward) m0 else m1-1 - val delta = if (forward) 1 else -1 - while (i+m < m1-m0) { - if (Wopt(i) == S(zero+delta*(i+m))) { - i += 1 - if (i == n1-n0) return (if (forward) m+m0 else m1-m-i) - } - else { - val ti = T(i) - m += i - ti - if (i > 0) i = ti - } - } - -1 - case _ => - // We had better not index into S directly! - val iter = S.iterator.drop(m0) - val Wopt = kmpOptimizeWord(W, n0, n1, forward = true) - val T = kmpJumpTable(Wopt, n1-n0) - val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind - var largest = 0 - var i, m = 0 - var answer = -1 - while (m+m0+n1-n0 <= m1) { - while (i+m >= largest) { - cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef] - largest += 1 - } - if (Wopt(i) == cache((i+m)%(n1-n0))) { - i += 1 - if (i == n1-n0) { - if (forward) return m+m0 - else { - i -= 1 - answer = m+m0 - val ti = T(i) - m += i - ti - if (i > 0) i = ti - } - } - } - else { - val ti = T(i) - m += i - ti - if (i > 0) i = ti - } - } - answer - } - } - - /** Finds a particular index at which one sequence occurs in another sequence. - * Both the source sequence and the target sequence are expressed in terms - * other sequences S' and T' with offset and length parameters. This - * function is designed to wrap the KMP machinery in a sufficiently general - * way that all library sequence searches can use it. It is unlikely you - * have cause to call it directly: prefer functions such as StringBuilder#indexOf - * and Seq#lastIndexOf. - * - * @param source the sequence to search in - * @param sourceOffset the starting offset in source - * @param sourceCount the length beyond sourceOffset to search - * @param target the sequence being searched for - * @param targetOffset the starting offset in target - * @param targetCount the length beyond targetOffset which makes up the target string - * @param fromIndex the smallest index at which the target sequence may start - * - * @return the applicable index in source where target exists, or -1 if not found - */ - def indexOf[B]( - source: Seq[B], sourceOffset: Int, sourceCount: Int, - target: Seq[B], targetOffset: Int, targetCount: Int, - fromIndex: Int - ): Int = { - // Fiddle with variables to match previous behavior and use kmpSearch - // Doing LOTS of max/min, both clearer and faster to use math._ - val slen = source.length - val clippedFrom = math.max(0, fromIndex) - val s0 = math.min(slen, sourceOffset + clippedFrom) - val s1 = math.min(slen, s0 + sourceCount) - val tlen = target.length - val t0 = math.min(tlen, targetOffset) - val t1 = math.min(tlen, t0 + targetCount) - - // Error checking - if (clippedFrom > slen-sourceOffset) -1 // Cannot return an index in range - else if (t1 - t0 < 1) s0 // Empty, matches first available position - else if (s1 - s0 < t1 - t0) -1 // Source is too short to find target - else { - // Nontrivial search - val ans = kmpSearch(source, s0, s1, target, t0, t1, forward = true) - if (ans < 0) ans else ans - math.min(slen, sourceOffset) - } - } - - /** Finds a particular index at which one sequence occurs in another sequence. - * Like `indexOf`, but finds the latest occurrence rather than earliest. 
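A worked illustration of the private `kmpJumpTable` above, tracing the algorithm as written rather than any public API:

  // For the word "ABABAC" the table built by kmpJumpTable is:
  //   index:  0   1   2   3   4   5
  //   char :  A   B   A   B   A   C
  //   table: -1   0   0   1   2   3
  // so a mismatch after matching "ABABA" resumes at pattern position 3 instead of restarting.
  // The machinery is only reached through the public slice searches, e.g.
  "ABABABAC".indexOfSlice("ABABAC")   // 2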
- * - * @see [[scala.collection.SeqLike]], method `indexOf` - */ - def lastIndexOf[B]( - source: Seq[B], sourceOffset: Int, sourceCount: Int, - target: Seq[B], targetOffset: Int, targetCount: Int, - fromIndex: Int - ): Int = { - // Fiddle with variables to match previous behavior and use kmpSearch - // Doing LOTS of max/min, both clearer and faster to use math._ - val slen = source.length - val tlen = target.length - val s0 = math.min(slen, sourceOffset) - val s1 = math.min(slen, s0 + sourceCount) - val clippedFrom = math.min(s1 - s0, fromIndex) - val t0 = math.min(tlen, targetOffset) - val t1 = math.min(tlen, t0 + targetCount) - val fixed_s1 = math.min(s1, s0 + clippedFrom + (t1 - t0) - 1) - - // Error checking - if (clippedFrom < 0) -1 // Cannot return an index in range - else if (t1 - t0 < 1) s0+clippedFrom // Empty, matches last available position - else if (fixed_s1 - s0 < t1 - t0) -1 // Source is too short to find target - else { - // Nontrivial search - val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, forward = false) - if (ans < 0) ans else ans - s0 - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/SeqProxy.scala b/tests/scala2-library/src/library/scala/collection/SeqProxy.scala deleted file mode 100644 index f2b39c7b55ff..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SeqProxy.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -/** This trait implements a proxy for sequence objects. It forwards - * all calls to a different sequence object. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SeqProxy[+A] extends Seq[A] with SeqProxyLike[A, Seq[A]] diff --git a/tests/scala2-library/src/library/scala/collection/SeqProxyLike.scala b/tests/scala2-library/src/library/scala/collection/SeqProxyLike.scala deleted file mode 100644 index b493c707968e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SeqProxyLike.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ - -// Methods could be printed by cat SeqLike.scala | egrep '^ (override )?def' - - -/** This trait implements a proxy for sequences. It forwards - * all calls to a different sequence. 
- * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, Repr] with IterableProxyLike[A, Repr] { - override def size = self.size - override def toSeq: Seq[A] = self.toSeq - override def length: Int = self.length - override def apply(idx: Int): A = self.apply(idx) - override def lengthCompare(len: Int): Int = self.lengthCompare(len) - override def isDefinedAt(x: Int): Boolean = self.isDefinedAt(x) - override def segmentLength(p: A => Boolean, from: Int): Int = self.segmentLength(p, from) - override def prefixLength(p: A => Boolean) = self.prefixLength(p) - override def indexWhere(p: A => Boolean): Int = self.indexWhere(p) - override def indexWhere(p: A => Boolean, from: Int): Int = self.indexWhere(p, from) - override def indexOf[B >: A](elem: B): Int = self.indexOf(elem) - override def indexOf[B >: A](elem: B, from: Int): Int = self.indexOf(elem, from) - override def lastIndexOf[B >: A](elem: B): Int = self.lastIndexOf(elem) - override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem == _, end) - override def lastIndexWhere(p: A => Boolean): Int = self.lastIndexWhere(p, length - 1) - override def lastIndexWhere(p: A => Boolean, end: Int): Int = self.lastIndexWhere(p) - override def reverse: Repr = self.reverse - override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.reverseMap(f)(bf) - override def reverseIterator: Iterator[A] = self.reverseIterator - override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = self.startsWith(that, offset) - override def startsWith[B](that: GenSeq[B]): Boolean = self.startsWith(that) - override def endsWith[B](that: GenSeq[B]): Boolean = self.endsWith(that) - override def indexOfSlice[B >: A](that: GenSeq[B]): Int = self.indexOfSlice(that) - override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = self.indexOfSlice(that) - override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that) - override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end) - override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1 - override def contains[A1 >: A](elem: A1): Boolean = self.contains(elem) - override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf) - override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that) - override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that) - override def distinct: Repr = self.distinct - override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf) - override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.updated(index, elem)(bf) - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.+:(elem)(bf) - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.:+(elem)(bf) - override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.padTo(len, elem)(bf) - override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p) - override def sortWith(lt: (A, A) => Boolean): Repr = 
self.sortWith(lt) - override def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = self.sortBy(f)(ord) - override def sorted[B >: A](implicit ord: Ordering[B]): Repr = self.sorted(ord) - override def indices: Range = self.indices - override def view = self.view - override def view(from: Int, until: Int) = self.view(from, until) -} - - diff --git a/tests/scala2-library/src/library/scala/collection/SeqView.scala b/tests/scala2-library/src/library/scala/collection/SeqView.scala deleted file mode 100644 index eb1fa35cb339..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SeqView.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ -import TraversableView.NoBuilder - -/** A base trait for non-strict views of sequences. - * $seqViewInfo - */ -trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]] - -/** An object containing the necessary implicit definitions to make - * `SeqView`s work. Its definitions are generally not accessed directly by clients. - */ -object SeqView { - type Coll = TraversableView[_, _ <: Traversable[_]] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] = - new CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/SeqViewLike.scala b/tests/scala2-library/src/library/scala/collection/SeqViewLike.scala deleted file mode 100644 index aae777424b9b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SeqViewLike.scala +++ /dev/null @@ -1,276 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import Seq.fill - -/** A template trait for non-strict views of sequences. - * $seqViewInfo - * - * @define seqViewInfo - * $viewInfo - * All views for sequences are defined by re-interpreting the `length` and - * `apply` methods. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - * @tparam This the type of the view itself - */ -trait SeqViewLike[+A, - +Coll, - +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]] - extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] -{ self => - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. 
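A minimal sketch of the non-strict behaviour that `SeqView` and `SeqViewLike` provide through the transformer traits that follow; the values are invented.

  val v = (1 to 10).view.map(_ * 2).filter(_ > 10)  // nothing is computed yet
  v.take(2).force                                   // Vector(12, 14), computed on demand
  (1 to 10).view.reverse.head                       // 10, via the reversed transformer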
*/ - private[collection] abstract class AbstractTransformedS[+B] extends Seq[B] with super[IterableViewLike].TransformedI[B] with TransformedS[B] - - trait TransformedS[+B] extends SeqView[B, Coll] with super.TransformedI[B] { - def length: Int - def apply(idx: Int): B - override def toString = viewToString - } - - trait EmptyViewS extends TransformedS[Nothing] with super.EmptyViewI { - final override def length = 0 - final override def apply(n: Int) = Nil(n) - } - - trait ForcedS[B] extends super.ForcedI[B] with TransformedS[B] { - def length = forced.length - def apply(idx: Int) = forced.apply(idx) - } - - trait SlicedS extends super.SlicedI with TransformedS[A] { - def length = iterator.size - def apply(idx: Int): A = - if (idx >= 0 && idx + from < until) self.apply(idx + from) - else throw new IndexOutOfBoundsException(idx.toString) - - override def foreach[U](f: A => U) = iterator foreach f - override def iterator: Iterator[A] = self.iterator drop from take endpoints.width - } - - trait MappedS[B] extends super.MappedI[B] with TransformedS[B] { - def length = self.length - def apply(idx: Int): B = mapping(self(idx)) - } - - trait FlatMappedS[B] extends super.FlatMappedI[B] with TransformedS[B] { - protected[this] lazy val index = { - val index = new Array[Int](self.length + 1) - index(0) = 0 - for (i <- 0 until self.length) // note that if the mapping returns a list, performance is bad, bad - index(i + 1) = index(i) + mapping(self(i)).seq.size - index - } - protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = { - val mid = (lo + hi) / 2 - if (idx < index(mid)) findRow(idx, lo, mid - 1) - else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi) - else mid - } - def length = index(self.length) - def apply(idx: Int) = { - if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString) - val row = findRow(idx, 0, self.length - 1) - mapping(self(row)).seq.toSeq(idx - index(row)) - } - } - - trait AppendedS[B >: A] extends super.AppendedI[B] with TransformedS[B] { - protected[this] lazy val restSeq = rest.toSeq - def length = self.length + restSeq.length - def apply(idx: Int) = - if (idx < self.length) self(idx) else restSeq(idx - self.length) - } - - trait PrependedS[B >: A] extends super.PrependedI[B] with TransformedS[B] { - protected[this] lazy val fstSeq = fst.toSeq - def length: Int = fstSeq.length + self.length - def apply(idx: Int): B = - if (idx < fstSeq.length) fstSeq(idx) - else self.apply(idx - fstSeq.length) - } - - trait FilteredS extends super.FilteredI with TransformedS[A] { - protected[this] lazy val index = { - var len = 0 - val arr = new Array[Int](self.length) - for (i <- 0 until self.length) - if (pred(self(i))) { - arr(len) = i - len += 1 - } - arr take len - } - def length = index.length - def apply(idx: Int) = self(index(idx)) - } - - trait TakenWhileS extends super.TakenWhileI with TransformedS[A] { - protected[this] lazy val len = self prefixLength pred - def length = len - def apply(idx: Int) = - if (idx < len) self(idx) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait DroppedWhileS extends super.DroppedWhileI with TransformedS[A] { - protected[this] lazy val start = self prefixLength pred - def length = self.length - start - def apply(idx: Int) = - if (idx >= 0) self(idx + start) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait ZippedS[B] extends super.ZippedI[B] with TransformedS[(A, B)] { - protected[this] lazy val thatSeq = other.seq.toSeq - /* Have to be careful here - other may be an 
infinite sequence. */ - def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length - def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx)) - } - - trait ZippedAllS[A1 >: A, B] extends super.ZippedAllI[A1, B] with TransformedS[(A1, B)] { - protected[this] lazy val thatSeq = other.seq.toSeq - def length: Int = self.length max thatSeq.length - def apply(idx: Int) = - (if (idx < self.length) self.apply(idx) else thisElem, - if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem) - } - - trait ReversedS extends TransformedS[A] { - override def iterator: Iterator[A] = createReversedIterator - def length: Int = self.length - def apply(idx: Int): A = self.apply(length - 1 - idx) - final override protected[this] def viewIdentifier = "R" - - private def createReversedIterator = { - var lst = List[A]() - for (elem <- self) lst ::= elem - lst.iterator - } - } - - // Note--for this to work, must ensure 0 <= from and 0 <= replaced - // Must also take care to allow patching inside an infinite stream - // (patching in an infinite stream is not okay) - trait PatchedS[B >: A] extends TransformedS[B] { - protected[this] lazy val from: Int - protected[this] lazy val patch: GenSeq[B] - protected[this] lazy val replaced: Int - private lazy val plen = patch.length - override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced) - def length: Int = { - val len = self.length - val pre = math.min(from, len) - val post = math.max(0, len - pre - replaced) - pre + plen + post - } - def apply(idx: Int): B = { - val actualFrom = if (self.lengthCompare(from) < 0) self.length else from - if (idx < actualFrom) self.apply(idx) - else if (idx < actualFrom + plen) patch.apply(idx - actualFrom) - else self.apply(idx - plen + replaced) - } - final override protected[this] def viewIdentifier = "P" - } - - /** Boilerplate method, to override in each subclass - * This method could be eliminated if Scala had virtual classes - */ - protected override def newForced[B](xs: => GenSeq[B]): TransformedS[B] = new AbstractTransformedS[B] with ForcedS[B] { lazy val forced = xs } - protected override def newAppended[B >: A](that: GenTraversable[B]): TransformedS[B] = new AbstractTransformedS[B] with AppendedS[B] { lazy val rest = that } - protected override def newPrepended[B >: A](that: GenTraversable[B]): TransformedS[B] = new AbstractTransformedS[B] with PrependedS[B] { lazy protected[this] val fst = that } - protected override def newMapped[B](f: A => B): TransformedS[B] = new AbstractTransformedS[B] with MappedS[B] { lazy val mapping = f } - protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): TransformedS[B] = new AbstractTransformedS[B] with FlatMappedS[B] { lazy val mapping = f } - protected override def newFiltered(p: A => Boolean): TransformedS[A] = new AbstractTransformedS[A] with FilteredS { lazy val pred = p } - protected override def newSliced(_endpoints: SliceInterval): TransformedS[A] = new AbstractTransformedS[A] with SlicedS { lazy val endpoints = _endpoints } - protected override def newDroppedWhile(p: A => Boolean): TransformedS[A] = new AbstractTransformedS[A] with DroppedWhileS { lazy val pred = p } - protected override def newTakenWhile(p: A => Boolean): TransformedS[A] = new AbstractTransformedS[A] with TakenWhileS { lazy val pred = p } - protected override def newZipped[B](that: GenIterable[B]): TransformedS[(A, B)] = new AbstractTransformedS[(A, B)] with ZippedS[B] { lazy val other = that } - protected override def 
newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): TransformedS[(A1, B)] = new AbstractTransformedS[(A1, B)] with ZippedAllS[A1, B] { - lazy val other = that - lazy val thisElem = _thisElem - lazy val thatElem = _thatElem - } - protected def newReversed: TransformedS[A] = new AbstractTransformedS[A] with ReversedS - protected def newPatched[B >: A](_from: Int, _patch: GenSeq[B], _replaced: Int): TransformedS[B] = new AbstractTransformedS[B] with PatchedS[B] { - lazy val from = _from - lazy val patch = _patch - lazy val replaced = _replaced - } - - // see comment in IterableViewLike. - protected override def newTaken(n: Int): TransformedS[A] = newSliced(SliceInterval(0, n)) - protected override def newDropped(n: Int): TransformedS[A] = newSliced(SliceInterval(n, Int.MaxValue)) - - override def reverse: This = newReversed.asInstanceOf[This] - - override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = { - // Be careful to not evaluate the entire sequence! Patch should work (slowly, perhaps) on infinite streams. - val nonNegFrom = math.max(0,from) - val nonNegRep = math.max(0,replaced) - newPatched(nonNegFrom, patch, nonNegRep).asInstanceOf[That] -// was: val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newPatched(from, patch, replaced).asInstanceOf[That] -// else super.patch[B, That](from, patch, replaced)(bf) - } - - override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = - patch(length, fill(len - length)(elem), 0) - - override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = - reverse map f - - override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = { - require(0 <= index && index < length) // !!! can't call length like this. 
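The view transformers above (MappedS, PatchedS, ReversedS and friends) only stack wrappers over the underlying sequence; nothing is computed until the view is forced. A minimal sketch of that behaviour, assuming the Scala 2.12 collection library these sources come from (the object name SeqViewPatchDemo is illustrative only, not part of the removed file):
{{{
object SeqViewPatchDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3, 4, 5)
    // map and patch only add transformer layers; no element is computed yet
    val v = xs.view.map(_ * 10).patch(1, List(0), 2)
    println(v.toList)               // List(10, 0, 40, 50)
    println(xs.view.reverse.toList) // List(5, 4, 3, 2, 1), via the ReversedS transformer
  }
}
}}}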
- patch(index, List(elem), 1)(bf) - } - - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = - newPrepended(elem :: Nil).asInstanceOf[That] - - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = - ++(Iterator.single(elem))(bf) - - override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newForced(thisSeq union that).asInstanceOf[That] - - override def diff[B >: A](that: GenSeq[B]): This = - newForced(thisSeq diff that).asInstanceOf[This] - - override def intersect[B >: A](that: GenSeq[B]): This = - newForced(thisSeq intersect that).asInstanceOf[This] - - override def sorted[B >: A](implicit ord: Ordering[B]): This = - newForced(thisSeq sorted ord).asInstanceOf[This] - - override def sortWith(lt: (A, A) => Boolean): This = - newForced(thisSeq sortWith lt).asInstanceOf[This] - - override def sortBy[B](f: (A) => B)(implicit ord: Ordering[B]): This = - newForced(thisSeq sortBy f).asInstanceOf[This] - - override def combinations(n: Int): Iterator[This] = - (thisSeq combinations n).map(as => newForced(as).asInstanceOf[This]) - - override def permutations: Iterator[This] = - thisSeq.permutations.map(as => newForced(as).asInstanceOf[This]) - - override def distinct: This = - newForced(thisSeq.distinct).asInstanceOf[This] - - override def stringPrefix = "SeqView" -} diff --git a/tests/scala2-library/src/library/scala/collection/Set.scala b/tests/scala2-library/src/library/scala/collection/Set.scala deleted file mode 100644 index f74c26571abc..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Set.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ - -/** A base trait for all sets, mutable as well as immutable. - * - * $setNote - * '''Implementation note:''' If your additions and mutations return the same kind of set as the set - * you are defining, you should inherit from `SetLike` as well. - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - */ -trait Set[A] extends (A => Boolean) - with Iterable[A] - with GenSet[A] - with GenericSetTemplate[A, Set] - with SetLike[A, Set[A]] { - override def companion: GenericCompanion[Set] = Set - - override def seq: Set[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is one of `EmptySet`, `Set1`, `Set2`, `Set3`, `Set4` in - * class `immutable.Set` for sets of sizes up to 4, and a `immutable.HashSet` for sets of larger sizes. - * @define coll set - * @define Coll `Set` - */ -object Set extends SetFactory[Set] { - def newBuilder[A] = immutable.Set.newBuilder[A] - override def empty[A]: Set[A] = immutable.Set.empty[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A] -} - -/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A] diff --git a/tests/scala2-library/src/library/scala/collection/SetLike.scala b/tests/scala2-library/src/library/scala/collection/SetLike.scala deleted file mode 100644 index 440452ce990c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SetLike.scala +++ /dev/null @@ -1,257 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.{ Builder, SetBuilder } -import scala.annotation.migration -import parallel.ParSet - -/** A template trait for sets. - * - * $setNote - * '''Implementation note:''' - * This trait provides most of the operations of a `Set` independently of its representation. - * It is typically inherited by concrete implementations of sets. - * $setTags - * @since 2.8 - * - * @define setNote - * - * A set is a collection that contains no duplicate elements. - * - * To implement a concrete set, you need to provide implementations of the - * following methods: - * {{{ - * def contains(key: A): Boolean - * def iterator: Iterator[A] - * def +(elem: A): This - * def -(elem: A): This - * }}} - * If you wish that methods like `take`, `drop`, - * `filter` return the same kind of set, you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * - * @define setTags - * @tparam A the type of the elements of the set - * @tparam This the type of the set itself. - * - * @author Martin Odersky - * @version 2.8 - * - * @define coll set - * @define Coll Set - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait SetLike[A, +This <: SetLike[A, This] with Set[A]] -extends IterableLike[A, This] - with GenSetLike[A, This] - with Subtractable[A, This] - with Parallelizable[A, ParSet[A]] -{ -self => - - /** The empty set of the same type as this set - * @return an empty set of type `This`. - */ - def empty: This - - /** A common implementation of `newBuilder` for all sets in terms - * of `empty`. Overridden for mutable sets in - * - * `mutable.SetLike`. - */ - override protected[this] def newBuilder: Builder[A, This] = new SetBuilder[A, This](empty) - - protected[this] override def parCombiner = ParSet.newCombiner[A] - - // Default collection type appropriate for immutable collections; mutable collections override this - override def toSeq: Seq[A] = { - if (isEmpty) Vector.empty[A] - else { - val vb = Vector.newBuilder[A] - foreach(vb += _) - vb.result - } - } - - override def toBuffer[A1 >: A]: mutable.Buffer[A1] = { - val result = new mutable.ArrayBuffer[A1](size) - // Faster to let the map iterate itself than to defer through copyToBuffer - foreach(result += _) - result - } - - // note: this is only overridden here to add the migration annotation, - // which I hope to turn into an Xlint style warning as the migration aspect - // is not central to its importance. - @migration("Set.map now returns a Set, so it will discard duplicate values.", "2.8.0") - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = super.map(f)(bf) - - /** Tests if some element is contained in this set. - * - * @param elem the element to test for membership. - * @return `true` if `elem` is contained in this set, `false` otherwise. 
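The @migration note above is easy to observe: because map on a Set builds another Set, mapped values that collide are collapsed. A small sketch, assuming the Scala 2.12 library (SetMapDemo is an illustrative name):
{{{
object SetMapDemo {
  def main(args: Array[String]): Unit = {
    val words = Set("foo", "bar", "baz")
    println(words.map(_.length))        // Set(3): equal lengths collapse into one element
    println(words.toList.map(_.length)) // List(3, 3, 3): converting to a List first keeps all of them
  }
}
}}}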
- */ - def contains(elem: A): Boolean - - /** Creates a new set with an additional element, unless the element is - * already present. - * - * @param elem the element to be added - * @return a new set that contains all elements of this set and that also - * contains `elem`. - */ - def + (elem: A): This - - /** Creates a new $coll with additional elements, omitting duplicates. - * - * This method takes two or more elements to be added. Elements that already exist in the $coll will - * not be added. Another overloaded variant of this method handles the case where a single element is added. - * - * Example: - * {{{ - * scala> val a = Set(1, 3) + 2 + 3 - * a: scala.collection.immutable.Set[Int] = Set(1, 3, 2) - * }}} - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new $coll with the given elements added, omitting duplicates. - */ - def + (elem1: A, elem2: A, elems: A*): This = this + elem1 + elem2 ++ elems - - /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates. - * - * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll. - * - * Example: - * {{{ - * scala> val a = Set(1, 2) ++ Set(2, "a") - * a: scala.collection.immutable.Set[Any] = Set(1, 2, a) - * }}} - * - * @param elems the collection containing the elements to add. - * @return a new $coll with the given elements added, omitting duplicates. - */ - def ++ (elems: GenTraversableOnce[A]): This = (repr /: elems.seq)(_ + _) - - /** Creates a new set with a given element removed from this set. - * - * @param elem the element to be removed - * @return a new set that contains all elements of this set but that does not - * contain `elem`. - */ - def - (elem: A): This - - /** Tests if this set is empty. - * - * @return `true` if there is no element in the set, `false` otherwise. - */ - override def isEmpty: Boolean = size == 0 - - /** Computes the union between of set and another set. - * - * @param that the set to form the union with. - * @return a new set consisting of all elements that are in this - * set or in the given set `that`. - */ - def union(that: GenSet[A]): This = this ++ that - - /** Computes the difference of this set and another set. - * - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. - */ - def diff(that: GenSet[A]): This = this -- that - - /** An iterator over all subsets of this set of the given size. - * If the requested size is impossible, an empty iterator is returned. - * - * @param len the size of the subsets. - * @return the iterator. - */ - def subsets(len: Int): Iterator[This] = { - if (len < 0 || len > size) Iterator.empty - else new SubsetsItr(self.toIndexedSeq, len) - } - - /** An iterator over all subsets of this set. - * - * @return the iterator. - */ - def subsets(): Iterator[This] = new AbstractIterator[This] { - private val elms = self.toIndexedSeq - private var len = 0 - private var itr: Iterator[This] = Iterator.empty - - def hasNext = len <= elms.size || itr.hasNext - def next = { - if (!itr.hasNext) { - if (len > elms.size) Iterator.empty.next() - else { - itr = new SubsetsItr(elms, len) - len += 1 - } - } - - itr.next() - } - } - - /** An Iterator including all subsets containing exactly len elements. 
- * If the elements in 'This' type is ordered, then the subsets will also be in the same order. - * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}} - * - * @author Eastsun - * @date 2010.12.6 - */ - private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[This] { - private val idxs = Array.range(0, len+1) - private var _hasNext = true - idxs(len) = elms.size - - def hasNext = _hasNext - def next(): This = { - if (!hasNext) Iterator.empty.next() - - val buf = self.newBuilder - idxs.slice(0, len) foreach (idx => buf += elms(idx)) - val result = buf.result() - - var i = len - 1 - while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 - - if (i < 0) _hasNext = false - else { - idxs(i) += 1 - for (j <- (i+1) until len) - idxs(j) = idxs(j-1) + 1 - } - - result - } - } - - /** Defines the prefix of this object's `toString` representation. - * @return a string representation which starts the result of `toString` applied to this set. - * Unless overridden this is simply `"Set"`. - */ - override def stringPrefix: String = "Set" - override def toString = super[IterableLike].toString - -} diff --git a/tests/scala2-library/src/library/scala/collection/SetProxy.scala b/tests/scala2-library/src/library/scala/collection/SetProxy.scala deleted file mode 100644 index 4a3fc17a78b1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SetProxy.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -/** This is a simple wrapper class for [[scala.collection.Set]]. - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.0, 01/01/2007 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] diff --git a/tests/scala2-library/src/library/scala/collection/SetProxyLike.scala b/tests/scala2-library/src/library/scala/collection/SetProxyLike.scala deleted file mode 100644 index fa23fe545026..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SetProxyLike.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -// Methods could be printed by cat SetLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for sets. It forwards - * all calls to a different set. 
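The element-addition and subsets operations documented above can be exercised as follows; a sketch assuming the Scala 2.12 library (SetOpsDemo is an illustrative name):
{{{
object SetOpsDemo {
  def main(args: Array[String]): Unit = {
    val a = Set(1, 3) + 2 + 3                    // adding an element already present is a no-op
    println(a == Set(1, 2, 3))                   // true
    println((a ++ Set(3, 4)) == Set(1, 2, 3, 4)) // true: ++ skips duplicates
    println((a diff Set(3)) == Set(1, 2))        // true
    // all 2-element subsets, produced by SubsetsItr: {1,2}, {1,3}, {2,3}
    println(Set(1, 2, 3).subsets(2).map(_.toList.sorted).toList)
  }
}
}}}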
- * - * @author Martin Odersky - * @version 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A, This] with IterableProxyLike[A, This] { - def empty: This - override def contains(elem: A): Boolean = self.contains(elem) - override def + (elem: A) = self.+(elem) - override def - (elem: A) = self.-(elem) - override def isEmpty: Boolean = self.isEmpty - override def apply(elem: A): Boolean = self.apply(elem) - override def intersect(that: GenSet[A]) = self.intersect(that) - override def &(that: GenSet[A]): This = self.&(that) - override def union(that: GenSet[A]): This = self.union(that) - override def | (that: GenSet[A]): This = self.|(that) - override def diff(that: GenSet[A]): This = self.diff(that) - override def &~(that: GenSet[A]): This = self.&~(that) - override def subsetOf(that: GenSet[A]): Boolean = self.subsetOf(that) -} diff --git a/tests/scala2-library/src/library/scala/collection/SortedMap.scala b/tests/scala2-library/src/library/scala/collection/SortedMap.scala deleted file mode 100644 index 36e7eae79c77..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SortedMap.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.Builder - -/** A map whose keys are sorted. - * - * @author Sean McDirmid - * @author Martin Odersky - * @version 2.8 - * @since 2.4 - */ -trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B]] { - /** Needs to be overridden in subclasses. */ - override def empty: SortedMap[A, B] = SortedMap.empty[A, B] - - override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] = - immutable.SortedMap.newBuilder[A, B] -} - -/** - * @since 2.8 - */ -object SortedMap extends SortedMapFactory[SortedMap] { - def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord) - - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B] - - private[collection] trait Default[A, +B] extends SortedMap[A, B] { - self => - override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = { - val b = SortedMap.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } - - override def - (key: A): SortedMap[A, B] = { - val b = newBuilder - for (kv <- this; if kv._1 != key) b += kv - b.result() - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/SortedMapLike.scala b/tests/scala2-library/src/library/scala/collection/SortedMapLike.scala deleted file mode 100644 index cf5e9c36c759..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SortedMapLike.scala +++ /dev/null @@ -1,122 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ - -/** A template for maps whose keys are sorted. - * To create a concrete sorted map, you need to implement the rangeImpl method, - * in addition to those of `MapLike`. 
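A brief sketch of the SortedMap contract described above, namely that keys are always traversed according to the implicit Ordering; Scala 2.12 is assumed and SortedMapDemo is an illustrative name:
{{{
import scala.collection.immutable.SortedMap

object SortedMapDemo {
  def main(args: Array[String]): Unit = {
    val m = SortedMap(3 -> "c", 1 -> "a", 2 -> "b")
    println(m.keys.toList)             // List(1, 2, 3): keys come back in key order
    println((m + (0 -> "z")).firstKey) // 0
    println((m - 2).keys.toList)       // List(1, 3)
  }
}
}}}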
- * - * @author Sean McDirmid - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -trait SortedMapLike[A, +B, +This <: SortedMapLike[A, B, This] with SortedMap[A, B]] extends Sorted[A, This] with MapLike[A, B, This] { -self => - - def firstKey : A = head._1 - def lastKey : A = last._1 - - implicit def ordering: Ordering[A] - - // XXX: implement default version - def rangeImpl(from : Option[A], until : Option[A]) : This - - override def keySet : SortedSet[A] = new DefaultKeySortedSet - - protected class DefaultKeySortedSet extends super.DefaultKeySet with SortedSet[A] { - implicit def ordering = self.ordering - override def + (elem: A): SortedSet[A] = (SortedSet[A]() ++ this + elem) - override def - (elem: A): SortedSet[A] = (SortedSet[A]() ++ this - elem) - override def rangeImpl(from : Option[A], until : Option[A]) : SortedSet[A] = { - val map = self.rangeImpl(from, until) - new map.DefaultKeySortedSet - } - override def keysIteratorFrom(start: A) = self.keysIteratorFrom(start) - } - - /** Add a key/value pair to this map. - * @param key the key - * @param value the value - * @return A new map with the new binding added to this map - */ - override def updated[B1 >: B](key: A, value: B1): SortedMap[A, B1] = this+((key, value)) - - /** Add a key/value pair to this map. - * @param kv the key/value pair - * @return A new map with the new binding added to this map - */ - def + [B1 >: B] (kv: (A, B1)): SortedMap[A, B1] - - // todo: Add generic +,-, and so on. - - /** Adds two or more elements to this collection and returns - * either the collection itself (if it is mutable), or a new collection - * with the added elements. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = { - var m = this + elem1 + elem2 - for (e <- elems) m = m + e - m - } - - override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) - override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} - override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p - override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v} - } - - override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) - override def iteratorFrom(start: A) = (self iteratorFrom start) map {case (k,v) => (k, f(v))} - override def keysIteratorFrom(start: A) = self keysIteratorFrom start - override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f - } - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object. 
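The range projection, the filterKeys/mapValues wrappers and iteratorFrom defined above all preserve the key ordering. A minimal sketch under the same Scala 2.12 assumption (SortedMapViewsDemo is an illustrative name):
{{{
import scala.collection.immutable.SortedMap

object SortedMapViewsDemo {
  def main(args: Array[String]): Unit = {
    val m = SortedMap(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")
    println(m.range(2, 4).keys.toList)            // List(2, 3): the upper bound is exclusive
    println(m.filterKeys(_ % 2 == 0).keys.toList) // List(2, 4): keys filtered lazily, still sorted
    println(m.iteratorFrom(3).toList)             // List((3,c), (4,d))
  }
}
}}}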
- */ - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = - ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) - - /** - * Creates an iterator over all the key/value pairs - * contained in this map having a key greater than or - * equal to `start` according to the ordering of - * this map. x.iteratorFrom(y) is equivalent - * to but often more efficient than x.from(y).iterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def iteratorFrom(start: A): Iterator[(A, B)] - /** - * Creates an iterator over all the values contained in this - * map that are associated with a key greater than or equal to `start` - * according to the ordering of this map. x.valuesIteratorFrom(y) is - * equivalent to but often more efficient than - * x.from(y).valuesIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def valuesIteratorFrom(start: A): Iterator[B] -} diff --git a/tests/scala2-library/src/library/scala/collection/SortedSet.scala b/tests/scala2-library/src/library/scala/collection/SortedSet.scala deleted file mode 100644 index 0fa5ce09666a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SortedSet.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -import generic._ - -/** A sorted set. - * - * @author Sean McDirmid - * @author Martin Odersky - * @version 2.8 - * @since 2.4 - */ -trait SortedSet[A] extends Set[A] with SortedSetLike[A, SortedSet[A]] { - /** Needs to be overridden in subclasses. */ - override def empty: SortedSet[A] = SortedSet.empty[A] -} - -/** - * @since 2.8 - */ -object SortedSet extends SortedSetFactory[SortedSet] { - def empty[A](implicit ord: Ordering[A]): immutable.SortedSet[A] = immutable.SortedSet.empty[A](ord) - def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A] - // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific - override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom -} diff --git a/tests/scala2-library/src/library/scala/collection/SortedSetLike.scala b/tests/scala2-library/src/library/scala/collection/SortedSetLike.scala deleted file mode 100644 index 657423834f23..000000000000 --- a/tests/scala2-library/src/library/scala/collection/SortedSetLike.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -import generic._ - -/** A template for sets which are sorted. 
- * - * @author Sean McDirmid - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -trait SortedSetLike[A, +This <: SortedSet[A] with SortedSetLike[A, This]] extends Sorted[A, This] with SetLike[A, This] { -self => - - implicit def ordering: Ordering[A] - - override def keySet = repr - - override def firstKey: A = head - override def lastKey: A = last - - def rangeImpl(from: Option[A], until: Option[A]): This - - override def from(from: A): This = rangeImpl(Some(from), None) - override def until(until: A): This = rangeImpl(None, Some(until)) - override def range(from: A, until: A): This = rangeImpl(Some(from), Some(until)) - - override def subsetOf(that: GenSet[A]): Boolean = that match { - // TODO: It may actually be pretty rare that the guard here ever - // passes. Is this really worth keeping? If it is, we should add - // more sensible implementations of == to Ordering. - case that: SortedSet[A] if that.ordering == ordering => that.hasAll(this.iterator) - case that => super.subsetOf(that) - } - - /** - * Creates an iterator that contains all values from this collection - * greater than or equal to `start` according to the ordering of - * this collection. x.iteratorFrom(y) is equivalent to but will usually - * be more efficient than x.from(y).iterator - * - * @param start The lower-bound (inclusive) of the iterator - */ - def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start) -} diff --git a/tests/scala2-library/src/library/scala/collection/Traversable.scala b/tests/scala2-library/src/library/scala/collection/Traversable.scala deleted file mode 100644 index 8145eaa20419..000000000000 --- a/tests/scala2-library/src/library/scala/collection/Traversable.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.Builder -import scala.util.control.Breaks - -/** A trait for traversable collections. - * All operations are guaranteed to be performed in a single-threaded manner. 
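Likewise for sorted sets: from, until, range and iteratorFrom above are defined in terms of rangeImpl and respect the element ordering. A sketch assuming Scala 2.12 (SortedSetDemo is an illustrative name):
{{{
import scala.collection.immutable.SortedSet

object SortedSetDemo {
  def main(args: Array[String]): Unit = {
    val s = SortedSet(5, 1, 4, 2, 3)
    println(s.from(3).toList)         // List(3, 4, 5)
    println(s.until(3).toList)        // List(1, 2)
    println(s.range(2, 5).toList)     // List(2, 3, 4)
    println(s.iteratorFrom(4).toList) // List(4, 5)
  }
}
}}}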
- * - * $traversableInfo - */ -trait Traversable[+A] extends TraversableLike[A, Traversable[A]] - with GenTraversable[A] - with TraversableOnce[A] - with GenericTraversableTemplate[A, Traversable] { - override def companion: GenericCompanion[Traversable] = Traversable - - override def seq: Traversable[A] = this - - /* The following methods are inherited from TraversableLike - * - override def isEmpty: Boolean - override def size: Int - override def hasDefiniteSize - override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Traversable[A], B, That]): That - override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That - override def filter(p: A => Boolean): Traversable[A] - override def remove(p: A => Boolean): Traversable[A] - override def partition(p: A => Boolean): (Traversable[A], Traversable[A]) - override def groupBy[K](f: A => K): Map[K, Traversable[A]] - override def foreach[U](f: A => U): Unit - override def forall(p: A => Boolean): Boolean - override def exists(p: A => Boolean): Boolean - override def count(p: A => Boolean): Int - override def find(p: A => Boolean): Option[A] - override def foldLeft[B](z: B)(op: (B, A) => B): B - override def /: [B](z: B)(op: (B, A) => B): B - override def foldRight[B](z: B)(op: (A, B) => B): B - override def :\ [B](z: B)(op: (A, B) => B): B - override def reduceLeft[B >: A](op: (B, A) => B): B - override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] - override def reduceRight[B >: A](op: (A, B) => B): B - override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] - override def head: A - override def headOption: Option[A] - override def tail: Traversable[A] - override def last: A - override def lastOption: Option[A] - override def init: Traversable[A] - override def take(n: Int): Traversable[A] - override def drop(n: Int): Traversable[A] - override def slice(from: Int, until: Int): Traversable[A] - override def takeWhile(p: A => Boolean): Traversable[A] - override def dropWhile(p: A => Boolean): Traversable[A] - override def span(p: A => Boolean): (Traversable[A], Traversable[A]) - override def splitAt(n: Int): (Traversable[A], Traversable[A]) - override def copyToBuffer[B >: A](dest: Buffer[B]) - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) - override def copyToArray[B >: A](xs: Array[B], start: Int) - override def toArray[B >: A : ClassTag]: Array[B] - override def toList: List[A] - override def toIterable: Iterable[A] - override def toSeq: Seq[A] - override def toStream: Stream[A] - override def sortWith(lt : (A,A) => Boolean): Traversable[A] - override def mkString(start: String, sep: String, end: String): String - override def mkString(sep: String): String - override def mkString: String - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder - override def addString(b: StringBuilder, sep: String): StringBuilder - override def addString(b: StringBuilder): StringBuilder - override def toString - override def stringPrefix : String - override def view - override def view(from: Int, until: Int): TraversableView[A, Traversable[A]] - */ -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. 
- */ -object Traversable extends TraversableFactory[Traversable] { self => - - /** Provides break functionality separate from client code */ - private[collection] val breaks: Breaks = new Breaks - - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, Traversable[A]] = immutable.Traversable.newBuilder[A] -} - -/** Explicit instantiation of the `Traversable` trait to reduce class file size in subclasses. */ -abstract class AbstractTraversable[+A] extends Traversable[A] diff --git a/tests/scala2-library/src/library/scala/collection/TraversableLike.scala b/tests/scala2-library/src/library/scala/collection/TraversableLike.scala deleted file mode 100644 index c80313957f8d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/TraversableLike.scala +++ /dev/null @@ -1,808 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.Builder -import scala.annotation.migration -import scala.annotation.unchecked.{ uncheckedVariance => uV } -import parallel.ParIterable -import scala.language.higherKinds - -/** A template trait for traversable collections of type `Traversable[A]`. - * - * $traversableInfo - * @define mutability - * @define traversableInfo - * This is a base trait of all kinds of $mutability Scala collections. It - * implements the behavior common to all collections, in terms of a method - * `foreach` with signature: - * {{{ - * def foreach[U](f: Elem => U): Unit - * }}} - * Collection classes mixing in this trait provide a concrete - * `foreach` method which traverses all the - * elements contained in the collection, applying a given function to each. - * They also need to provide a method `newBuilder` - * which creates a builder for collections of the same kind. - * - * A traversable class might or might not have two properties: strictness - * and orderedness. Neither is represented as a type. - * - * The instances of a strict collection class have all their elements - * computed before they can be used as values. By contrast, instances of - * a non-strict collection class may defer computation of some of their - * elements until after the instance is available as a value. - * A typical example of a non-strict collection class is a - * [[scala.collection.immutable.Stream]]. - * A more general class of examples are `TraversableViews`. - * - * If a collection is an instance of an ordered collection class, traversing - * its elements with `foreach` will always visit elements in the - * same order, even for different runs of the program. If the class is not - * ordered, `foreach` can visit elements in different orders for - * different runs (but it will keep the same order in the same run).' - * - * A typical example of a collection class which is not ordered is a - * `HashMap` of objects. The traversal order for hash maps will - * depend on the hash codes of its elements, and these hash codes might - * differ from one run to the next. By contrast, a `LinkedHashMap` - * is ordered because its `foreach` method visits elements in the - * order they were inserted into the `HashMap`. 
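The strict versus non-strict distinction drawn in the TraversableLike documentation above can be observed directly: a strict collection runs the mapping function eagerly, while a view defers it. A sketch assuming Scala 2.12 (StrictnessDemo is an illustrative name):
{{{
object StrictnessDemo {
  def main(args: Array[String]): Unit = {
    var evaluated = 0
    def f(i: Int): Int = { evaluated += 1; i * 2 }

    List(1, 2, 3).map(f)              // strict: f runs for every element right away
    println(evaluated)                // 3

    evaluated = 0
    val v = List(1, 2, 3).view.map(f) // non-strict: nothing evaluated yet
    println(evaluated)                // 0
    v.head                            // forces only the first element
    println(evaluated)                // 1
  }
}
}}}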
- * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @tparam A the element type of the collection - * @tparam Repr the type of the actual collection containing the elements. - * - * @define Coll Traversable - * @define coll traversable collection - */ -trait TraversableLike[+A, +Repr] extends Any - with HasNewBuilder[A, Repr] - with FilterMonadic[A, Repr] - with TraversableOnce[A] - with GenTraversableLike[A, Repr] - with Parallelizable[A, ParIterable[A]] -{ - self => - - import Traversable.breaks._ - - /** The type implementing this traversable */ - protected[this] type Self = Repr - - /** The collection of type $coll underlying this `TraversableLike` object. - * By default this is implemented as the `TraversableLike` object itself, - * but this can be overridden. - */ - def repr: Repr = this.asInstanceOf[Repr] - - final def isTraversableAgain: Boolean = true - - /** The underlying collection seen as an instance of `$Coll`. - * By default this is implemented as the current collection object itself, - * but this can be overridden. - */ - protected[this] def thisCollection: Traversable[A] = this.asInstanceOf[Traversable[A]] - - /** A conversion from collections of type `Repr` to `$Coll` objects. - * By default this is implemented as just a cast, but this can be overridden. - */ - protected[this] def toCollection(repr: Repr): Traversable[A] = repr.asInstanceOf[Traversable[A]] - - /** Creates a new builder for this collection type. - */ - protected[this] def newBuilder: Builder[A, Repr] - - protected[this] def parCombiner = ParIterable.newCombiner[A] - - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - * - * Note: this method underlies the implementation of most other bulk operations. - * It's important to implement this method in an efficient way. - * - */ - def foreach[U](f: A => U): Unit - - /** Tests whether this $coll is empty. - * - * @return `true` if the $coll contain no elements, `false` otherwise. - */ - def isEmpty: Boolean = { - var result = true - breakable { - for (x <- this) { - result = false - break - } - } - result - } - - def hasDefiniteSize = true - - def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.seq.size) - b ++= thisCollection - b ++= that.seq - b.result - } - - /** As with `++`, returns a new collection containing the elements from the left operand followed by the - * elements from the right operand. - * - * It differs from `++` in that the right operand determines the type of - * the resulting collection rather than the left one. - * Mnemonic: the COLon is on the side of the new COLlection type. - * - * @param that the traversable to append. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements - * of this $coll followed by all elements of `that`. 
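The point made above about ++: is that the right operand determines the result type, whereas ++ keeps the left operand's type. A compact sketch (Scala 2.12 assumed; AppendPrependDemo is an illustrative name):
{{{
object AppendPrependDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2)
    val ys = Vector(3, 4)
    val a = xs ++ ys  // left operand decides:  List(1, 2, 3, 4)
    val b = xs ++: ys // right operand decides: Vector(1, 2, 3, 4)
    println(a)
    println(b)
  }
}
}}}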
- * - * @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B] - * @inheritdoc - * - * Example: - * {{{ - * scala> val x = List(1) - * x: List[Int] = List(1) - * - * scala> val y = LinkedList(2) - * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2) - * - * scala> val z = x ++: y - * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * }}} - * - * @return a new $coll which contains all elements of this $coll - * followed by all elements of `that`. - */ - def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.size) - b ++= that - b ++= thisCollection - b.result - } - - /** As with `++`, returns a new collection containing the elements from the - * left operand followed by the elements from the right operand. - * - * It differs from `++` in that the right operand determines the type of - * the resulting collection rather than the left one. - * Mnemonic: the COLon is on the side of the new COLlection type. - * - * Example: - * {{{ - * scala> val x = List(1) - * x: List[Int] = List(1) - * - * scala> val y = LinkedList(2) - * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2) - * - * scala> val z = x ++: y - * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * }}} - * - * This overload exists because: for the implementation of `++:` we should - * reuse that of `++` because many collections override it with more - * efficient versions. - * - * Since `TraversableOnce` has no `++` method, we have to implement that - * directly, but `Traversable` and down can use the overload. - * - * @param that the traversable to append. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements - * of this $coll followed by all elements of `that`. - */ - def ++:[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = - (that ++ seq)(breakOut) - - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - def builder = { // extracted to keep method size under 35 bytes, so that it can be JIT-inlined - val b = bf(repr) - b.sizeHint(this) - b - } - val b = builder - for (x <- this) b += f(x) - b.result - } - - def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - def builder = bf(repr) // extracted to keep method size under 35 bytes, so that it can be JIT-inlined - val b = builder - for (x <- this) b ++= f(x).seq - b.result - } - - private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = { - val b = newBuilder - for (x <- this) - if (p(x) != isFlipped) b += x - - b.result - } - - /** Selects all elements of this $coll which satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new $coll consisting of all elements of this $coll that satisfy the given - * predicate `p`. The order of the elements is preserved. - */ - def filter(p: A => Boolean): Repr = filterImpl(p, isFlipped = false) - - /** Selects all elements of this $coll which do not satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new $coll consisting of all elements of this $coll that do not satisfy the given - * predicate `p`. The order of the elements is preserved. 
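map, flatMap, filter and filterNot above all funnel elements into a builder (obtained from the implicit CanBuildFrom, or from newBuilder for the filters); filter and filterNot share filterImpl and differ only in the isFlipped flag. A sketch assuming Scala 2.12 (FilterMapDemo is an illustrative name):
{{{
object FilterMapDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3, 4)
    println(xs.map(_ * 2))                // List(2, 4, 6, 8)
    println(xs.flatMap(i => List(i, -i))) // List(1, -1, 2, -2, 3, -3, 4, -4)
    println(xs.filter(_ % 2 == 0))        // List(2, 4)
    println(xs.filterNot(_ % 2 == 0))     // List(1, 3): same traversal, flipped predicate
  }
}
}}}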
- */ - def filterNot(p: A => Boolean): Repr = filterImpl(p, isFlipped = true) - - def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - foreach(pf.runWith(b += _)) - b.result - } - - /** Builds a new collection by applying an option-valued function to all - * elements of this $coll on which the function is defined. - * - * @param f the option-valued function which filters and maps the $coll. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the option-valued function - * `f` to each element and collecting all defined results. - * The order of the elements is preserved. - * - * @usecase def filterMap[B](f: A => Option[B]): $Coll[B] - * @inheritdoc - * - * @param pf the partial function which filters and maps the $coll. - * @return a new $coll resulting from applying the given option-valued function - * `f` to each element and collecting all defined results. - * The order of the elements is preserved. - def filterMap[B, That](f: A => Option[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - for (x <- this) - f(x) match { - case Some(y) => b += y - case _ => - } - b.result - } - */ - - /** Partitions this $coll in two ${coll}s according to a predicate. - * - * @param p the predicate on which to partition. - * @return a pair of ${coll}s: the first $coll consists of all elements that - * satisfy the predicate `p` and the second $coll consists of all elements - * that don't. The relative order of the elements in the resulting ${coll}s - * is the same as in the original $coll. - */ - def partition(p: A => Boolean): (Repr, Repr) = { - val l, r = newBuilder - for (x <- this) (if (p(x)) l else r) += x - (l.result, r.result) - } - - def groupBy[K](f: A => K): immutable.Map[K, Repr] = { - val m = mutable.Map.empty[K, Builder[A, Repr]] - for (elem <- this) { - val key = f(elem) - val bldr = m.getOrElseUpdate(key, newBuilder) - bldr += elem - } - val b = immutable.Map.newBuilder[K, Repr] - for ((k, v) <- m) - b += ((k, v.result)) - - b.result - } - - def forall(p: A => Boolean): Boolean = { - var result = true - breakable { - for (x <- this) - if (!p(x)) { result = false; break } - } - result - } - - /** Tests whether a predicate holds for at least one element of this $coll. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `false` if this $coll is empty, otherwise `true` if the given predicate `p` - * holds for some of the elements of this $coll, otherwise `false` - */ - def exists(p: A => Boolean): Boolean = { - var result = false - breakable { - for (x <- this) - if (p(x)) { result = true; break } - } - result - } - - def find(p: A => Boolean): Option[A] = { - var result: Option[A] = None - breakable { - for (x <- this) - if (p(x)) { result = Some(x); break } - } - result - } - - def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That = scanLeft(z)(op) - - def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - b.sizeHint(this, 1) - var acc = z - b += acc - for (x <- this) { acc = op(acc, x); b += acc } - b.result - } - - @migration("The behavior of `scanRight` has changed. 
The previous behavior can be reproduced with scanRight.reverse.", "2.9.0") - def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - var scanned = List(z) - var acc = z - for (x <- reversed) { - acc = op(x, acc) - scanned ::= acc - } - val b = bf(repr) - for (elem <- scanned) b += elem - b.result - } - - /** Selects the first element of this $coll. - * $orderDependent - * @return the first element of this $coll. - * @throws NoSuchElementException if the $coll is empty. - */ - def head: A = { - var result: () => A = () => throw new NoSuchElementException - breakable { - for (x <- this) { - result = () => x - break - } - } - result() - } - - /** Optionally selects the first element. - * $orderDependent - * @return the first element of this $coll if it is nonempty, - * `None` if it is empty. - */ - def headOption: Option[A] = if (isEmpty) None else Some(head) - - /** Selects all elements except the first. - * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the first one. - * @throws `UnsupportedOperationException` if the $coll is empty. - */ - override def tail: Repr = { - if (isEmpty) throw new UnsupportedOperationException("empty.tail") - drop(1) - } - - /** Selects the last element. - * $orderDependent - * @return The last element of this $coll. - * @throws NoSuchElementException If the $coll is empty. - */ - def last: A = { - var lst = head - for (x <- this) - lst = x - lst - } - - /** Optionally selects the last element. - * $orderDependent - * @return the last element of this $coll$ if it is nonempty, - * `None` if it is empty. - */ - def lastOption: Option[A] = if (isEmpty) None else Some(last) - - /** Selects all elements except the last. - * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the last one. - * @throws UnsupportedOperationException if the $coll is empty. - */ - def init: Repr = { - if (isEmpty) throw new UnsupportedOperationException("empty.init") - var lst = head - var follow = false - val b = newBuilder - b.sizeHint(this, -1) - for (x <- this) { - if (follow) b += lst - else follow = true - lst = x - } - b.result - } - - def take(n: Int): Repr = slice(0, n) - - def drop(n: Int): Repr = - if (n <= 0) { - val b = newBuilder - b.sizeHint(this) - (b ++= thisCollection).result - } - else sliceWithKnownDelta(n, Int.MaxValue, -n) - - def slice(from: Int, until: Int): Repr = - sliceWithKnownBound(scala.math.max(from, 0), until) - - // Precondition: from >= 0, until > 0, builder already configured for building. 
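The partitioning, grouping and scanning operations above in one short sketch (Scala 2.12 assumed; PartitionScanDemo is an illustrative name). Note that scanRight follows the post-2.9 behaviour flagged by the @migration annotation:
{{{
object PartitionScanDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3, 4, 5)
    println(xs.partition(_ % 2 == 0)) // (List(2, 4),List(1, 3, 5))
    println(xs.groupBy(_ % 2))        // Map(0 -> List(2, 4), 1 -> List(1, 3, 5)), entry order unspecified
    println(xs.scanLeft(0)(_ + _))    // List(0, 1, 3, 6, 10, 15)
    println(xs.scanRight(0)(_ + _))   // List(15, 14, 12, 9, 5, 0)
  }
}
}}}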
- private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = { - var i = 0 - breakable { - for (x <- this) { - if (i >= from) b += x - i += 1 - if (i >= until) break - } - } - b.result - } - // Precondition: from >= 0 - private[scala] def sliceWithKnownDelta(from: Int, until: Int, delta: Int): Repr = { - val b = newBuilder - if (until <= from) b.result - else { - b.sizeHint(this, delta) - sliceInternal(from, until, b) - } - } - // Precondition: from >= 0 - private[scala] def sliceWithKnownBound(from: Int, until: Int): Repr = { - val b = newBuilder - if (until <= from) b.result - else { - b.sizeHintBounded(until - from, this) - sliceInternal(from, until, b) - } - } - - def takeWhile(p: A => Boolean): Repr = { - val b = newBuilder - breakable { - for (x <- this) { - if (!p(x)) break - b += x - } - } - b.result - } - - def dropWhile(p: A => Boolean): Repr = { - val b = newBuilder - var go = false - for (x <- this) { - if (!go && !p(x)) go = true - if (go) b += x - } - b.result - } - - def span(p: A => Boolean): (Repr, Repr) = { - val l, r = newBuilder - var toLeft = true - for (x <- this) { - toLeft = toLeft && p(x) - (if (toLeft) l else r) += x - } - (l.result, r.result) - } - - def splitAt(n: Int): (Repr, Repr) = { - val l, r = newBuilder - l.sizeHintBounded(n, this) - if (n >= 0) r.sizeHint(this, -n) - var i = 0 - for (x <- this) { - (if (i < n) l else r) += x - i += 1 - } - (l.result, r.result) - } - - /** Iterates over the tails of this $coll. The first value will be this - * $coll and the final one will be an empty $coll, with the intervening - * values the results of successive applications of `tail`. - * - * @return an iterator over all the tails of this $coll - * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` - */ - def tails: Iterator[Repr] = iterateUntilEmpty(_.tail) - - /** Iterates over the inits of this $coll. The first value will be this - * $coll and the final one will be an empty $coll, with the intervening - * values the results of successive applications of `init`. - * - * @return an iterator over all the inits of this $coll - * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` - */ - def inits: Iterator[Repr] = iterateUntilEmpty(_.init) - - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - var i = start - val end = (start + len) min xs.length - breakable { - for (x <- this) { - if (i >= end) break - xs(i) = x - i += 1 - } - } - } - - @deprecatedOverriding("Enforce contract of toTraversable that if it is Traversable it returns itself.", "2.11.0") - def toTraversable: Traversable[A] = thisCollection - - def toIterator: Iterator[A] = toStream.iterator - def toStream: Stream[A] = toBuffer.toStream - // Override to provide size hint. - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { - val b = cbf() - b.sizeHint(this) - b ++= thisCollection - b.result - } - - /** Converts this $coll to a string. - * - * @return a string representation of this collection. By default this - * string consists of the `stringPrefix` of this $coll, followed - * by all elements separated by commas and enclosed in parentheses. - */ - override def toString = mkString(stringPrefix + "(", ", ", ")") - - /** Defines the prefix of this object's `toString` representation. - * - * @return a string representation which starts the result of `toString` - * applied to this $coll. By default the string prefix is the - * simple name of the collection class $coll. 
- */ - def stringPrefix: String = { - /* This method is written in a style that avoids calling `String.split()` - * as well as methods of java.lang.Character that require the Unicode - * database information. This is mostly important for Scala.js, so that - * using the collection library does automatically bring java.util.regex.* - * and the Unicode database in the generated code. - * - * This algorithm has the additional benefit that it won't allocate - * anything except the result String in the common case, where the class - * is not an inner class (i.e., when the result contains no '.'). - */ - val fqn = repr.getClass.getName - var pos: Int = fqn.length - 1 - - // Skip trailing $'s - while (pos != -1 && fqn.charAt(pos) == '$') { - pos -= 1 - } - if (pos == -1 || fqn.charAt(pos) == '.') { - return "" - } - - var result: String = "" - while (true) { - // Invariant: if we enter the loop, there is a non-empty part - - // Look for the beginning of the part, remembering where was the last non-digit - val partEnd = pos + 1 - while (pos != -1 && fqn.charAt(pos) <= '9' && fqn.charAt(pos) >= '0') { - pos -= 1 - } - val lastNonDigit = pos - while (pos != -1 && fqn.charAt(pos) != '$' && fqn.charAt(pos) != '.') { - pos -= 1 - } - val partStart = pos + 1 - - // A non-last part which contains only digits marks a method-local part -> drop the prefix - if (pos == lastNonDigit && partEnd != fqn.length) { - return result - } - - // Skip to the next part, and determine whether we are the end - while (pos != -1 && fqn.charAt(pos) == '$') { - pos -= 1 - } - val atEnd = pos == -1 || fqn.charAt(pos) == '.' - - // Handle the actual content of the part (we ignore parts that are likely synthetic) - def isPartLikelySynthetic = { - val firstChar = fqn.charAt(partStart) - (firstChar > 'Z' && firstChar < 0x7f) || (firstChar < 'A') - } - if (atEnd || !isPartLikelySynthetic) { - val part = fqn.substring(partStart, partEnd) - result = if (result.isEmpty) part else part + '.' + result - if (atEnd) - return result - } - } - - // dead code - result - } - - /** Creates a non-strict view of this $coll. - * - * @return a non-strict view of this $coll. - */ - def view: TraversableView[A, Repr] = new TraversableView[A, Repr] { - protected lazy val underlying = self.repr - override def foreach[U](f: A => U) = self foreach f - } - - /** Creates a non-strict view of a slice of this $coll. - * - * Note: the difference between `view` and `slice` is that `view` produces - * a view of the current $coll, whereas `slice` produces a new $coll. - * - * Note: `view(from, to)` is equivalent to `view.slice(from, to)` - * $orderDependent - * - * @param from the index of the first element of the view - * @param until the index of the element following the view - * @return a non-strict view of a slice of this $coll, starting at index `from` - * and extending up to (but not including) index `until`. - */ - def view(from: Int, until: Int): TraversableView[A, Repr] = view.slice(from, until) - - /** Creates a non-strict filter of this $coll. - * - * Note: the difference between `c filter p` and `c withFilter p` is that - * the former creates a new collection, whereas the latter only - * restricts the domain of subsequent `map`, `flatMap`, `foreach`, - * and `withFilter` operations. - * $orderDependent - * - * @param p the predicate used to test elements. - * @return an object of class `WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. 
- * All these operations apply to those elements of this $coll - * which satisfy the predicate `p`. - */ - def withFilter(p: A => Boolean): FilterMonadic[A, Repr] = new WithFilter(p) - - /** A class supporting filtered operations. Instances of this class are - * returned by method `withFilter`. - */ - class WithFilter(p: A => Boolean) extends FilterMonadic[A, Repr] { - - /** Builds a new collection by applying a function to all elements of the - * outer $coll containing this `WithFilter` instance that satisfy predicate `p`. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying - * the given function `f` to each element of the outer $coll - * that satisfies predicate `p` and collecting the results. - * - * @usecase def map[B](f: A => B): $Coll[B] - * @inheritdoc - * - * @return a new $coll resulting from applying the given function - * `f` to each element of the outer $coll that satisfies - * predicate `p` and collecting the results. - */ - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - for (x <- self) - if (p(x)) b += f(x) - b.result - } - - /** Builds a new collection by applying a function to all elements of the - * outer $coll containing this `WithFilter` instance that satisfy - * predicate `p` and concatenating the results. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying - * the given collection-valued function `f` to each element - * of the outer $coll that satisfies predicate `p` and - * concatenating the results. - * - * @usecase def flatMap[B](f: A => TraversableOnce[B]): $Coll[B] - * @inheritdoc - * - * The type of the resulting collection will be guided by the static type - * of the outer $coll. - * - * @return a new $coll resulting from applying the given - * collection-valued function `f` to each element of the - * outer $coll that satisfies predicate `p` and concatenating - * the results. - */ - def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - for (x <- self) - if (p(x)) b ++= f(x).seq - b.result - } - - /** Applies a function `f` to all elements of the outer $coll containing - * this `WithFilter` instance that satisfy predicate `p`. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - */ - def foreach[U](f: A => U): Unit = - for (x <- self) - if (p(x)) f(x) - - /** Further refines the filter for this $coll. - * - * @param q the predicate used to test elements. - * @return an object of class `WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this $coll which - * satisfy the predicate `q` in addition to the predicate `p`. - */ - def withFilter(q: A => Boolean): WithFilter = - new WithFilter(x => p(x) && q(x)) - } - - // A helper for tails and inits. 
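withFilter above builds no intermediate collection; it only restricts the subsequent map, flatMap and foreach, which is what a guarded for-comprehension desugars to, and chaining withFilter further refines the predicate as documented. A sketch assuming Scala 2.12 (WithFilterDemo is an illustrative name):
{{{
object WithFilterDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3, 4, 5, 6)
    val viaFor  = for (x <- xs if x > 2 && x % 2 == 0) yield x * x
    val chained = xs.withFilter(_ > 2).withFilter(_ % 2 == 0).map(x => x * x)
    println(viaFor)  // List(16, 36)
    println(chained) // List(16, 36)
  }
}
}}}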
- private def iterateUntilEmpty(f: Traversable[A @uV] => Traversable[A @uV]): Iterator[Repr] = { - val it = Iterator.iterate(thisCollection)(f) takeWhile (x => !x.isEmpty) - it ++ Iterator(Nil) map (x => (newBuilder ++= x).result) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/TraversableOnce.scala b/tests/scala2-library/src/library/scala/collection/TraversableOnce.scala deleted file mode 100644 index 59f9afa4742f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/TraversableOnce.scala +++ /dev/null @@ -1,477 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import mutable.{ Buffer, Builder, ArrayBuffer } -import generic.CanBuildFrom -import scala.annotation.unchecked.{ uncheckedVariance => uV } -import scala.language.{implicitConversions, higherKinds} -import scala.reflect.ClassTag - -/** A template trait for collections which can be traversed either once only - * or one or more times. - * $traversableonceinfo - * - * @author Martin Odersky - * @author Paul Phillips - * @version 2.8 - * @since 2.8 - * - * @define coll traversable or iterator - * - * @tparam A the element type of the collection - * - * @define traversableonceinfo - * This trait exists primarily to eliminate code duplication between - * `Iterator` and `Traversable`, and thus implements some of the common - * methods that can be implemented solely in terms of foreach without - * access to a `Builder`. It also includes a number of abstract methods - * whose implementations are provided by `Iterator`, `Traversable`, etc. - * It contains implementations common to `Iterators` and - * `Traversables`, such as folds, conversions, and other operations which - * traverse some or all of the elements and return a derived value. - * Directly subclassing `TraversableOnce` is not recommended - instead, - * consider declaring an `Iterator` with a `next` and `hasNext` method or - * creating an `Iterator` with one of the methods on the `Iterator` object. - * Consider declaring a subclass of `Traversable` instead if the elements - * can be traversed repeatedly. - * - * @define coll traversable or iterator - * @define orderDependent - * - * Note: might return different results for different runs, unless the underlying collection type is ordered. - * @define orderDependentFold - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered or the operator is associative - * and commutative. - * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { - self => - - //TODO 2.12: Remove these methods. They are already defined in GenTraversableOnce - /* Self-documenting abstract methods. */ - def foreach[U](f: A => U): Unit - def isEmpty: Boolean - def hasDefiniteSize: Boolean - - // Note: We could redefine this in TraversableLike to always return `repr` - // of type `Repr`, only if `Repr` had type bounds, which it doesn't, because - // not all `Repr` are a subtype `TraversableOnce[A]`. - // The alternative is redefining it for maps, sets and seqs. 
For concrete implementations - // we don't have to do this anyway, since they are leaves in the inheritance hierarchy. - // Note 2: This is implemented in all collections _not_ inheriting `Traversable[A]` - // at least indirectly. Currently, these are `ArrayOps` and `StringOps`. - // It is also implemented in `TraversableOnce[A]`. - /** A version of this collection with all - * of the operations implemented sequentially (i.e., in a single-threaded manner). - * - * This method returns a reference to this collection. In parallel collections, - * it is redefined to return a sequential implementation of this collection. In - * both cases, it has O(1) complexity. - * - * @return a sequential view of the collection. - */ - def seq: TraversableOnce[A] - - // Presently these are abstract because the Traversable versions use - // breakable/break, and I wasn't sure enough of how that's supposed to - // function to consolidate them with the Iterator versions. - def forall(p: A => Boolean): Boolean - def exists(p: A => Boolean): Boolean - def find(p: A => Boolean): Option[A] - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit - - // for internal use - protected[this] def reversed = { - var elems: List[A] = Nil - self foreach (elems ::= _) - elems - } - - def size: Int = { - var result = 0 - for (x <- self) result += 1 - result - } - - def nonEmpty: Boolean = !isEmpty - - def count(p: A => Boolean): Int = { - var cnt = 0 - for (x <- this) - if (p(x)) cnt += 1 - - cnt - } - - /** Finds the first element of the $coll for which the given partial - * function is defined, and applies the partial function to it. - * - * $mayNotTerminateInf - * $orderDependent - * - * @param pf the partial function - * @return an option value containing pf applied to the first - * value for which it is defined, or `None` if none exists. - * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)` - */ - def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = { - // TODO 2.12 -- move out alternate implementations into child classes - val i: Iterator[A] = self match { - case it: Iterator[A] => it - case _: GenIterable[_] => self.toIterator // If it might be parallel, be sure to .seq or use iterator! - case _ => // Not parallel, not iterable--just traverse - self.foreach(pf.runWith(b => return Some(b))) - return None - } - // Presumably the fastest way to get in and out of a partial function is for a sentinel function to return itself - // (Tested to be lower-overhead than runWith. Would be better yet to not need to (formally) allocate it--change in 2.12.) - val sentinel: Function1[A, Any] = new scala.runtime.AbstractFunction1[A, Any]{ def apply(a: A) = this } - while (i.hasNext) { - val x = pf.applyOrElse(i.next, sentinel) - if (x.asInstanceOf[AnyRef] ne sentinel) return Some(x.asInstanceOf[B]) - } - None - } - - def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op) - - def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op) - - def foldLeft[B](z: B)(op: (B, A) => B): B = { - var result = z - this foreach (x => result = op(result, x)) - result - } - - def foldRight[B](z: B)(op: (A, B) => B): B = - reversed.foldLeft(z)((x, y) => op(y, x)) - - /** Applies a binary operator to all elements of this $coll, - * going left to right. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. 
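 *                 (For example, `List(1, 2, 3).reduceLeft(_ - _)` groups as
 *                 `(1 - 2) - 3` and yields `-4`, a quick sketch of the
 *                 left-to-right evaluation.)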
- * @return the result of inserting `op` between consecutive elements of this $coll, - * going left to right: - * {{{ - * op( op( ... op(x_1, x_2) ..., x_{n-1}), x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * @throws UnsupportedOperationException if this $coll is empty. */ - def reduceLeft[B >: A](op: (B, A) => B): B = { - if (isEmpty) - throw new UnsupportedOperationException("empty.reduceLeft") - - var first = true - var acc: B = 0.asInstanceOf[B] - - for (x <- self) { - if (first) { - acc = x - first = false - } - else acc = op(acc, x) - } - acc - } - - def reduceRight[B >: A](op: (A, B) => B): B = { - if (isEmpty) - throw new UnsupportedOperationException("empty.reduceRight") - - reversed.reduceLeft[B]((x, y) => op(y, x)) - } - - def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = - if (isEmpty) None else Some(reduceLeft(op)) - - def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = - if (isEmpty) None else Some(reduceRight(op)) - - def reduce[A1 >: A](op: (A1, A1) => A1): A1 = reduceLeft(op) - - def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1] = reduceLeftOption(op) - - def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) - - def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop) - - def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus) - - def product[B >: A](implicit num: Numeric[B]): B = foldLeft(num.one)(num.times) - - def min[B >: A](implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.min") - - reduceLeft((x, y) => if (cmp.lteq(x, y)) x else y) - } - - def max[B >: A](implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.max") - - reduceLeft((x, y) => if (cmp.gteq(x, y)) x else y) - } - - def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.maxBy") - - var maxF: B = null.asInstanceOf[B] - var maxElem: A = null.asInstanceOf[A] - var first = true - - for (elem <- self) { - val fx = f(elem) - if (first || cmp.gt(fx, maxF)) { - maxElem = elem - maxF = fx - first = false - } - } - maxElem - } - def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.minBy") - - var minF: B = null.asInstanceOf[B] - var minElem: A = null.asInstanceOf[A] - var first = true - - for (elem <- self) { - val fx = f(elem) - if (first || cmp.lt(fx, minF)) { - minElem = elem - minF = fx - first = false - } - } - minElem - } - - /** Copies all elements of this $coll to a buffer. - * $willNotTerminateInf - * @param dest The buffer to which elements are copied. 
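 *  For example (a minimal sketch):
 *  {{{
 *  scala> val dest = scala.collection.mutable.ArrayBuffer.empty[Int]
 *  scala> List(1, 2, 3).copyToBuffer(dest)
 *  scala> dest
 *  res1: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(1, 2, 3)
 *  }}}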
- */ - def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= seq - - def copyToArray[B >: A](xs: Array[B], start: Int): Unit = - copyToArray(xs, start, xs.length - start) - - def copyToArray[B >: A](xs: Array[B]): Unit = - copyToArray(xs, 0, xs.length) - - def toArray[B >: A : ClassTag]: Array[B] = { - if (isTraversableAgain) { - val result = new Array[B](size) - copyToArray(result, 0) - result - } - else toBuffer.toArray - } - - def toTraversable: Traversable[A] - - def toList: List[A] = to[List] - - def toIterable: Iterable[A] = toStream - - def toSeq: Seq[A] = toStream - - def toIndexedSeq: immutable.IndexedSeq[A] = to[immutable.IndexedSeq] - - def toBuffer[B >: A]: mutable.Buffer[B] = to[ArrayBuffer].asInstanceOf[mutable.Buffer[B]] - - def toSet[B >: A]: immutable.Set[B] = to[immutable.Set].asInstanceOf[immutable.Set[B]] - - def toVector: Vector[A] = to[Vector] - - def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { - val b = cbf() - b ++= seq - b.result() - } - - def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = { - val b = immutable.Map.newBuilder[T, U] - for (x <- self) - b += x - - b.result() - } - - def mkString(start: String, sep: String, end: String): String = - addString(new StringBuilder(), start, sep, end).toString - - def mkString(sep: String): String = mkString("", sep, "") - - def mkString: String = mkString("") - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. - * Inside, the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * Example: - * - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = new StringBuilder() - * b: StringBuilder = - * - * scala> a.addString(b , "List(" , ", " , ")") - * res5: StringBuilder = List(1, 2, 3, 4) - * }}} - * - * @param b the string builder to which elements are appended. - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return the string builder `b` to which elements were appended. - */ - def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { - var first = true - - b append start - for (x <- self) { - if (first) { - b append x - first = false - } - else { - b append sep - b append x - } - } - b append end - - b - } - - /** Appends all elements of this $coll to a string builder using a separator string. - * The written text consists of the string representations (w.r.t. the method `toString`) - * of all elements of this $coll, separated by the string `sep`. - * - * Example: - * - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = new StringBuilder() - * b: StringBuilder = - * - * scala> a.addString(b, ", ") - * res0: StringBuilder = 1, 2, 3, 4 - * }}} - * - * @param b the string builder to which elements are appended. - * @param sep the separator string. - * @return the string builder `b` to which elements were appended. - */ - def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "") - - /** Appends all elements of this $coll to a string builder. - * The written text consists of the string representations (w.r.t. the method - * `toString`) of all elements of this $coll without any separator string. 
- * - * Example: - * - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = new StringBuilder() - * b: StringBuilder = - * - * scala> val h = a.addString(b) - * h: StringBuilder = 1234 - * }}} - - * @param b the string builder to which elements are appended. - * @return the string builder `b` to which elements were appended. - */ - def addString(b: StringBuilder): StringBuilder = addString(b, "") -} - - -object TraversableOnce { - implicit def alternateImplicit[A](trav: TraversableOnce[A]): ForceImplicitAmbiguity = new ForceImplicitAmbiguity - implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]): FlattenOps[A] = - new FlattenOps[A](travs map ev) - - /* Functionality reused in Iterator.CanBuildFrom */ - private[collection] abstract class BufferedCanBuildFrom[A, CC[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[CC[_], A, CC[A]] { - def bufferToColl[B](buff: ArrayBuffer[B]): CC[B] - def traversableToColl[B](t: GenTraversable[B]): CC[B] - - def newIterator: Builder[A, CC[A]] = new ArrayBuffer[A] mapResult bufferToColl - - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. - * @return the result of invoking the `genericBuilder` method on `from`. - */ - def apply(from: CC[_]): Builder[A, CC[A]] = from match { - case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult { - case res => traversableToColl(res.asInstanceOf[GenTraversable[A]]) - } - case _ => newIterator - } - - /** Creates a new builder from scratch - * @return the result of invoking the `newBuilder` method of this factory. - */ - def apply() = newIterator - } - - /** With the advent of `TraversableOnce`, it can be useful to have a builder which - * operates on `Iterator`s so they can be treated uniformly along with the collections. - * See `scala.util.Random.shuffle` or `scala.concurrent.Future.sequence` for an example. 
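 *  A minimal sketch of the kind of use this enables, sequencing a one-shot
 *  `TraversableOnce` of futures through this `CanBuildFrom`:
 *  {{{
 *  scala> import scala.concurrent._, scala.concurrent.duration._
 *  scala> import scala.concurrent.ExecutionContext.Implicits.global
 *  scala> val fs: TraversableOnce[Future[Int]] = Iterator(Future(1), Future(2), Future(3))
 *  scala> Await.result(Future.sequence(fs), 1.second).toList
 *  res0: List[Int] = List(1, 2, 3)
 *  }}}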
- */ - class OnceCanBuildFrom[A] extends BufferedCanBuildFrom[A, TraversableOnce] { - def bufferToColl[B](buff: ArrayBuffer[B]) = buff.iterator - def traversableToColl[B](t: GenTraversable[B]) = t.seq - } - - /** Evidence for building collections from `TraversableOnce` collections */ - implicit def OnceCanBuildFrom[A]: OnceCanBuildFrom[A] = new OnceCanBuildFrom[A] - - class FlattenOps[A](travs: TraversableOnce[TraversableOnce[A]]) { - def flatten: Iterator[A] = new AbstractIterator[A] { - val its = travs.toIterator - private var it: Iterator[A] = Iterator.empty - def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next().toIterator; hasNext } - def next(): A = if (hasNext) it.next() else Iterator.empty.next() - } - } - - class ForceImplicitAmbiguity - - implicit class MonadOps[+A](trav: TraversableOnce[A]) { - def map[B](f: A => B): TraversableOnce[B] = trav.toIterator map f - def flatMap[B](f: A => GenTraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f - def withFilter(p: A => Boolean) = trav.toIterator filter p - def filter(p: A => Boolean): TraversableOnce[A] = withFilter(p) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/TraversableProxy.scala b/tests/scala2-library/src/library/scala/collection/TraversableProxy.scala deleted file mode 100644 index 0c7219c5f943..000000000000 --- a/tests/scala2-library/src/library/scala/collection/TraversableProxy.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def' - - -/** This trait implements a proxy for traversable objects. It forwards - * all calls to a different traversable object - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait TraversableProxy[+A] extends Traversable[A] with TraversableProxyLike[A, Traversable[A]] diff --git a/tests/scala2-library/src/library/scala/collection/TraversableProxyLike.scala b/tests/scala2-library/src/library/scala/collection/TraversableProxyLike.scala deleted file mode 100644 index c8b641f88bab..000000000000 --- a/tests/scala2-library/src/library/scala/collection/TraversableProxyLike.scala +++ /dev/null @@ -1,101 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection - -import generic._ -import mutable.{Buffer, StringBuilder} -import scala.reflect.ClassTag - -// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for Traversable objects. It forwards - * all calls to a different Traversable object. 
- * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy { - def self: Repr - - override def foreach[U](f: A => U): Unit = self.foreach(f) - override def isEmpty: Boolean = self.isEmpty - override def nonEmpty: Boolean = self.nonEmpty - override def size: Int = self.size - override def hasDefiniteSize = self.hasDefiniteSize - override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf) - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.map(f)(bf) - override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf) - override def filter(p: A => Boolean): Repr = self.filter(p) - override def filterNot(p: A => Boolean): Repr = self.filterNot(p) - override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.collect(pf)(bf) - override def partition(p: A => Boolean): (Repr, Repr) = self.partition(p) - override def groupBy[K](f: A => K): immutable.Map[K, Repr] = self.groupBy(f) - override def forall(p: A => Boolean): Boolean = self.forall(p) - override def exists(p: A => Boolean): Boolean = self.exists(p) - override def count(p: A => Boolean): Int = self.count(p) - override def find(p: A => Boolean): Option[A] = self.find(p) - override def foldLeft[B](z: B)(op: (B, A) => B): B = self.foldLeft(z)(op) - override def /: [B](z: B)(op: (B, A) => B): B = self./:(z)(op) - override def foldRight[B](z: B)(op: (A, B) => B): B = self.foldRight(z)(op) - override def :\ [B](z: B)(op: (A, B) => B): B = self.:\(z)(op) - override def reduceLeft[B >: A](op: (B, A) => B): B = self.reduceLeft(op) - override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = self.reduceLeftOption(op) - override def reduceRight[B >: A](op: (A, B) => B): B = self.reduceRight(op) - override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = self.reduceRightOption(op) - override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanLeft(z)(op)(bf) - override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanRight(z)(op)(bf) - override def sum[B >: A](implicit num: Numeric[B]): B = self.sum(num) - override def product[B >: A](implicit num: Numeric[B]): B = self.product(num) - override def min[B >: A](implicit cmp: Ordering[B]): A = self.min(cmp) - override def max[B >: A](implicit cmp: Ordering[B]): A = self.max(cmp) - override def head: A = self.head - override def headOption: Option[A] = self.headOption - override def tail: Repr = self.tail - override def last: A = self.last - override def lastOption: Option[A] = self.lastOption - override def init: Repr = self.init - override def take(n: Int): Repr = self.take(n) - override def drop(n: Int): Repr = self.drop(n) - override def slice(from: Int, until: Int): Repr = self.slice(from, until) - override def takeWhile(p: A => Boolean): Repr = self.takeWhile(p) - override def dropWhile(p: A => Boolean): Repr = self.dropWhile(p) - override def span(p: A => Boolean): (Repr, Repr) = self.span(p) - override def splitAt(n: Int): (Repr, Repr) = self.splitAt(n) - override def copyToBuffer[B >: A](dest: Buffer[B]) = 
self.copyToBuffer(dest) - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = self.copyToArray(xs, start, len) - override def copyToArray[B >: A](xs: Array[B], start: Int) = self.copyToArray(xs, start) - override def copyToArray[B >: A](xs: Array[B]) = self.copyToArray(xs) - override def toArray[B >: A: ClassTag]: Array[B] = self.toArray - override def toList: List[A] = self.toList - override def toIterable: Iterable[A] = self.toIterable - override def toSeq: Seq[A] = self.toSeq - override def toIndexedSeq: immutable.IndexedSeq[A] = self.toIndexedSeq - override def toBuffer[B >: A] = self.toBuffer - override def toStream: Stream[A] = self.toStream - override def toSet[B >: A]: immutable.Set[B] = self.toSet - override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = self.toMap(ev) - override def toTraversable: Traversable[A] = self.toTraversable - override def toIterator: Iterator[A] = self.toIterator - override def mkString(start: String, sep: String, end: String): String = self.mkString(start, sep, end) - override def mkString(sep: String): String = self.mkString(sep) - override def mkString: String = self.mkString - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = self.addString(b, start, sep, end) - override def addString(b: StringBuilder, sep: String): StringBuilder = self.addString(b, sep) - override def addString(b: StringBuilder): StringBuilder = self.addString(b) - override def stringPrefix : String = self.stringPrefix - override def view = self.view - override def view(from: Int, until: Int): TraversableView[A, Repr] = self.view(from, until) - // This appears difficult to override due to the type of WithFilter. - // override def withFilter(p: A => Boolean): WithFilter = self.withFilter(p) -} diff --git a/tests/scala2-library/src/library/scala/collection/TraversableView.scala b/tests/scala2-library/src/library/scala/collection/TraversableView.scala deleted file mode 100644 index 431e73b46b64..000000000000 --- a/tests/scala2-library/src/library/scala/collection/TraversableView.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.Builder - -/** A base trait for non-strict views of traversable collections. - * $traversableViewInfo - */ -trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] { } - -/** An object containing the necessary implicit definitions to make - * `TraversableView`s work. Its definitions are generally not accessed directly by clients. 
- */ -object TraversableView { - class NoBuilder[A] extends Builder[A, Nothing] { - def +=(elem: A): this.type = this - def iterator: Iterator[A] = Iterator.empty - def result() = throw new UnsupportedOperationException("TraversableView.Builder.result") - def clear() {} - } - type Coll = TraversableView[_, _ <: Traversable[_]] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, TraversableView[A, Traversable[_]]] = - new CanBuildFrom[Coll, A, TraversableView[A, Traversable[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder - } -} diff --git a/tests/scala2-library/src/library/scala/collection/TraversableViewLike.scala b/tests/scala2-library/src/library/scala/collection/TraversableViewLike.scala deleted file mode 100644 index fa94086791c3..000000000000 --- a/tests/scala2-library/src/library/scala/collection/TraversableViewLike.scala +++ /dev/null @@ -1,322 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import generic._ -import mutable.{ Builder, ArrayBuffer } -import scala.annotation.migration -import scala.language.implicitConversions - -trait ViewMkString[+A] { - self: Traversable[A] => - - // It is necessary to use thisSeq rather than toSeq to avoid cycles in the - // eager evaluation of vals in transformed view subclasses, see #4558. - protected[this] def thisSeq: Seq[A] = (new ArrayBuffer[A] ++= self).result - - // Have to overload all three to work around #4299. The overload - // is because mkString should force a view but toString should not. - override def mkString: String = mkString("") - override def mkString(sep: String): String = mkString("", sep, "") - override def mkString(start: String, sep: String, end: String): String = { - thisSeq.addString(new StringBuilder(), start, sep, end).toString - } - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { - var first = true - b append start - for (x <- self) { - if (first) first = false else b append sep - b append x - } - b append end - b - } -} - -/** A template trait for non-strict views of traversable collections. - * $traversableViewInfo - * - * Implementation note: Methods such as `map` or `flatMap` on this view will not invoke the implicitly passed - * `Builder` factory, but will return a new view directly, to preserve by-name behavior. - * The new view is then cast to the factory's result type. This means that every `CanBuildFrom` - * that takes a `View` as its `From` type parameter must yield the same view (or a generic - * superclass of it) as its result parameter. If that assumption is broken, cast errors might result. - * - * @define viewInfo - * A view is a lazy version of some collection. Collection transformers such as - * `map` or `filter` or `++` do not traverse any elements when applied on a view. - * Instead they create a new view which simply records that fact that the operation - * needs to be applied. The collection elements are accessed, and the view operations are applied, - * when a non-view result is needed, or when the `force` method is called on a view. - * @define traversableViewInfo - * $viewInfo - * - * All views for traversable collections are defined by creating a new `foreach` method. 
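 *  For example, a minimal sketch of the laziness described above:
 *  {{{
 *  scala> var evaluated = 0
 *  scala> val v = List(1, 2, 3).view.map { x => evaluated += 1; x * 2 }
 *  scala> evaluated                  // the transformer only recorded the mapping
 *  res0: Int = 0
 *  scala> v.force
 *  res1: Seq[Int] = List(2, 4, 6)
 *  scala> evaluated                  // forcing traversed the underlying list once
 *  res2: Int = 3
 *  }}}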
- * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - * @tparam This the type of the view itself - */ -trait TraversableViewLike[+A, - +Coll, - +This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]] - extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A] -{ - self => - - protected def underlying: Coll - protected[this] def viewIdentifier: String = "" - protected[this] def viewIdString: String = "" - def viewToString = stringPrefix + viewIdString + "(...)" - override def stringPrefix = "TraversableView" - - override protected[this] def newBuilder: Builder[A, This] = - throw new UnsupportedOperationException(this+".newBuilder") - - def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = { - val b = bf(underlying) - b ++= this - b.result() - } - - /** Explicit instantiation of the `TransformedT` trait to reduce class file size in subclasses. */ - private[collection] abstract class AbstractTransformedT[+B] extends Traversable[B] with TransformedT[B] - - - /** The implementation base trait of this view. - * This trait and all its subtraits has to be re-implemented for each - * ViewLike class. - */ - trait TransformedT[+B] extends TraversableView[B, Coll] { - def foreach[U](f: B => U): Unit - - lazy val underlying = self.underlying - final override protected[this] def viewIdString = self.viewIdString + viewIdentifier - - // Methods whose standard implementations use "isEmpty" need to be rewritten - // for views, else they will end up traversing twice in a situation like: - // xs.view.flatMap(f).headOption - override def headOption: Option[B] = { - for (x <- this) - return Some(x) - - None - } - override def lastOption: Option[B] = { - // (Should be) better than allocating a Some for every element. - var empty = true - var result: B = null.asInstanceOf[B] - for (x <- this) { - empty = false - result = x - } - if (empty) None else Some(result) - } - - // XXX: As yet not dealt with, tail and init both call isEmpty. - override def stringPrefix = self.stringPrefix - override def toString = viewToString - } - - trait EmptyViewT extends TransformedT[Nothing] { - final override def isEmpty = true - final override def foreach[U](f: Nothing => U): Unit = () - } - - /** A fall back which forces everything into a vector and then applies an operation - * on it. 
Used for those operations which do not naturally lend themselves to a view - */ - trait ForcedT[B] extends TransformedT[B] { - protected[this] lazy val forced: GenSeq[B] - def foreach[U](f: B => U) = forced foreach f - final override protected[this] def viewIdentifier = "C" - } - - trait SlicedT extends TransformedT[A] { - protected[this] lazy val endpoints: SliceInterval - protected[this] def from = endpoints.from - protected[this] def until = endpoints.until - // protected def newSliced(_endpoints: SliceInterval): TransformedT[A] = - // self.newSliced(endpoints.recalculate(_endpoints)) - - def foreach[U](f: A => U) { - var index = 0 - for (x <- self) { - if (from <= index) { - if (until <= index) return - f(x) - } - index += 1 - } - } - final override protected[this] def viewIdentifier = "S" - } - - trait MappedT[B] extends TransformedT[B] { - protected[this] lazy val mapping: A => B - def foreach[U](f: B => U) { - for (x <- self) - f(mapping(x)) - } - final override protected[this] def viewIdentifier = "M" - } - - trait FlatMappedT[B] extends TransformedT[B] { - protected[this] lazy val mapping: A => GenTraversableOnce[B] - def foreach[U](f: B => U) { - for (x <- self) - for (y <- mapping(x).seq) - f(y) - } - final override protected[this] def viewIdentifier = "N" - } - - trait AppendedT[B >: A] extends TransformedT[B] { - protected[this] lazy val rest: GenTraversable[B] - def foreach[U](f: B => U) { - self foreach f - rest foreach f - } - final override protected[this] def viewIdentifier = "A" - } - - trait PrependedT[B >: A] extends TransformedT[B] { - protected[this] lazy val fst: GenTraversable[B] - def foreach[U](f: B => U) { - fst foreach f - self foreach f - } - final override protected[this] def viewIdentifier = "A" - } - - trait FilteredT extends TransformedT[A] { - protected[this] lazy val pred: A => Boolean - def foreach[U](f: A => U) { - for (x <- self) - if (pred(x)) f(x) - } - final override protected[this] def viewIdentifier = "F" - } - - trait TakenWhileT extends TransformedT[A] { - protected[this] lazy val pred: A => Boolean - def foreach[U](f: A => U) { - for (x <- self) { - if (!pred(x)) return - f(x) - } - } - final override protected[this] def viewIdentifier = "T" - } - - trait DroppedWhileT extends TransformedT[A] { - protected[this] lazy val pred: A => Boolean - def foreach[U](f: A => U) { - var go = false - for (x <- self) { - if (!go && !pred(x)) go = true - if (go) f(x) - } - } - final override protected[this] def viewIdentifier = "D" - } - - override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newAppended(xs.seq.toTraversable).asInstanceOf[That] - - override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newPrepended(xs.seq.toTraversable).asInstanceOf[That] - - // Need second one because of optimization in TraversableLike - override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newPrepended(xs).asInstanceOf[That] - - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = { - newMapped(f).asInstanceOf[That] -// val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newMapped(f).asInstanceOf[That] -// else super.map[B, That](f)(bf) - } - - override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That = - filter(pf.isDefinedAt).map(pf)(bf) - - override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, 
That]): That = { - newFlatMapped(f).asInstanceOf[That] -// was: val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That] -// else super.flatMap[B, That](f)(bf) - } - override def flatten[B](implicit asTraversable: A => /*<: GenSeq[B]): TransformedT[B] = new AbstractTransformedT[B] with ForcedT[B] { lazy val forced = xs } - protected def newAppended[B >: A](that: GenTraversable[B]): TransformedT[B] = new AbstractTransformedT[B] with AppendedT[B] { lazy val rest = that } - protected def newPrepended[B >: A](that: GenTraversable[B]): TransformedT[B] = new AbstractTransformedT[B] with PrependedT[B] { lazy val fst = that } - protected def newMapped[B](f: A => B): TransformedT[B] = new AbstractTransformedT[B] with MappedT[B] { lazy val mapping = f } - protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): TransformedT[B] = new AbstractTransformedT[B] with FlatMappedT[B] { lazy val mapping = f } - protected def newFiltered(p: A => Boolean): TransformedT[A] = new AbstractTransformedT[A] with FilteredT { lazy val pred = p } - protected def newSliced(_endpoints: SliceInterval): TransformedT[A] = new AbstractTransformedT[A] with SlicedT { lazy val endpoints = _endpoints } - protected def newDroppedWhile(p: A => Boolean): TransformedT[A] = new AbstractTransformedT[A] with DroppedWhileT { lazy val pred = p } - protected def newTakenWhile(p: A => Boolean): TransformedT[A] = new AbstractTransformedT[A] with TakenWhileT { lazy val pred = p } - - protected def newTaken(n: Int): TransformedT[A] = newSliced(SliceInterval(0, n)) - protected def newDropped(n: Int): TransformedT[A] = newSliced(SliceInterval(n, Int.MaxValue)) - - override def filter(p: A => Boolean): This = newFiltered(p) - override def withFilter(p: A => Boolean): This = newFiltered(p) - override def partition(p: A => Boolean): (This, This) = (newFiltered(p), newFiltered(!p(_))) - override def init: This = newSliced(SliceInterval(0, size - 1)) // !!! can't call size here. - override def drop(n: Int): This = newDropped(n) - override def take(n: Int): This = newTaken(n) - override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until)) - override def dropWhile(p: A => Boolean): This = newDroppedWhile(p) - override def takeWhile(p: A => Boolean): This = newTakenWhile(p) - override def span(p: A => Boolean): (This, This) = (newTakenWhile(p), newDroppedWhile(p)) - override def splitAt(n: Int): (This, This) = (newTaken(n), newDropped(n)) - - override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That = - newForced(thisSeq.scanLeft(z)(op)).asInstanceOf[That] - - @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0") - override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That = - newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That] - - override def groupBy[K](f: A => K): immutable.Map[K, This] = - thisSeq groupBy f mapValues (xs => newForced(xs)) - - override def unzip[A1, A2](implicit asPair: A => (A1, A2)) = - (newMapped(x => asPair(x)._1), newMapped(x => asPair(x)._2)) // TODO - Performance improvements. - - override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)) = - (newMapped(x => asTriple(x)._1), newMapped(x => asTriple(x)._2), newMapped(x => asTriple(x)._3)) // TODO - Performance improvements. 
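  // A minimal, illustrative sketch (the `ViewForcingSketch` name exists only for this
  // example) of the split visible in the overrides above: `map`, `filter`, `take`,
  // `drop` and `slice` stay lazy by stacking Transformed nodes, whereas `groupBy`,
  // `inits` and `tails` (and, once traversed, `scanLeft`/`scanRight`) fall back to
  // `newForced`/`thisSeq` and materialize the elements as a strict sequence.
  object ViewForcingSketch {
    def main(args: Array[String]): Unit = {
      var touched = 0
      val v = List(1, 2, 3, 4).view.map { x => touched += 1; x * 2 }

      val taken = v.take(2)               // stays lazy: just stacks another node
      println(touched)                    // 0

      val grouped = v.groupBy(_ % 4 == 0) // goes through thisSeq: traverses everything now
      println(touched)                    // 4

      println(taken.force)                // List(2, 4): forcing evaluates on demand
      println(grouped(true).force)        // List(4, 8)
    }
  }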
- - override def filterNot(p: (A) => Boolean): This = - newFiltered(a => !(p(a))) - - override def inits: Iterator[This] = - thisSeq.inits.map(as => newForced(as).asInstanceOf[This]) - - override def tails: Iterator[This] = - thisSeq.tails.map(as => newForced(as).asInstanceOf[This]) - - override def tail: This = - // super.tail would also work as it is currently implemented in terms of drop(Int). - if (isEmpty) super.tail else newDropped(1) - - override def toString = viewToString -} diff --git a/tests/scala2-library/src/library/scala/collection/concurrent/BasicNode.java b/tests/scala2-library/src/library/scala/collection/concurrent/BasicNode.java deleted file mode 100644 index 97b88700368e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/concurrent/BasicNode.java +++ /dev/null @@ -1,15 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection.concurrent; - -public abstract class BasicNode { - - public abstract String string(int lev); - -} diff --git a/tests/scala2-library/src/library/scala/collection/concurrent/CNodeBase.java b/tests/scala2-library/src/library/scala/collection/concurrent/CNodeBase.java deleted file mode 100644 index 2fce971b2b84..000000000000 --- a/tests/scala2-library/src/library/scala/collection/concurrent/CNodeBase.java +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection.concurrent; - -import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; - -abstract class CNodeBase extends MainNode { - - @SuppressWarnings("rawtypes") - public static final AtomicIntegerFieldUpdater updater = - AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize"); - - public volatile int csize = -1; - - public boolean CAS_SIZE(int oldval, int nval) { - return updater.compareAndSet(this, oldval, nval); - } - - public void WRITE_SIZE(int nval) { - updater.set(this, nval); - } - - public int READ_SIZE() { - return updater.get(this); - } - -} \ No newline at end of file diff --git a/tests/scala2-library/src/library/scala/collection/concurrent/Gen.java b/tests/scala2-library/src/library/scala/collection/concurrent/Gen.java deleted file mode 100644 index 601988468342..000000000000 --- a/tests/scala2-library/src/library/scala/collection/concurrent/Gen.java +++ /dev/null @@ -1,11 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection.concurrent; - -final class Gen {} diff --git a/tests/scala2-library/src/library/scala/collection/concurrent/INodeBase.java b/tests/scala2-library/src/library/scala/collection/concurrent/INodeBase.java deleted file mode 100644 index 2f2d20328791..000000000000 --- a/tests/scala2-library/src/library/scala/collection/concurrent/INodeBase.java +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection.concurrent; - -import 
java.util.concurrent.atomic.AtomicReferenceFieldUpdater; - -abstract class INodeBase extends BasicNode { - - @SuppressWarnings("rawtypes") - public static final AtomicReferenceFieldUpdater updater = - AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode"); - - public static final Object RESTART = new Object(); - - public volatile MainNode mainnode = null; - - public final Gen gen; - - public INodeBase(Gen generation) { - gen = generation; - } - - public BasicNode prev() { - return null; - } - -} \ No newline at end of file diff --git a/tests/scala2-library/src/library/scala/collection/concurrent/MainNode.java b/tests/scala2-library/src/library/scala/collection/concurrent/MainNode.java deleted file mode 100644 index adb9b59a3de9..000000000000 --- a/tests/scala2-library/src/library/scala/collection/concurrent/MainNode.java +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection.concurrent; - -import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; - -abstract class MainNode extends BasicNode { - - @SuppressWarnings("rawtypes") - public static final AtomicReferenceFieldUpdater updater = - AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev"); - - public volatile MainNode prev = null; - - public abstract int cachedSize(Object ct); - - public boolean CAS_PREV(MainNode oldval, MainNode nval) { - return updater.compareAndSet(this, oldval, nval); - } - - public void WRITE_PREV(MainNode nval) { - updater.set(this, nval); - } - - // do we need this? unclear in the javadocs... - // apparently not - volatile reads are supposed to be safe - // irregardless of whether there are concurrent ARFU updates - @Deprecated @SuppressWarnings("unchecked") - public MainNode READ_PREV() { - return updater.get(this); - } - -} \ No newline at end of file diff --git a/tests/scala2-library/src/library/scala/collection/concurrent/Map.scala b/tests/scala2-library/src/library/scala/collection/concurrent/Map.scala deleted file mode 100644 index f27dfd57fcc1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/concurrent/Map.scala +++ /dev/null @@ -1,100 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.concurrent - -/** A template trait for mutable maps that allow concurrent access. - * - * $concurrentmapinfo - * - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] - * section on `Concurrent Maps` for more information. - * - * @tparam A the key type of the map - * @tparam B the value type of the map - * - * @define Coll `concurrent.Map` - * @define coll concurrent map - * @define concurrentmapinfo - * This is a base trait for all Scala concurrent map implementations. It - * provides all of the methods a `Map` does, with the difference that all the - * changes are atomic. It also describes methods specific to concurrent maps. - * - * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. - * - * @define atomicop - * This is an atomic operation. 
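 *  For example, a minimal sketch against `scala.collection.concurrent.TrieMap`,
 *  the standard implementation of this trait:
 *  {{{
 *  scala> val m = scala.collection.concurrent.TrieMap.empty[String, Int]
 *  scala> m.putIfAbsent("a", 1)
 *  res0: Option[Int] = None
 *  scala> m.putIfAbsent("a", 2)   // already bound, value left unchanged
 *  res1: Option[Int] = Some(1)
 *  scala> m.replace("a", 1, 10)   // succeeds only while "a" is still bound to 1
 *  res2: Boolean = true
 *  scala> m.remove("a", 99)       // expected value does not match, nothing is removed
 *  res3: Boolean = false
 *  }}}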
- */ -trait Map[A, B] extends scala.collection.mutable.Map[A, B] { - - /** - * Associates the given key with a given value, unless the key was already - * associated with some other value. - * - * $atomicop - * - * @param k key with which the specified value is to be associated with - * @param v value to be associated with the specified key - * @return `Some(oldvalue)` if there was a value `oldvalue` previously - * associated with the specified key, or `None` if there was no - * mapping for the specified key - */ - def putIfAbsent(k: A, v: B): Option[B] - - /** - * Removes the entry for the specified key if it's currently mapped to the - * specified value. - * - * $atomicop - * - * @param k key for which the entry should be removed - * @param v value expected to be associated with the specified key if - * the removal is to take place - * @return `true` if the removal took place, `false` otherwise - */ - def remove(k: A, v: B): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped to - * a given value. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param oldvalue value expected to be associated with the specified key - * if replacing is to happen - * @param newvalue value to be associated with the specified key - * @return `true` if the entry was replaced, `false` otherwise - */ - def replace(k: A, oldvalue: B, newvalue: B): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped - * to some value. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param v value to be associated with the specified key - * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise - */ - def replace(k: A, v: B): Option[B] - - override def getOrElseUpdate(key: A, op: =>B): B = get(key) match { - case Some(v) => v - case None => - val v = op - putIfAbsent(key, v) match { - case Some(nv) => nv - case None => v - } - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/concurrent/TrieMap.scala b/tests/scala2-library/src/library/scala/collection/concurrent/TrieMap.scala deleted file mode 100644 index db3263888dbd..000000000000 --- a/tests/scala2-library/src/library/scala/collection/concurrent/TrieMap.scala +++ /dev/null @@ -1,1151 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package concurrent - -import java.util.concurrent.atomic._ -import scala.collection.parallel.mutable.ParTrieMap -import scala.util.hashing.Hashing -import scala.util.control.ControlThrowable -import generic._ -import scala.annotation.tailrec - -private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) { - import INodeBase._ - - WRITE(bn) - - def this(g: Gen) = this(null, g) - - def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) - - def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) - - def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct) - - def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = { - val m = /*READ*/mainnode - val prevval = /*READ*/m.prev - if (prevval eq null) m - else GCAS_Complete(m, ct) - } - - @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else { - 
// complete the GCAS - val prev = /*READ*/m.prev - val ctr = ct.readRoot(abort = true) - - prev match { - case null => - m - case fn: FailedNode[_, _] => // try to commit to previous value - if (CAS(m, fn.prev)) fn.prev - else GCAS_Complete(/*READ*/mainnode, ct) - case vn: MainNode[_, _] => - // Assume that you've read the root from the generation G. - // Assume that the snapshot algorithm is correct. - // ==> you can only reach nodes in generations <= G. - // ==> `gen` is <= G. - // We know that `ctr.gen` is >= G. - // ==> if `ctr.gen` = `gen` then they are both equal to G. - // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G, - // or both - if ((ctr.gen eq gen) && ct.nonReadOnly) { - // try to commit - if (m.CAS_PREV(prev, null)) m - else GCAS_Complete(m, ct) - } else { - // try to abort - m.CAS_PREV(prev, new FailedNode(prev)) - GCAS_Complete(/*READ*/mainnode, ct) - } - } - } - - def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = { - n.WRITE_PREV(old) - if (CAS(old, n)) { - GCAS_Complete(n, ct) - /*READ*/n.prev eq null - } else false - } - - private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2) - - private def inode(cn: MainNode[K, V]) = { - val nin = new INode[K, V](gen) - nin.WRITE(cn) - nin - } - - def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = { - val nin = new INode[K, V](ngen) - val main = GCAS_READ(ct) - nin.WRITE(main) - nin - } - - /** Inserts a key value pair, overwriting the old pair if the keys match. - * - * @return true if successful, false otherwise - */ - @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = { - val m = GCAS_READ(ct) // use -Yinline! - - m match { - case cn: CNode[K, V] => // 1) a multiway node - val idx = (hc >>> lev) & 0x1f - val flag = 1 << idx - val bmp = cn.bitmap - val mask = flag - 1 - val pos = Integer.bitCount(bmp & mask) - if ((bmp & flag) != 0) { - // 1a) insert below - cn.array(pos) match { - case in: INode[K, V] => - if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) - else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) - else false - } - case sn: SNode[K, V] => - if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct) - else { - val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) - GCAS(cn, nn, ct) - } - } - } else { - val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) - GCAS(cn, ncnode, ct) - } - case tn: TNode[K, V] => - clean(parent, ct, lev - 5) - false - case ln: LNode[K, V] => // 3) an l-node - val nn = ln.inserted(k, v) - GCAS(ln, nn, ct) - } - } - - /** Inserts a new key value pair, given that a specific condition is met. - * - * @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v` - * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) - */ - @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { - val m = GCAS_READ(ct) // use -Yinline! 
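    // Worked example (an illustrative sketch, with hc = 0x2a5 and lev = 5 picked
    // arbitrarily) of the level-wise hash indexing used below and in rec_insert above:
    //   idx  = (0x2a5 >>> 5) & 0x1f               // = 21, the next 5-bit slice of the hash
    //   flag = 1 << idx                           // the bit standing for slot 21 in cn.bitmap
    //   pos  = Integer.bitCount(bmp & (flag - 1)) // number of occupied slots below it, i.e. the
    //                                             // index of this slot in the compressed cn.array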
- - m match { - case cn: CNode[K, V] => // 1) a multiway node - val idx = (hc >>> lev) & 0x1f - val flag = 1 << idx - val bmp = cn.bitmap - val mask = flag - 1 - val pos = Integer.bitCount(bmp & mask) - if ((bmp & flag) != 0) { - // 1a) insert below - cn.array(pos) match { - case in: INode[K, V] => - if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct) - else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct) - else null - } - case sn: SNode[K, V] => cond match { - case null => - if (sn.hc == hc && equal(sn.k, k, ct)) { - if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null - } else { - val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) - if (GCAS(cn, nn, ct)) None - else null - } - case INode.KEY_ABSENT => - if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v) - else { - val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) - if (GCAS(cn, nn, ct)) None - else null - } - case INode.KEY_PRESENT => - if (sn.hc == hc && equal(sn.k, k, ct)) { - if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null - } else None - case otherv => - if (sn.hc == hc && equal(sn.k, k, ct) && sn.v == otherv) { - if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null - } else None - } - } - } else cond match { - case null | INode.KEY_ABSENT => - val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) - if (GCAS(cn, ncnode, ct)) None else null - case INode.KEY_PRESENT => None - case otherv => None - } - case sn: TNode[K, V] => - clean(parent, ct, lev - 5) - null - case ln: LNode[K, V] => // 3) an l-node - def insertln() = { - val nn = ln.inserted(k, v) - GCAS(ln, nn, ct) - } - cond match { - case null => - val optv = ln.get(k) - if (insertln()) optv else null - case INode.KEY_ABSENT => - ln.get(k) match { - case None => if (insertln()) None else null - case optv => optv - } - case INode.KEY_PRESENT => - ln.get(k) match { - case Some(v0) => if (insertln()) Some(v0) else null - case None => None - } - case otherv => - ln.get(k) match { - case Some(v0) if v0 == otherv => if (insertln()) Some(otherv.asInstanceOf[V]) else null - case _ => None - } - } - } - } - - /** Looks up the value associated with the key. - * - * @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise - */ - @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = { - val m = GCAS_READ(ct) // use -Yinline! 
- - m match { - case cn: CNode[K, V] => // 1) a multinode - val idx = (hc >>> lev) & 0x1f - val flag = 1 << idx - val bmp = cn.bitmap - if ((bmp & flag) == 0) null // 1a) bitmap shows no binding - else { // 1b) bitmap contains a value - descend - val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) - val sub = cn.array(pos) - sub match { - case in: INode[K, V] => - if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) - else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) - else RESTART // used to be throw RestartException - } - case sn: SNode[K, V] => // 2) singleton node - if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] - else null - } - } - case tn: TNode[K, V] => // 3) non-live node - def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { - clean(parent, ct, lev - 5) - RESTART // used to be throw RestartException - } else { - if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] - else null - } - cleanReadOnly(tn) - case ln: LNode[K, V] => // 5) an l-node - ln.get(k).asInstanceOf[Option[AnyRef]].orNull - } - } - - /** Removes the key associated with the given value. - * - * @param v if null, will remove the key irregardless of the value; otherwise removes only if binding contains that exact key and value - * @return null if not successful, an Option[V] indicating the previous value otherwise - */ - def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { - val m = GCAS_READ(ct) // use -Yinline! - - m match { - case cn: CNode[K, V] => - val idx = (hc >>> lev) & 0x1f - val bmp = cn.bitmap - val flag = 1 << idx - if ((bmp & flag) == 0) None - else { - val pos = Integer.bitCount(bmp & (flag - 1)) - val sub = cn.array(pos) - val res = sub match { - case in: INode[K, V] => - if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct) - else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct) - else null - } - case sn: SNode[K, V] => - if (sn.hc == hc && equal(sn.k, k, ct) && (v == null || sn.v == v)) { - val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) - if (GCAS(cn, ncn, ct)) Some(sn.v) else null - } else None - } - - if (res == None || (res eq null)) res - else { - @tailrec def cleanParent(nonlive: AnyRef) { - val pm = parent.GCAS_READ(ct) - pm match { - case cn: CNode[K, V] => - val idx = (hc >>> (lev - 5)) & 0x1f - val bmp = cn.bitmap - val flag = 1 << idx - if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done - else { - val pos = Integer.bitCount(bmp & (flag - 1)) - val sub = cn.array(pos) - if (sub eq this) nonlive match { - case tn: TNode[K, V] => - val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) - if (!parent.GCAS(cn, ncn, ct)) - if (ct.readRoot().gen == startgen) cleanParent(nonlive) - } - } - case _ => // parent is no longer a cnode, we're done - } - } - - if (parent ne null) { // never tomb at root - val n = GCAS_READ(ct) - if (n.isInstanceOf[TNode[_, _]]) - cleanParent(n) - } - - res - } - } - case tn: TNode[K, V] => - clean(parent, ct, lev - 5) - null - case ln: LNode[K, V] => - if (v == null) { - val optv = ln.get(k) - val nn = ln.removed(k, ct) - if (GCAS(ln, nn, ct)) optv else null - } else ln.get(k) match { - case optv @ Some(v0) if v0 == v => - val nn = ln.removed(k, ct) - if (GCAS(ln, nn, ct)) optv else null - case _ => None - } - } - } - - private def clean(nd: INode[K, 
V], ct: TrieMap[K, V], lev: Int) { - val m = nd.GCAS_READ(ct) - m match { - case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) - case _ => - } - } - - def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null - - def cachedSize(ct: TrieMap[K, V]): Int = { - val m = GCAS_READ(ct) - m.cachedSize(ct) - } - - /* this is a quiescent method! */ - def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { - case null => "" - case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc) - case cn: CNode[_, _] => cn.string(lev) - case ln: LNode[_, _] => ln.string(lev) - case x => "".format(x) - }) - -} - - -private[concurrent] object INode { - val KEY_PRESENT = new AnyRef - val KEY_ABSENT = new AnyRef - - def newRootNode[K, V] = { - val gen = new Gen - val cn = new CNode[K, V](0, new Array(0), gen) - new INode[K, V](cn, gen) - } -} - - -private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { - WRITE_PREV(p) - - def string(lev: Int) = throw new UnsupportedOperationException - - def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException - - override def toString = "FailedNode(%s)".format(p) -} - - -private[concurrent] trait KVNode[K, V] { - def kvPair: (K, V) -} - - -private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) -extends BasicNode with KVNode[K, V] { - final def copy = new SNode(k, v, hc) - final def copyTombed = new TNode(k, v, hc) - final def copyUntombed = new SNode(k, v, hc) - final def kvPair = (k, v) - final def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) -} - - -private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) -extends MainNode[K, V] with KVNode[K, V] { - final def copy = new TNode(k, v, hc) - final def copyTombed = new TNode(k, v, hc) - final def copyUntombed = new SNode(k, v, hc) - final def kvPair = (k, v) - final def cachedSize(ct: AnyRef): Int = 1 - final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) -} - - -private[collection] final class LNode[K, V](final val listmap: immutable.ListMap[K, V]) -extends MainNode[K, V] { - def this(k: K, v: V) = this(immutable.ListMap(k -> v)) - def this(k1: K, v1: V, k2: K, v2: V) = this(immutable.ListMap(k1 -> v1, k2 -> v2)) - def inserted(k: K, v: V) = new LNode(listmap + ((k, v))) - def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = { - val updmap = listmap - k - if (updmap.size > 1) new LNode(updmap) - else { - val (k, v) = updmap.iterator.next() - new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses - } - } - def get(k: K) = listmap.get(k) - def cachedSize(ct: AnyRef): Int = listmap.size - def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", ")) -} - - -private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { - // this should only be called from within read-only snapshots - def cachedSize(ct: AnyRef) = { - val currsz = READ_SIZE() - if (currsz != -1) currsz - else { - val sz = computeSize(ct.asInstanceOf[TrieMap[K, V]]) - while (READ_SIZE() == -1) CAS_SIZE(-1, sz) - READ_SIZE() - } - } - - // lends itself towards being parallelizable by choosing - // a random starting offset in the array - // => if there are concurrent size computations, they start - // at different positions, so they are more likely to - // to be independent - private def computeSize(ct: 
TrieMap[K, V]): Int = { - var i = 0 - var sz = 0 - val offset = - if (array.length > 0) - //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */ - java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length) - else 0 - while (i < array.length) { - val pos = (i + offset) % array.length - array(pos) match { - case sn: SNode[_, _] => sz += 1 - case in: INode[K, V] => sz += in.cachedSize(ct) - } - i += 1 - } - sz - } - - def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { - val len = array.length - val narr = new Array[BasicNode](len) - Array.copy(array, 0, narr, 0, len) - narr(pos) = nn - new CNode[K, V](bitmap, narr, gen) - } - - def removedAt(pos: Int, flag: Int, gen: Gen) = { - val arr = array - val len = arr.length - val narr = new Array[BasicNode](len - 1) - Array.copy(arr, 0, narr, 0, pos) - Array.copy(arr, pos + 1, narr, pos, len - pos - 1) - new CNode[K, V](bitmap ^ flag, narr, gen) - } - - def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = { - val len = array.length - val bmp = bitmap - val narr = new Array[BasicNode](len + 1) - Array.copy(array, 0, narr, 0, pos) - narr(pos) = nn - Array.copy(array, pos, narr, pos + 1, len - pos) - new CNode[K, V](bmp | flag, narr, gen) - } - - /** Returns a copy of this cnode such that all the i-nodes below it are copied - * to the specified generation `ngen`. - */ - def renewed(ngen: Gen, ct: TrieMap[K, V]) = { - var i = 0 - val arr = array - val len = arr.length - val narr = new Array[BasicNode](len) - while (i < len) { - arr(i) match { - case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct) - case bn: BasicNode => narr(i) = bn - } - i += 1 - } - new CNode[K, V](bitmap, narr, ngen) - } - - private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match { - case tn: TNode[_, _] => tn.copyUntombed - case _ => inode - } - - def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { - case sn: SNode[K, V] => sn.copyTombed - case _ => this - } else this - - // - if the branching factor is 1 for this CNode, and the child - // is a tombed SNode, returns its tombed version - // - otherwise, if there is at least one non-null node below, - // returns the version of this node with at least some null-inodes - // removed (those existing when the op began) - // - if there are only null-i-nodes below, returns null - def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = { - val bmp = bitmap - var i = 0 - val arr = array - val tmparray = new Array[BasicNode](arr.length) - while (i < arr.length) { // construct new bitmap - val sub = arr(i) - sub match { - case in: INode[K, V] => - val inodemain = in.gcasRead(ct) - assert(inodemain ne null) - tmparray(i) = resurrect(in, inodemain) - case sn: SNode[K, V] => - tmparray(i) = sn - } - i += 1 - } - - new CNode[K, V](bmp, tmparray, gen).toContracted(lev) - } - - private[concurrent] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) - - /* quiescently consistent - don't call concurrently to anything involving a GCAS!! 
*/ - private def collectElems: Seq[(K, V)] = array flatMap { - case sn: SNode[K, V] => Some(sn.kvPair) - case in: INode[K, V] => in.mainnode match { - case tn: TNode[K, V] => Some(tn.kvPair) - case ln: LNode[K, V] => ln.listmap.toList - case cn: CNode[K, V] => cn.collectElems - } - } - - private def collectLocalElems: Seq[String] = array flatMap { - case sn: SNode[K, V] => Some(sn.kvPair._2.toString) - case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")") - } - - override def toString = { - val elems = collectLocalElems - "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", ")) - } -} - - -private[concurrent] object CNode { - - def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) { - val xidx = (xhc >>> lev) & 0x1f - val yidx = (yhc >>> lev) & 0x1f - val bmp = (1 << xidx) | (1 << yidx) - if (xidx == yidx) { - val subinode = new INode[K, V](gen)//(TrieMap.inodeupdater) - subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen) - new CNode(bmp, Array(subinode), gen) - } else { - if (xidx < yidx) new CNode(bmp, Array(x, y), gen) - else new CNode(bmp, Array(y, x), gen) - } - } else { - new LNode(x.k, x.v, y.k, y.v) - } - -} - - -private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) { - @volatile var committed = false -} - - -/** A concurrent hash-trie or TrieMap is a concurrent thread-safe lock-free - * implementation of a hash array mapped trie. It is used to implement the - * concurrent map abstraction. It has particularly scalable concurrent insert - * and remove operations and is memory-efficient. It supports O(1), atomic, - * lock-free snapshots which are used to implement linearizable lock-free size, - * iterator and clear operations. The cost of evaluating the (lazy) snapshot is - * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. 
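For illustration only (not part of the removed sources; names and values below are arbitrary), a minimal sketch of the scalable concurrent insert and remove operations this paragraph describes, using the public `scala.collection.concurrent.TrieMap` API:

    import scala.collection.concurrent.TrieMap

    val registry = TrieMap.empty[String, Int]
    registry.putIfAbsent("jobs", 0)                // atomic insert-if-absent
    registry.replace("jobs", 0, 1)                 // atomic compare-and-replace, succeeds
    registry.remove("jobs", 99)                    // conditional remove, fails (current value is 1)
    println(registry.getOrElseUpdate("jobs", 42))  // prints 1; the existing binding wins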
- * - * For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf - * - * @author Aleksandar Prokopec - * @since 2.10 - */ -@SerialVersionUID(0L - 6402774413839597105L) -final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) -extends scala.collection.concurrent.Map[K, V] - with scala.collection.mutable.MapLike[K, V, TrieMap[K, V]] - with CustomParallelizable[(K, V), ParTrieMap[K, V]] - with Serializable -{ - private var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf - private var equalityobj = ef - private var rootupdater = rtupd - def hashing = hashingobj - def equality = equalityobj - @deprecated("this field will be made private", "2.12.0") - @volatile /*private*/ var root = r - - def this(hashf: Hashing[K], ef: Equiv[K]) = this( - INode.newRootNode, - AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"), - hashf, - ef - ) - - def this() = this(Hashing.default, Equiv.universal) - - /* internal methods */ - - private def writeObject(out: java.io.ObjectOutputStream) { - out.writeObject(hashingobj) - out.writeObject(equalityobj) - - val it = iterator - while (it.hasNext) { - val (k, v) = it.next() - out.writeObject(k) - out.writeObject(v) - } - out.writeObject(TrieMapSerializationEnd) - } - - private def readObject(in: java.io.ObjectInputStream) { - root = INode.newRootNode - rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root") - - hashingobj = in.readObject().asInstanceOf[Hashing[K]] - equalityobj = in.readObject().asInstanceOf[Equiv[K]] - - var obj: AnyRef = null - do { - obj = in.readObject() - if (obj != TrieMapSerializationEnd) { - val k = obj.asInstanceOf[K] - val v = in.readObject().asInstanceOf[V] - update(k, v) - } - } while (obj != TrieMapSerializationEnd) - } - - @deprecated("this method will be made private", "2.12.0") - /*private*/ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) - - @deprecated("this method will be made private", "2.12.0") - /*private[collection]*/ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) - - @deprecated("this method will be made private", "2.12.0") - /*private[concurrent]*/ def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { - val r = /*READ*/root - r match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort) - } - } - - @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { - val v = /*READ*/root - v match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => - val RDCSS_Descriptor(ov, exp, nv) = desc - if (abort) { - if (CAS_ROOT(desc, ov)) ov - else RDCSS_Complete(abort) - } else { - val oldmain = ov.gcasRead(this) - if (oldmain eq exp) { - if (CAS_ROOT(desc, nv)) { - desc.committed = true - nv - } else RDCSS_Complete(abort) - } else { - if (CAS_ROOT(desc, ov)) ov - else RDCSS_Complete(abort) - } - } - } - } - - private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = { - val desc = RDCSS_Descriptor(ov, expectedmain, nv) - if (CAS_ROOT(ov, desc)) { - RDCSS_Complete(abort = false) - /*READ*/desc.committed - } else false - } - - @tailrec private def inserthc(k: K, hc: Int, v: V) { - val r = RDCSS_READ_ROOT() - if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) - } - - @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: 
AnyRef): Option[V] = { - val r = RDCSS_READ_ROOT() - - val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this) - if (ret eq null) insertifhc(k, hc, v, cond) - else ret - } - - @tailrec private def lookuphc(k: K, hc: Int): AnyRef = { - val r = RDCSS_READ_ROOT() - val res = r.rec_lookup(k, hc, 0, null, r.gen, this) - if (res eq INodeBase.RESTART) lookuphc(k, hc) - else res - } - - /* slower: - //@tailrec - private def lookuphc(k: K, hc: Int): AnyRef = { - val r = RDCSS_READ_ROOT() - try { - r.rec_lookup(k, hc, 0, null, r.gen, this) - } catch { - case RestartException => - lookuphc(k, hc) - } - } - */ - - @tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = { - val r = RDCSS_READ_ROOT() - val res = r.rec_remove(k, v, hc, 0, null, r.gen, this) - if (res ne null) res - else removehc(k, v, hc) - } - - def string = RDCSS_READ_ROOT().string(0) - - /* public methods */ - - override def seq = this - - override def par = new ParTrieMap(this) - - override def empty: TrieMap[K, V] = new TrieMap[K, V] - - def isReadOnly = rootupdater eq null - - def nonReadOnly = rootupdater ne null - - /** Returns a snapshot of this TrieMap. - * This operation is lock-free and linearizable. - * - * The snapshot is lazily updated - the first time some branch - * in the snapshot or this TrieMap are accessed, they are rewritten. - * This means that the work of rebuilding both the snapshot and this - * TrieMap is distributed across all the threads doing updates or accesses - * subsequent to the snapshot creation. - */ - @tailrec def snapshot(): TrieMap[K, V] = { - val r = RDCSS_READ_ROOT() - val expmain = r.gcasRead(this) - if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality) - else snapshot() - } - - /** Returns a read-only snapshot of this TrieMap. - * This operation is lock-free and linearizable. - * - * The snapshot is lazily updated - the first time some branch - * of this TrieMap are accessed, it is rewritten. The work of creating - * the snapshot is thus distributed across subsequent updates - * and accesses on this TrieMap by all threads. - * Note that the snapshot itself is never rewritten unlike when calling - * the `snapshot` method, but the obtained snapshot cannot be modified. - * - * This method is used by other methods such as `size` and `iterator`. 
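As a hedged sketch of the difference between `snapshot()` and `readOnlySnapshot()` (keys and values below are arbitrary):

    import scala.collection.concurrent.TrieMap

    val m = TrieMap("a" -> 1, "b" -> 2)

    val rw = m.snapshot()            // writable copy, isolated from m
    rw.put("c", 3)                   // does not affect m

    val ro = m.readOnlySnapshot()    // cheap frozen view; cannot be modified
    m.put("d", 4)                    // visible in m, not in ro
    assert(ro.get("d").isEmpty && m.get("d").contains(4))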
- */ - @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = { - val r = RDCSS_READ_ROOT() - val expmain = r.gcasRead(this) - if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality) - else readOnlySnapshot() - } - - @tailrec override def clear() { - val r = RDCSS_READ_ROOT() - if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear() - } - - - def computeHash(k: K) = hashingobj.hash(k) - - def lookup(k: K): V = { - val hc = computeHash(k) - lookuphc(k, hc).asInstanceOf[V] - } - - override def apply(k: K): V = { - val hc = computeHash(k) - val res = lookuphc(k, hc) - if (res eq null) throw new NoSuchElementException - else res.asInstanceOf[V] - } - - def get(k: K): Option[V] = { - val hc = computeHash(k) - Option(lookuphc(k, hc)).asInstanceOf[Option[V]] - } - - override def put(key: K, value: V): Option[V] = { - val hc = computeHash(key) - insertifhc(key, hc, value, null) - } - - override def update(k: K, v: V) { - val hc = computeHash(k) - inserthc(k, hc, v) - } - - def +=(kv: (K, V)) = { - update(kv._1, kv._2) - this - } - - override def remove(k: K): Option[V] = { - val hc = computeHash(k) - removehc(k, null.asInstanceOf[V], hc) - } - - def -=(k: K) = { - remove(k) - this - } - - def putIfAbsent(k: K, v: V): Option[V] = { - val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_ABSENT) - } - - // TODO once computeIfAbsent is added to concurrent.Map, - // move the comment there and tweak the 'at most once' part - /** If the specified key is not already in the map, computes its value using - * the given thunk `op` and enters it into the map. - * - * Since concurrent maps cannot contain `null` for keys or values, - * a `NullPointerException` is thrown if the thunk `op` - * returns `null`. - * - * If the specified mapping function throws an exception, - * that exception is rethrown. - * - * Note: This method will invoke op at most once. - * However, `op` may be invoked without the result being added to the map if - * a concurrent process is also trying to add a value corresponding to the - * same key `k`. - * - * @param k the key to modify - * @param op the expression that computes the value - * @return the newly added value - */ - override def getOrElseUpdate(k: K, op: =>V): V = { - val oldv = lookup(k) - if (oldv != null) oldv.asInstanceOf[V] - else { - val v = op - if (v == null) { - throw new NullPointerException("Concurrent TrieMap values cannot be null.") - } else { - val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_ABSENT) match { - case Some(oldv) => oldv - case None => v - } - } - } - } - - def remove(k: K, v: V): Boolean = { - val hc = computeHash(k) - removehc(k, v, hc).nonEmpty - } - - def replace(k: K, oldvalue: V, newvalue: V): Boolean = { - val hc = computeHash(k) - insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty - } - - def replace(k: K, v: V): Option[V] = { - val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_PRESENT) - } - - def iterator: Iterator[(K, V)] = - if (nonReadOnly) readOnlySnapshot().iterator - else new TrieMapIterator(0, this) - - //////////////////////////////////////////////////////////////////////////// - // - // scala/bug#10177 These methods need overrides as the inherited implementations - // call `.iterator` more than once, which doesn't guarantee a coherent - // view of the data if there is a concurrent writer - // Note that the we don't need overrides for keysIterator or valuesIterator - // TrieMapTest validates the behaviour. 
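A small sketch of the behaviour these overrides protect (illustrative values only): because `keySet` and `values` go through a read-only snapshot when the map is writable, each returned view reflects one coherent state even if a writer races with the call.

    import scala.collection.concurrent.TrieMap

    val m = TrieMap(1 -> "one", 2 -> "two")
    val keys   = m.keySet    // backed by a read-only snapshot
    val values = m.values    // likewise backed by its own snapshot
    m.put(3, "three")        // later writes do not leak into the views above
    assert(keys.size == 2 && values.size == 2)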
- override def values: Iterable[V] = { - if (nonReadOnly) readOnlySnapshot().values - else super.values - } - override def keySet: Set[K] = { - if (nonReadOnly) readOnlySnapshot().keySet - else super.keySet - } - override def filterKeys(p: K => Boolean): collection.Map[K, V] = { - if (nonReadOnly) readOnlySnapshot().filterKeys(p) - else super.filterKeys(p) - } - override def mapValues[W](f: V => W): collection.Map[K, W] = { - if (nonReadOnly) readOnlySnapshot().mapValues(f) - else super.mapValues(f) - } - // END extra overrides - /////////////////////////////////////////////////////////////////// - - - private def cachedSize() = { - val r = RDCSS_READ_ROOT() - r.cachedSize(this) - } - - override def size: Int = - if (nonReadOnly) readOnlySnapshot().size - else cachedSize() - - override def stringPrefix = "TrieMap" - -} - - -object TrieMap extends MutableMapFactory[TrieMap] { - val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") - - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), TrieMap[K, V]] = new MapCanBuildFrom[K, V] - - def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] - - class MangledHashing[K] extends Hashing[K] { - def hash(k: K)= scala.util.hashing.byteswap32(k.##) - } - -} - - -private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { - private val stack = new Array[Array[BasicNode]](7) - private val stackpos = new Array[Int](7) - private var depth = -1 - private var subiter: Iterator[(K, V)] = null - private var current: KVNode[K, V] = null - - if (mustInit) initialize() - - def hasNext = (current ne null) || (subiter ne null) - - def next() = if (hasNext) { - var r: (K, V) = null - if (subiter ne null) { - r = subiter.next() - checkSubiter() - } else { - r = current.kvPair - advance() - } - r - } else Iterator.empty.next() - - private def readin(in: INode[K, V]) = in.gcasRead(ct) match { - case cn: CNode[K, V] => - depth += 1 - stack(depth) = cn.array - stackpos(depth) = -1 - advance() - case tn: TNode[K, V] => - current = tn - case ln: LNode[K, V] => - subiter = ln.listmap.iterator - checkSubiter() - case null => - current = null - } - - private def checkSubiter() = if (!subiter.hasNext) { - subiter = null - advance() - } - - private def initialize() { - assert(ct.isReadOnly) - - val r = ct.RDCSS_READ_ROOT() - readin(r) - } - - def advance(): Unit = if (depth >= 0) { - val npos = stackpos(depth) + 1 - if (npos < stack(depth).length) { - stackpos(depth) = npos - stack(depth)(npos) match { - case sn: SNode[K, V] => - current = sn - case in: INode[K, V] => - readin(in) - } - } else { - depth -= 1 - advance() - } - } else current = null - - protected def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new TrieMapIterator[K, V](_lev, _ct, _mustInit) - - protected def dupTo(it: TrieMapIterator[K, V]) = { - it.level = this.level - it.ct = this.ct - it.depth = this.depth - it.current = this.current - - // these need a deep copy - Array.copy(this.stack, 0, it.stack, 0, 7) - Array.copy(this.stackpos, 0, it.stackpos, 0, 7) - - // this one needs to be evaluated - if (this.subiter == null) it.subiter = null - else { - val lst = this.subiter.toList - this.subiter = lst.iterator - it.subiter = lst.iterator - } - } - - /** Returns a sequence of iterators over subsets of this iterator. - * It's used to ease the implementation of splitters for a parallel version of the TrieMap. 
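Although `subdivide()` itself is protected, its effect is visible through the parallel view of the map. The following sketch assumes the Scala 2.12 standard library, where parallel collections (and `ParTrieMap`) are built in:

    import scala.collection.concurrent.TrieMap

    val m = TrieMap((1 to 10000).map(i => i -> i.toLong): _*)
    // The splitter behind m.par relies on subdivide() to hand disjoint
    // subsets of the trie to different workers.
    val total = m.par.map(_._2).sum
    println(total)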
- */ - protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) { - // the case where an LNode is being iterated - val it = newIterator(level + 1, ct, _mustInit = false) - it.depth = -1 - it.subiter = this.subiter - it.current = null - this.subiter = null - advance() - this.level += 1 - Seq(it, this) - } else if (depth == -1) { - this.level += 1 - Seq(this) - } else { - var d = 0 - while (d <= depth) { - val rem = stack(d).length - 1 - stackpos(d) - if (rem > 0) { - val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) - stack(d) = arr1 - stackpos(d) = -1 - val it = newIterator(level + 1, ct, _mustInit = false) - it.stack(0) = arr2 - it.stackpos(0) = -1 - it.depth = 0 - it.advance() // <-- fix it - this.level += 1 - return Seq(this, it) - } - d += 1 - } - this.level += 1 - Seq(this) - } - - @deprecated("this method will be removed", "2.12.0") - def printDebug() { - println("ctrie iterator") - println(stackpos.mkString(",")) - println("depth: " + depth) - println("curr.: " + current) - println(stack.mkString("\n")) - } - -} - - -private[concurrent] object RestartException extends ControlThrowable - - -/** Only used for ctrie serialization. */ -@SerialVersionUID(0L - 7237891413820527142L) -private[concurrent] case object TrieMapSerializationEnd - - -private[concurrent] object Debug { - import JavaConverters._ - - lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef] - - def log(s: AnyRef) = logbuffer.add(s) - - def flush() { - for (s <- logbuffer.iterator().asScala) Console.out.println(s.toString) - logbuffer.clear() - } - - def clear() { - logbuffer.clear() - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/convert/AsJavaConverters.scala b/tests/scala2-library/src/library/scala/collection/convert/AsJavaConverters.scala deleted file mode 100644 index c7c1fb9c745c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/AsJavaConverters.scala +++ /dev/null @@ -1,262 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } - -/** Defines converter methods from Scala to Java collections. */ -trait AsJavaConverters { - import Wrappers._ - - /** - * Converts a Scala `Iterator` to a Java `Iterator`. - * - * The returned Java `Iterator` is backed by the provided Scala `Iterator` and any side-effects of - * using it via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Iterator` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaIterator]](java.util.Iterator)` then the original Java `Iterator` will - * be returned. - * - * @param i The Scala `Iterator` to be converted. - * @return A Java `Iterator` view of the argument. - */ - def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = i match { - case null => null - case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]] - case _ => IteratorWrapper(i) - } - - /** - * Converts a Scala `Iterator` to a Java `Enumeration`. - * - * The returned Java `Enumeration` is backed by the provided Scala `Iterator` and any side-effects - * of using it via the Java interface will be visible via the Scala interface and vice versa. 
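For illustration (assuming `scala.collection.JavaConverters._` is in scope, as in the non-deprecated API), the iterator and enumeration converters round-trip by unwrapping rather than re-wrapping:

    import scala.collection.JavaConverters._

    val scalaIt: Iterator[Int] = Iterator(1, 2, 3)
    val javaIt: java.util.Iterator[Int] = scalaIt.asJava
    assert(javaIt.asScala eq scalaIt)    // the original Scala iterator comes back

    val javaEnum: java.util.Enumeration[Int] = Iterator(4, 5).asJavaEnumeration
    while (javaEnum.hasMoreElements) println(javaEnum.nextElement())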
- * - * If the Scala `Iterator` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.enumerationAsScalaIterator]](java.util.Enumeration)` then the original Java - * `Enumeration` will be returned. - * - * @param i The Scala `Iterator` to be converted. - * @return A Java `Enumeration` view of the argument. - */ - def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match { - case null => null - case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]] - case _ => IteratorWrapper(i) - } - - /** - * Converts a Scala `Iterable` to a Java `Iterable`. - * - * The returned Java `Iterable` is backed by the provided Scala `Iterable` and any side-effects of - * using it via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Iterable` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.iterableAsScalaIterable]](java.lang.Iterable)` then the original Java - * `Iterable` will be returned. - * - * @param i The Scala `Iterable` to be converted. - * @return A Java `Iterable` view of the argument. - */ - def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match { - case null => null - case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]] - case _ => IterableWrapper(i) - } - - /** - * Converts a Scala `Iterable` to an immutable Java `Collection`. - * - * If the Scala `Iterable` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.collectionAsScalaIterable]](java.util.Collection)` then the original Java - * `Collection` will be returned. - * - * @param i The Scala `Iterable` to be converted. - * @return A Java `Collection` view of the argument. - */ - def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match { - case null => null - case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]] - case _ => new IterableWrapper(i) - } - - /** - * Converts a Scala mutable `Buffer` to a Java List. - * - * The returned Java List is backed by the provided Scala `Buffer` and any side-effects of using - * it via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Buffer` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be - * returned. - * - * @param b The Scala `Buffer` to be converted. - * @return A Java `List` view of the argument. - */ - def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableBufferWrapper(b) - } - - /** - * Converts a Scala mutable `Seq` to a Java `List`. - * - * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it - * via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Seq` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be - * returned. - * - * @param s The Scala `Seq` to be converted. - * @return A Java `List` view of the argument. - */ - def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = s match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableSeqWrapper(s) - } - - /** - * Converts a Scala `Seq` to a Java `List`. 
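A short usage sketch for the buffer and seq converters (value names are arbitrary); the returned Java lists are live views, except for the immutable-`Seq` case, which yields an unmodifiable view:

    import scala.collection.JavaConverters._
    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer("a", "b")
    val jList: java.util.List[String] = buf.asJava
    jList.add("c")                                   // mutates buf as well
    assert(buf == ArrayBuffer("a", "b", "c"))

    val readOnly: java.util.List[Int] = Seq(1, 2, 3).asJava   // add() would throw here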
- * - * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it - * via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Seq` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be - * returned. - * - * @param s The Scala `Seq` to be converted. - * @return A Java `List` view of the argument. - */ - def seqAsJavaList[A](s: Seq[A]): ju.List[A] = s match { - case null => null - case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]] - case _ => new SeqWrapper(s) - } - - /** - * Converts a Scala mutable `Set` to a Java `Set`. - * - * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it - * via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Set` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned. - * - * @param s The Scala mutable `Set` to be converted. - * @return A Java `Set` view of the argument. - */ - def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new MutableSetWrapper(s) - } - - /** - * Converts a Scala `Set` to a Java `Set`. - * - * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it - * via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Set` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned. - * - * @param s The Scala `Set` to be converted. - * @return A Java `Set` view of the argument. - */ - def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new SetWrapper(s) - } - - /** - * Converts a Scala mutable `Map` to a Java `Map`. - * - * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it - * via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be - * returned. - * - * @param m The Scala mutable `Map` to be converted. - * @return A Java `Map` view of the argument. - */ - def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped - case _ => new MutableMapWrapper(m) - } - - /** - * Converts a Scala mutable `Map` to a Java `Dictionary`. - * - * The returned Java `Dictionary` is backed by the provided Scala `Dictionary` and any - * side-effects of using it via the Java interface will be visible via the Scala interface and - * vice versa. - * - * If the Scala `Dictionary` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.dictionaryAsScalaMap]](java.util.Dictionary)` then the original Java - * `Dictionary` will be returned. - * - * @param m The Scala `Map` to be converted. - * @return A Java `Dictionary` view of the argument. 
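A sketch of the map-direction converters documented here (keys and values are arbitrary):

    import scala.collection.JavaConverters._
    import scala.collection.mutable

    val sm = mutable.Map("x" -> 1)
    val jm: java.util.Map[String, Int] = sm.asJava
    jm.put("y", 2)                       // visible on the Scala side too
    assert(sm("y") == 2)

    val dict: java.util.Dictionary[String, Int] = sm.asJavaDictionary
    println(dict.get("x"))               // 1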
- */ - def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match { - case null => null - case JDictionaryWrapper(wrapped) => wrapped - case _ => new DictionaryWrapper(m) - } - - /** - * Converts a Scala `Map` to a Java `Map`. - * - * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it - * via the Java interface will be visible via the Scala interface and vice versa. - * - * If the Scala `Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be - * returned. - * - * @param m The Scala `Map` to be converted. - * @return A Java `Map` view of the argument. - */ - def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]] - case _ => new MapWrapper(m) - } - - /** - * Converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. - * - * The returned Java `ConcurrentMap` is backed by the provided Scala `concurrent.Map` and any - * side-effects of using it via the Java interface will be visible via the Scala interface and - * vice versa. - * - * If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsScalaConcurrentMap]](java.util.concurrent.ConcurrentMap)` then the - * original Java `ConcurrentMap` will be returned. - * - * @param m The Scala `concurrent.Map` to be converted. - * @return A Java `ConcurrentMap` view of the argument. - */ - def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match { - case null => null - case JConcurrentMapWrapper(wrapped) => wrapped - case _ => new ConcurrentMapWrapper(m) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/convert/AsScalaConverters.scala b/tests/scala2-library/src/library/scala/collection/convert/AsScalaConverters.scala deleted file mode 100644 index f9e38797e1f2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/AsScalaConverters.scala +++ /dev/null @@ -1,207 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } - -/** Defines converter methods from Java to Scala collections. */ -trait AsScalaConverters { - import Wrappers._ - - /** - * Converts a Java `Iterator` to a Scala `Iterator`. - * - * The returned Scala `Iterator` is backed by the provided Java `Iterator` and any side-effects of - * using it via the Scala interface will be visible via the Java interface and vice versa. - * - * If the Java `Iterator` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaIterator]](scala.collection.Iterator)` then the original Scala - * `Iterator` will be returned. - * - * @param i The Java `Iterator` to be converted. - * @return A Scala `Iterator` view of the argument. - */ - def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = i match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JIteratorWrapper(i) - } - - /** - * Converts a Java `Enumeration` to a Scala `Iterator`. 
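For illustration, the Java-to-Scala iterator converters in use (the collections below are arbitrary):

    import scala.collection.JavaConverters._
    import java.util.{Arrays, Collections}

    val jIt: java.util.Iterator[String] = Arrays.asList("a", "b").iterator()
    val totalLength = jIt.asScala.map(_.length).sum          // use it as a Scala Iterator

    val en: java.util.Enumeration[String] = Collections.enumeration(Arrays.asList("x", "y"))
    en.asScala.foreach(println)                              // enumerationAsScalaIterator under the hood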
- * - * The returned Scala `Iterator` is backed by the provided Java `Enumeration` and any side-effects - * of using it via the Scala interface will be visible via the Java interface and vice versa. - * - * If the Java `Enumeration` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaEnumeration]](scala.collection.Iterator)` then the original Scala - * `Iterator` will be returned. - * - * @param i The Java `Enumeration` to be converted. - * @return A Scala `Iterator` view of the argument. - */ - def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JEnumerationWrapper(i) - } - - /** - * Converts a Java `Iterable` to a Scala `Iterable`. - * - * The returned Scala `Iterable` is backed by the provided Java `Iterable` and any side-effects of - * using it via the Scala interface will be visible via the Java interface and vice versa. - * - * If the Java `Iterable` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaIterable]](scala.collection.Iterable) then the original Scala - * `Iterable` will be returned. - * - * @param i The Java `Iterable` to be converted. - * @return A Scala `Iterable` view of the argument. - */ - def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JIterableWrapper(i) - } - - /** - * Converts a Java `Collection` to an Scala `Iterable`. - * - * If the Java `Collection` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaCollection]](scala.collection.Iterable)` then the original Scala - * `Iterable` will be returned. - * - * @param i The Java `Collection` to be converted. - * @return A Scala `Iterable` view of the argument. - */ - def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JCollectionWrapper(i) - } - - /** - * Converts a Java `List` to a Scala mutable `Buffer`. - * - * The returned Scala `Buffer` is backed by the provided Java `List` and any side-effects of using - * it via the Scala interface will be visible via the Java interface and vice versa. - * - * If the Java `List` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.bufferAsJavaList]](scala.collection.mutable.Buffer)` then the original Scala - * `Buffer` will be returned. - * - * @param l The Java `List` to be converted. - * @return A Scala mutable `Buffer` view of the argument. - */ - def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match { - case null => null - case MutableBufferWrapper(wrapped) => wrapped - case _ => new JListWrapper(l) - } - - /** - * Converts a Java `Set` to a Scala mutable `Set`. - * - * The returned Scala `Set` is backed by the provided Java `Set` and any side-effects of using it - * via the Scala interface will be visible via the Java interface and vice versa. - * - * If the Java `Set` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mutableSetAsJavaSet]](scala.collection.mutable.Set)` then the original Scala - * `Set` will be returned. - * - * @param s The Java `Set` to be converted. - * @return A Scala mutable `Set` view of the argument. 
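A usage sketch for `asScalaBuffer` and `asScalaSet` (arbitrary contents); both return mutable views backed by the Java originals, so changes propagate in both directions:

    import scala.collection.JavaConverters._

    val jList = new java.util.ArrayList[Int]()
    jList.add(1); jList.add(2)
    val buf = jList.asScala        // mutable.Buffer[Int] backed by jList
    buf += 3
    assert(jList.size == 3)

    val jSet = new java.util.HashSet[String]()
    jSet.add("a")
    val set = jSet.asScala         // mutable.Set[String] backed by jSet
    set += "b"
    assert(jSet.contains("b"))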
- */ - def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match { - case null => null - case MutableSetWrapper(wrapped) => wrapped - case _ => new JSetWrapper(s) - } - - /** - * Converts a Java `Map` to a Scala mutable `Map`. - * - * The returned Scala `Map` is backed by the provided Java `Map` and any side-effects of using it - * via the Scala interface will be visible via the Java interface and vice versa. - * - * If the Java `Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mutableMapAsJavaMap]](scala.collection.mutable.Map)` then the original Scala - * `Map` will be returned. - * - * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), it is - * your responsibility to wrap all non-atomic operations with `underlying.synchronized`. - * This includes `get`, as `java.util.Map`'s API does not allow for an atomic `get` when `null` - * values may be present. - * - * @param m The Java `Map` to be converted. - * @return A Scala mutable `Map` view of the argument. - */ - def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match { - case null => null - case MutableMapWrapper(wrapped) => wrapped - case _ => new JMapWrapper(m) - } - - /** - * Converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`. - * - * The returned Scala `ConcurrentMap` is backed by the provided Java `ConcurrentMap` and any - * side-effects of using it via the Scala interface will be visible via the Java interface and - * vice versa. - * - * If the Java `ConcurrentMap` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsJavaConcurrentMap]](scala.collection.mutable.ConcurrentMap)` - * then the original Scala `ConcurrentMap` will be returned. - * - * @param m The Java `ConcurrentMap` to be converted. - * @return A Scala mutable `ConcurrentMap` view of the argument. - */ - def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match { - case null => null - case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying - case _ => new JConcurrentMapWrapper(m) - } - - /** - * Converts a Java `Dictionary` to a Scala mutable `Map`. - * - * The returned Scala `Map` is backed by the provided Java `Dictionary` and any side-effects of - * using it via the Scala interface will be visible via the Java interface and vice versa. - * - * If the Java `Dictionary` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaDictionary]](scala.collection.mutable.Map)` then the original - * Scala `Map` will be returned. - * - * @param p The Java `Dictionary` to be converted. - * @return A Scala mutable `Map` view of the argument. - */ - def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match { - case null => null - case DictionaryWrapper(wrapped) => wrapped - case _ => new JDictionaryWrapper(p) - } - - /** - * Converts a Java `Properties` to a Scala mutable `Map[String, String]`. - * - * The returned Scala `Map[String, String]` is backed by the provided Java `Properties` and any - * side-effects of using it via the Scala interface will be visible via the Java interface and - * vice versa. - * - * @param p The Java `Properties` to be converted. - * @return A Scala mutable `Map[String, String]` view of the argument. 
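A sketch of the remaining Java-to-Scala map converters, including the `Properties` case mentioned above (the property key is chosen for illustration):

    import scala.collection.JavaConverters._
    import java.util.concurrent.ConcurrentHashMap

    val jm = new java.util.HashMap[String, Int]()
    jm.put("a", 1)
    val sm = jm.asScala                      // mutable.Map view of jm
    sm("b") = 2
    assert(jm.get("b") == 2)

    val cm = new ConcurrentHashMap[String, Int]().asScala   // concurrent.Map view
    cm.putIfAbsent("k", 42)

    val props = System.getProperties.asScala // mutable.Map[String, String]
    println(props.get("java.version"))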
- */ - def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match { - case null => null - case _ => new JPropertiesWrapper(p) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/convert/DecorateAsJava.scala b/tests/scala2-library/src/library/scala/collection/convert/DecorateAsJava.scala deleted file mode 100644 index 83fffa59402b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/DecorateAsJava.scala +++ /dev/null @@ -1,109 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import Decorators._ -import scala.language.implicitConversions - -/** Defines `asJava` extension methods for [[JavaConverters]]. */ -trait DecorateAsJava extends AsJavaConverters { - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`. - * @see [[asJavaIterator]] - */ - implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = - new AsJava(asJavaIterator(i)) - - /** - * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`. - * @see [[asJavaEnumeration]] - */ - implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = - new AsJavaEnumeration(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. - * @see [[asJavaIterable]] - */ - implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = - new AsJava(asJavaIterable(i)) - - /** - * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. - * @see [[asJavaCollection]] - */ - implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = - new AsJavaCollection(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. - * @see [[bufferAsJavaList]] - */ - implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = - new AsJava(bufferAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. - * @see [[mutableSeqAsJavaList]] - */ - implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = - new AsJava(mutableSeqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. - * @see [[seqAsJavaList]] - */ - implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = - new AsJava(seqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. - * @see [[mutableSetAsJavaSet]] - */ - implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = - new AsJava(mutableSetAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. - * @see [[setAsJavaSet]] - */ - implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = - new AsJava(setAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. 
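A minimal sketch of the decorator style these implicits enable: importing `scala.collection.JavaConverters._` adds an explicit `asJava` method at the call site rather than an invisible conversion (`callJavaApi` below is a made-up stand-in for any Java method):

    import scala.collection.JavaConverters._

    def callJavaApi(xs: java.util.List[String]): Int = xs.size()

    val names = Seq("ada", "grace")
    println(callJavaApi(names.asJava))   // the conversion is explicit and visible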
- * @see [[mutableMapAsJavaMap]] - */ - implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = - new AsJava(mutableMapAsJavaMap(m)) - - /** - * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * @see [[asJavaDictionary]] - */ - implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] = - new AsJavaDictionary(m) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. - * @see [[mapAsJavaMap]] - */ - implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = - new AsJava(mapAsJavaMap(m)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. - * @see [[mapAsJavaConcurrentMap]]. - */ - implicit def mapAsJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] = - new AsJava(mapAsJavaConcurrentMap(m)) -} diff --git a/tests/scala2-library/src/library/scala/collection/convert/DecorateAsScala.scala b/tests/scala2-library/src/library/scala/collection/convert/DecorateAsScala.scala deleted file mode 100644 index f680aa526707..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/DecorateAsScala.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import Decorators._ -import scala.language.implicitConversions - -/** Defines `asScala` extension methods for [[JavaConverters]]. */ -trait DecorateAsScala extends AsScalaConverters { - /** - * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. - * @see [[asScalaIterator]] - */ - implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = - new AsScala(asScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. - * @see [[enumerationAsScalaIterator]] - */ - implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = - new AsScala(enumerationAsScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`. - * @see [[iterableAsScalaIterable]] - */ - implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = - new AsScala(iterableAsScalaIterable(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Collection` to an Scala `Iterable`. - * @see [[collectionAsScalaIterable]] - */ - implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = - new AsScala(collectionAsScalaIterable(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`. - * @see [[asScalaBuffer]] - */ - implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = - new AsScala(asScalaBuffer(l)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`. 
- * @see [[asScalaSet]] - */ - implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = - new AsScala(asScalaSet(s)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`. - * @see [[mapAsScalaMap]] - */ - implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = - new AsScala(mapAsScalaMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`. - * @see [[mapAsScalaConcurrentMap]] - */ - implicit def mapAsScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[concurrent.Map[A, B]] = - new AsScala(mapAsScalaConcurrentMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`. - * @see [[dictionaryAsScalaMap]] - */ - implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] = - new AsScala(dictionaryAsScalaMap(p)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`. - * @see [[propertiesAsScalaMap]] - */ - implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = - new AsScala(propertiesAsScalaMap(p)) -} diff --git a/tests/scala2-library/src/library/scala/collection/convert/Decorators.scala b/tests/scala2-library/src/library/scala/collection/convert/Decorators.scala deleted file mode 100644 index 3e45a0225430..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/Decorators.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ util => ju } - -private[collection] object Decorators { - /** Generic class containing the `asJava` converter method */ - class AsJava[A](op: => A) { - /** Converts a Scala collection to the corresponding Java collection */ - def asJava: A = op - } - - /** Generic class containing the `asScala` converter method */ - class AsScala[A](op: => A) { - /** Converts a Java collection to the corresponding Scala collection */ - def asScala: A = op - } - - /** Generic class containing the `asJavaCollection` converter method */ - class AsJavaCollection[A](i: Iterable[A]) { - /** Converts a Scala `Iterable` to a Java `Collection` */ - def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) - } - - /** Generic class containing the `asJavaEnumeration` converter method */ - class AsJavaEnumeration[A](i: Iterator[A]) { - /** Converts a Scala `Iterator` to a Java `Enumeration` */ - def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) - } - - /** Generic class containing the `asJavaDictionary` converter method */ - class AsJavaDictionary[A, B](m : mutable.Map[A, B]) { - /** Converts a Scala `Map` to a Java `Dictionary` */ - def asJavaDictionary: ju.Dictionary[A, B] = JavaConverters.asJavaDictionary(m) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/convert/ImplicitConversions.scala b/tests/scala2-library/src/library/scala/collection/convert/ImplicitConversions.scala deleted file mode 100644 index 35e6ce1616a2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/ImplicitConversions.scala +++ /dev/null @@ -1,171 
+0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import scala.language.implicitConversions - -import JavaConverters._ - -/** Defines implicit converter methods from Java to Scala collections. */ -trait ToScalaImplicits { - /** Implicitly converts a Java `Iterator` to a Scala `Iterator`. - * @see [[AsScalaConverters.asScalaIterator]] - */ - implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it) - - /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`. - * @see [[AsScalaConverters.enumerationAsScalaIterator]] - */ - implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i) - - /** Implicitly converts a Java `Iterable` to a Scala `Iterable`. - * @see [[AsScalaConverters.iterableAsScalaIterable]] - */ - implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i) - - /** Implicitly converts a Java `Collection` to an Scala `Iterable`. - * @see [[AsScalaConverters.collectionAsScalaIterable]] - */ - implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i) - - /** Implicitly converts a Java `List` to a Scala mutable `Buffer`. - * @see [[AsScalaConverters.asScalaBuffer]] - */ - implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l) - - /** Implicitly converts a Java `Set` to a Scala mutable `Set`. - * @see [[AsScalaConverters.asScalaSet]] - */ - implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s) - - /** Implicitly converts a Java `Map` to a Scala mutable `Map`. - * @see [[AsScalaConverters.mapAsScalaMap]] - */ - implicit def `map AsScala`[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap(m) - - /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`. - * @see [[AsScalaConverters.mapAsScalaConcurrentMap]] - */ - implicit def `map AsScalaConcurrentMap`[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = mapAsScalaConcurrentMap(m) - - /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`. - * @see [[AsScalaConverters.dictionaryAsScalaMap]] - */ - implicit def `dictionary AsScalaMap`[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap(p) - - /** Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. - * @see [[AsScalaConverters.propertiesAsScalaMap]] - */ - implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) -} - -/** Defines implicit conversions from Scala to Java collections. */ -trait ToJavaImplicits { - /** Implicitly converts a Scala `Iterator` to a Java `Iterator`. - * @see [[AsJavaConverters.asJavaIterator]] - */ - implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it) - - /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`. - * @see [[AsJavaConverters.asJavaEnumeration]] - */ - implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it) - - /** Implicitly converts a Scala `Iterable` to a Java `Iterable`. 
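For contrast with the explicit decorators, a hedged sketch of the implicit-conversion style provided by `ToScalaImplicits` (imported here through `ImplicitConversionsToScala`, the object defined later in this file); `firstOrEmpty` is a made-up example method:

    import scala.collection.convert.ImplicitConversionsToScala._
    import scala.collection.mutable

    def firstOrEmpty(xs: mutable.Buffer[String]): String =
      xs.headOption.getOrElse("")

    val jList = java.util.Arrays.asList("a", "b")
    println(firstOrEmpty(jList))   // jList is converted to a Buffer implicitly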
- * @see [[AsJavaConverters.asJavaIterable]] - */ - implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i) - - /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`. - * @see [[AsJavaConverters.asJavaCollection]] - */ - implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it) - - /** Implicitly converts a Scala mutable `Buffer` to a Java `List`. - * @see [[AsJavaConverters.bufferAsJavaList]] - */ - implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b) - - /** Implicitly converts a Scala mutable `Seq` to a Java `List`. - * @see [[AsJavaConverters.mutableSeqAsJavaList]] - */ - implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq) - - /** Implicitly converts a Scala `Seq` to a Java `List`. - * @see [[AsJavaConverters.seqAsJavaList]] - */ - implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq) - - /** Implicitly converts a Scala mutable `Set` to a Java `Set`. - * @see [[AsJavaConverters.mutableSetAsJavaSet]] - */ - implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s) - - /** Implicitly converts a Scala `Set` to a Java `Set`. - * @see [[AsJavaConverters.setAsJavaSet]] - */ - implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s) - - /** Implicitly converts a Scala mutable `Map` to a Java `Map`. - * @see [[AsJavaConverters.mutableMapAsJavaMap]] - */ - implicit def `mutableMap AsJavaMap`[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap(m) - - /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * @see [[AsJavaConverters.asJavaDictionary]] - */ - implicit def `dictionary asJava`[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary(m) - - /** Implicitly converts a Scala `Map` to a Java `Map`. - * @see [[AsJavaConverters.mapAsJavaMap]] - */ - implicit def `map AsJavaMap`[A, B](m: Map[A, B]): ju.Map[A, B] = mapAsJavaMap(m) - - /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. - * @see [[AsJavaConverters.mapAsJavaConcurrentMap]] - */ - implicit def `map AsJavaConcurrentMap`[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = mapAsJavaConcurrentMap(m) -} - -/** - * Convenience for miscellaneous implicit conversions from Scala to Java collections API. - * - * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. - * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. - */ -object ImplicitConversionsToJava extends ToJavaImplicits - -/** - * Convenience for miscellaneous implicit conversions from Java to Scala collections API. - * - * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. - * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. - */ -object ImplicitConversionsToScala extends ToScalaImplicits - -/** - * Convenience for miscellaneous implicit conversions between Java and Scala collections API. - * - * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. - * Implicit conversions may cause unexpected issues. Example: - * - * {{{ - * import collection.convert.ImplicitConversions._ - * case class StringBox(s: String) - * val m = Map(StringBox("one") -> "uno") - * m.get("one") - * }}} - * - * The above example returns `null` instead of producing a type error at compile-time. 
The map is - * implicitly converted to a `java.util.Map` which provides a method `get(x: AnyRef)`. - */ -object ImplicitConversions extends ToScalaImplicits with ToJavaImplicits diff --git a/tests/scala2-library/src/library/scala/collection/convert/WrapAsJava.scala b/tests/scala2-library/src/library/scala/collection/convert/WrapAsJava.scala deleted file mode 100644 index e3a064b79dca..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/WrapAsJava.scala +++ /dev/null @@ -1,290 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import scala.language.implicitConversions - -@deprecated("use JavaConverters or consider ToJavaImplicits", since="2.12.0") -trait WrapAsJava extends LowPriorityWrapAsJava { - // provide higher-priority implicits with names that don't exist in JavaConverters for the case - // when importing both JavaConverters._ and JavaConversions._. otherwise implicit conversions - // would not apply, see https://github.com/scala/scala/pull/5109#issuecomment-212417789 - implicit def `deprecated asJavaIterator`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it) - implicit def `deprecated asJavaEnumeration`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it) - implicit def `deprecated asJavaIterable`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i) - implicit def `deprecated asJavaCollection`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it) - implicit def `deprecated bufferAsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b) - implicit def `deprecated mutableSeqAsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq) - implicit def `deprecated seqAsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq) - implicit def `deprecated mutableSetAsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s) - implicit def `deprecated setAsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s) - implicit def `deprecated mutableMapAsJavaMap`[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap(m) - implicit def `deprecated asJavaDictionary`[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary(m) - implicit def `deprecated mapAsJavaMap`[A, B](m: Map[A, B]): ju.Map[A, B] = mapAsJavaMap(m) - implicit def `deprecated mapAsJavaConcurrentMap`[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = mapAsJavaConcurrentMap(m) -} - -private[convert] trait LowPriorityWrapAsJava { - import Wrappers._ - - /** - * Implicitly converts a Scala Iterator to a Java Iterator. - * The returned Java Iterator is backed by the provided Scala - * Iterator and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterator was previously obtained from an implicit or - * explicit call of `asIterator(java.util.Iterator)` then the original - * Java Iterator will be returned. - * - * @param it The Iterator to be converted. - * @return A Java Iterator view of the argument. 
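The pitfall documented above is easiest to see next to the explicit alternative. A minimal sketch, assuming Scala 2.12 and the `scala.collection.JavaConverters` decorators that the deprecation messages in these deleted files point to; `StringBox` is the same illustrative class as in the Scaladoc example:

{{{
import scala.collection.JavaConverters._

case class StringBox(s: String)
val m = Map(StringBox("one") -> "uno")

m.get(StringBox("one"))                     // Some("uno")
// m.get("one") does not compile here: Map[StringBox, String].get expects a StringBox.

// With an explicit conversion, the loosely typed Java lookup is at least visible in the code:
val jm: java.util.Map[StringBox, String] = m.asJava
jm.get("one")                               // null -- java.util.Map.get accepts any Object
}}}

With the implicit conversions above in scope, the same null-returning lookup happens silently, which is why the Scaladoc recommends the explicit converters.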
- */ - implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match { - case null => null - case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]] - case _ => IteratorWrapper(it) - } - - /** - * Implicitly converts a Scala Iterator to a Java Enumeration. - * The returned Java Enumeration is backed by the provided Scala - * Iterator and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterator was previously obtained from an implicit or - * explicit call of `asIterator(java.util.Enumeration)` then the - * original Java Enumeration will be returned. - * - * @param it The Iterator to be converted. - * @return A Java Enumeration view of the argument. - */ - implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match { - case null => null - case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]] - case _ => IteratorWrapper(it) - } - - /** - * Implicitly converts a Scala Iterable to a Java Iterable. - * The returned Java Iterable is backed by the provided Scala - * Iterable and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterable was previously obtained from an implicit or - * explicit call of `asIterable(java.lang.Iterable)` then the original - * Java Iterable will be returned. - * - * @param i The Iterable to be converted. - * @return A Java Iterable view of the argument. - */ - implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match { - case null => null - case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]] - case _ => IterableWrapper(i) - } - - /** - * Implicitly converts a Scala Iterable to an immutable Java - * Collection. - * - * If the Scala Iterable was previously obtained from an implicit or - * explicit call of `asSizedIterable(java.util.Collection)` then the original - * Java Collection will be returned. - * - * @param it The SizedIterable to be converted. - * @return A Java Collection view of the argument. - */ - implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match { - case null => null - case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]] - case _ => new IterableWrapper(it) - } - - /** - * Implicitly converts a Scala mutable Buffer to a Java List. - * The returned Java List is backed by the provided Scala - * Buffer and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Buffer was previously obtained from an implicit or - * explicit call of `asBuffer(java.util.List)` then the original - * Java List will be returned. - * - * @param b The Buffer to be converted. - * @return A Java List view of the argument. - */ - implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableBufferWrapper(b) - } - - /** - * Implicitly converts a Scala mutable Seq to a Java List. - * The returned Java List is backed by the provided Scala - * Seq and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Seq was previously obtained from an implicit or - * explicit call of `asSeq(java.util.List)` then the original - * Java List will be returned. - * - * @param seq The Seq to be converted. 
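The `case JIteratorWrapper(wrapped) => wrapped` branches above mean a conversion round trip hands back the original object instead of stacking wrappers. A small check, assuming Scala 2.12 with `JavaConverters`, which delegates to these same wrappers:

{{{
import scala.collection.JavaConverters._

val it = Iterator(1, 2, 3)
val jit: java.util.Iterator[Int] = it.asJava   // wrapped in an IteratorWrapper
(jit.asScala eq it)                            // true: unwrapped back to the original iterator
}}}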
- * @return A Java List view of the argument. - */ - implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableSeqWrapper(seq) - } - - /** - * Implicitly converts a Scala Seq to a Java List. - * The returned Java List is backed by the provided Scala - * Seq and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Seq was previously obtained from an implicit or - * explicit call of `asSeq(java.util.List)` then the original - * Java List will be returned. - * - * @param seq The Seq to be converted. - * @return A Java List view of the argument. - */ - implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match { - case null => null - case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]] - case _ => new SeqWrapper(seq) - } - - /** - * Implicitly converts a Scala mutable Set to a Java Set. - * The returned Java Set is backed by the provided Scala - * Set and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Set was previously obtained from an implicit or - * explicit call of `asSet(java.util.Set)` then the original - * Java Set will be returned. - * - * @param s The Set to be converted. - * @return A Java Set view of the argument. - */ - implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new MutableSetWrapper(s) - } - - /** - * Implicitly converts a Scala Set to a Java Set. - * The returned Java Set is backed by the provided Scala - * Set and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Set was previously obtained from an implicit or - * explicit call of asSet(java.util.Set) then the original - * Java Set will be returned. - * - * @param s The Set to be converted. - * @return A Java Set view of the argument. - */ - implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new SetWrapper(s) - } - - /** - * Implicitly converts a Scala mutable Map to a Java Map. - * The returned Java Map is backed by the provided Scala - * Map and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Map was previously obtained from an implicit or - * explicit call of `asMap(java.util.Map)` then the original - * Java Map will be returned. - * - * @param m The Map to be converted. - * @return A Java Map view of the argument. - */ - implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped - case _ => new MutableMapWrapper(m) - } - - /** - * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * - * The returned Java `Dictionary` is backed by the provided Scala - * `Dictionary` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `Dictionary` was previously obtained from an implicit or - * explicit call of `asMap(java.util.Dictionary)` then the original - * Java Dictionary will be returned. - * - * @param m The `Map` to be converted. - * @return A Java `Dictionary` view of the argument. 
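As the Scaladoc above stresses, these conversions produce live views, not copies: changes made through the Java interface are visible in the Scala collection and vice versa. A brief illustration, assuming Scala 2.12 with `JavaConverters` (`sm` and `jm` are just illustrative names):

{{{
import scala.collection.JavaConverters._
import scala.collection.mutable

val sm = mutable.Map("a" -> 1)
val jm: java.util.Map[String, Int] = sm.asJava   // a view backed by sm

jm.put("b", 2)                                   // mutate through the Java interface
sm("b")                                          // 2: visible on the Scala side

sm += ("c" -> 3)                                 // mutate through the Scala interface
jm.containsKey("c")                              // true
}}}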
- */ - implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match { - case null => null - case JDictionaryWrapper(wrapped) => wrapped - case _ => new DictionaryWrapper(m) - } - - /** - * Implicitly converts a Scala `Map` to a Java `Map`. - * - * The returned Java `Map` is backed by the provided Scala `Map` and - * any side-effects of using it via the Java interface will be visible - * via the Scala interface and vice versa. - * - * If the Scala `Map` was previously obtained from an implicit or - * explicit call of `asMap(java.util.Map)` then the original - * Java `Map` will be returned. - * - * @param m The `Map` to be converted. - * @return A Java `Map` view of the argument. - */ - implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]] - case _ => new MapWrapper(m) - } - - /** - * Implicitly converts a Scala mutable `concurrent.Map` to a Java - * `ConcurrentMap`. - * - * The returned Java `ConcurrentMap` is backed by the provided Scala - * `concurrent.Map` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `concurrent.Map` was previously obtained from an implicit or - * explicit call of `mapAsScalaConcurrentMap(java.util.concurrent.ConcurrentMap)` - * then the original Java ConcurrentMap will be returned. - * - * @param m The Scala `concurrent.Map` to be converted. - * @return A Java `ConcurrentMap` view of the argument. - */ - implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match { - case null => null - case JConcurrentMapWrapper(wrapped) => wrapped - case _ => new ConcurrentMapWrapper(m) - } -} - -@deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0") -object WrapAsJava extends WrapAsJava diff --git a/tests/scala2-library/src/library/scala/collection/convert/WrapAsScala.scala b/tests/scala2-library/src/library/scala/collection/convert/WrapAsScala.scala deleted file mode 100644 index fbaafde79875..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/WrapAsScala.scala +++ /dev/null @@ -1,229 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import scala.language.implicitConversions - -@deprecated("use JavaConverters or consider ToScalaImplicits", since="2.12.0") -trait WrapAsScala extends LowPriorityWrapAsScala { - // provide higher-priority implicits with names that don't exist in JavaConverters for the case - // when importing both JavaConverters._ and JavaConversions._. 
otherwise implicit conversions - // would not apply, see https://github.com/scala/scala/pull/5109#issuecomment-212417789 - implicit def `deprecated asScalaIterator`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it) - implicit def `deprecated enumerationAsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i) - implicit def `deprecated iterableAsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i) - implicit def `deprecated collectionAsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i) - implicit def `deprecated asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l) - implicit def `deprecated asScalaSet`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s) - implicit def `deprecated mapAsScalaMap`[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap(m) - implicit def `deprecated mapAsScalaConcurrentMap`[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = mapAsScalaConcurrentMap(m) - implicit def `deprecated dictionaryAsScalaMap`[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap(p) - implicit def `deprecated propertiesAsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) -} - -private[convert] trait LowPriorityWrapAsScala { - import Wrappers._ - - /** - * Implicitly converts a Java `Iterator` to a Scala `Iterator`. - * - * The returned Scala `Iterator` is backed by the provided Java `Iterator` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterator` was previously obtained from an implicit or - * explicit call of `asIterator(scala.collection.Iterator)` then the - * original Scala `Iterator` will be returned. - * - * @param it The `Iterator` to be converted. - * @return A Scala `Iterator` view of the argument. - */ - implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JIteratorWrapper(it) - } - - /** - * Implicitly converts a Java Enumeration to a Scala Iterator. - * The returned Scala Iterator is backed by the provided Java - * Enumeration and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java Enumeration was previously obtained from an implicit or - * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)` - * then the original Scala Iterator will be returned. - * - * @param i The Enumeration to be converted. - * @return A Scala Iterator view of the argument. - */ - implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JEnumerationWrapper(i) - } - - /** - * Implicitly converts a Java `Iterable` to a Scala `Iterable`. - * - * The returned Scala `Iterable` is backed by the provided Java `Iterable` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterable` was previously obtained from an implicit or - * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)` - * then the original Scala Iterable will be returned. - * - * @param i The Iterable to be converted. - * @return A Scala Iterable view of the argument. 
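`enumerationAsScalaIterator` is mostly useful with legacy Java APIs that still hand out `java.util.Enumeration`. A small example, assuming Scala 2.12 and the equivalent `JavaConverters` decorator:

{{{
import scala.collection.JavaConverters._

val v = new java.util.Vector[String]()
v.add("a"); v.add("b")

// java.util.Vector.elements() returns a java.util.Enumeration[String]
v.elements.asScala.toList    // List("a", "b")
}}}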
- */ - implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JIterableWrapper(i) - } - - /** - * Implicitly converts a Java `Collection` to an Scala `Iterable`. - * - * If the Java `Collection` was previously obtained from an implicit or - * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)` - * then the original Scala `Iterable` will be returned. - * - * @param i The Collection to be converted. - * @return A Scala Iterable view of the argument. - */ - implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JCollectionWrapper(i) - } - - /** - * Implicitly converts a Java `List` to a Scala mutable `Buffer`. - * - * The returned Scala `Buffer` is backed by the provided Java `List` - * and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java `List` was previously obtained from an implicit or - * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)` - * then the original Scala `Buffer` will be returned. - * - * @param l The `List` to be converted. - * @return A Scala mutable `Buffer` view of the argument. - */ - implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match { - case null => null - case MutableBufferWrapper(wrapped) => wrapped - case _ => new JListWrapper(l) - } - - /** - * Implicitly converts a Java Set to a Scala mutable Set. - * The returned Scala Set is backed by the provided Java - * Set and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java Set was previously obtained from an implicit or - * explicit call of `asScalaSet(scala.collection.mutable.Set)` then - * the original Scala Set will be returned. - * - * @param s The Set to be converted. - * @return A Scala mutable Set view of the argument. - */ - implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match { - case null => null - case MutableSetWrapper(wrapped) => wrapped - case _ => new JSetWrapper(s) - } - - /** - * Implicitly converts a Java `Map` to a Scala mutable `Map`. - * - * The returned Scala `Map` is backed by the provided Java `Map` and any - * side-effects of using it via the Scala interface will be visible via - * the Java interface and vice versa. - * - * If the Java `Map` was previously obtained from an implicit or - * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then - * the original Scala Map will be returned. - * - * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), - * it is your responsibility to wrap all - * non-atomic operations with `underlying.synchronized`. - * This includes `get`, as `java.util.Map`'s API does not allow for an - * atomic `get` when `null` values may be present. - * - * @param m The Map to be converted. - * @return A Scala mutable Map view of the argument. - */ - implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match { - case null => null - case MutableMapWrapper(wrapped) => wrapped - case _ => new JMapWrapper(m) - } - - /** - * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. 
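`asScalaBuffer` (via the `JListWrapper` it builds) gives a mutable `Buffer` view over a Java `List`, and the trip back to Java unwraps to the original list, per the pattern matches above. A sketch, assuming Scala 2.12 with `JavaConverters`:

{{{
import scala.collection.JavaConverters._
import scala.collection.mutable

val jlist = new java.util.ArrayList[String]()
val buf: mutable.Buffer[String] = jlist.asScala   // JListWrapper view, not a copy

buf += "a"                                        // append through the Scala interface
jlist.get(0)                                      // "a": visible in the underlying ArrayList

(buf.asJava eq jlist)                             // true: converting back unwraps the wrapper
}}}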
- * The returned Scala ConcurrentMap is backed by the provided Java - * ConcurrentMap and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java ConcurrentMap was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala ConcurrentMap will be returned. - * - * @param m The ConcurrentMap to be converted. - * @return A Scala mutable ConcurrentMap view of the argument. - */ - implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match { - case null => null - case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying - case _ => new JConcurrentMapWrapper(m) - } - - /** - * Implicitly converts a Java `Dictionary` to a Scala mutable - * `Map`. - * - * The returned Scala `Map` is backed by the provided Java - * `Dictionary` and any side-effects of using it via the Scala interface - * will be visible via the Java interface and vice versa. - * - * @param p The Dictionary to be converted. - * @return A Scala mutable Map view of the argument. - */ - implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match { - case null => null - case DictionaryWrapper(wrapped) => wrapped - case _ => new JDictionaryWrapper(p) - } - - /** - * Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. - * - * The returned Scala `Map[String, String]` is backed by the provided Java - * `Properties` and any side-effects of using it via the Scala interface - * will be visible via the Java interface and vice versa. - * - * @param p The Properties to be converted. - * @return A Scala mutable Map[String, String] view of the argument. - */ - implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match { - case null => null - case _ => new JPropertiesWrapper(p) - } -} - -@deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0") -object WrapAsScala extends WrapAsScala diff --git a/tests/scala2-library/src/library/scala/collection/convert/Wrappers.scala b/tests/scala2-library/src/library/scala/collection/convert/Wrappers.scala deleted file mode 100644 index 9f7e3e8174ae..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/Wrappers.scala +++ /dev/null @@ -1,431 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import WrapAsScala._ -import WrapAsJava._ - -/** Adapters for Java/Scala collections API. 
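`mapAsScalaConcurrentMap` exposes a `java.util.concurrent.ConcurrentMap` through Scala's `concurrent.Map`, whose atomic operations return `Option` or `Boolean` instead of `null`. A minimal example, assuming Scala 2.12 with `JavaConverters`:

{{{
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import scala.collection.concurrent

val cm: concurrent.Map[String, Int] = new ConcurrentHashMap[String, Int]().asScala

cm.putIfAbsent("hits", 1)     // None: the key was absent, 1 is now stored
cm.putIfAbsent("hits", 5)     // Some(1): the existing value wins
cm.replace("hits", 1, 2)      // true: atomically replaced 1 with 2
}}}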
*/ -private[collection] trait Wrappers { - trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { - val underlying: Iterable[A] - def size = underlying.size - override def iterator = IteratorWrapper(underlying.iterator) - override def isEmpty = underlying.isEmpty - } - - case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] { - def hasNext = underlying.hasNext - def next() = underlying.next() - def hasMoreElements = underlying.hasNext - def nextElement() = underlying.next() - override def remove() = throw new UnsupportedOperationException - } - - class ToIteratorWrapper[A](underlying : Iterator[A]) { - def asJava = new IteratorWrapper(underlying) - } - - case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] { - def hasNext = underlying.hasNext - def next() = underlying.next - } - - case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] { - def hasNext = underlying.hasMoreElements - def next() = underlying.nextElement - } - - case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { } - - case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] { - def iterator = underlying.iterator - def newBuilder[B] = new mutable.ArrayBuffer[B] - } - - case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] { - def iterator = underlying.iterator - override def size = underlying.size - override def isEmpty = underlying.isEmpty - def newBuilder[B] = new mutable.ArrayBuffer[B] - } - - case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - } - - case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { - val p = underlying(i) - underlying(i) = elem - p - } - } - - case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } - override def add(elem: A) = { underlying append elem; true } - override def remove(i: Int) = underlying remove i - } - - case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] { - def length = underlying.size - override def isEmpty = underlying.isEmpty - override def iterator: Iterator[A] = underlying.iterator - def apply(i: Int) = underlying.get(i) - def update(i: Int, elem: A) = underlying.set(i, elem) - def +=:(elem: A) = { underlying.subList(0, 0) add elem; this } - def +=(elem: A): this.type = { underlying add elem; this } - def insertAll(i: Int, elems: Traversable[A]) = { - val ins = underlying.subList(0, i) - elems.seq.foreach(ins.add(_)) - } - def remove(i: Int) = underlying.remove(i) - def clear() = underlying.clear() - def result = this - // Note: Clone cannot just call underlying.clone because in Java, only specific collections - // expose clone methods. Generically, they're protected. 
- override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying)) - } - - @SerialVersionUID(1L) - class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self => - // Note various overrides to avoid performance gotchas. - override def contains(o: Object): Boolean = { - try { underlying.contains(o.asInstanceOf[A]) } - catch { case cce: ClassCastException => false } - } - override def isEmpty = underlying.isEmpty - def size = underlying.size - def iterator = new ju.Iterator[A] { - val ui = underlying.iterator - var prev: Option[A] = None - def hasNext = ui.hasNext - def next = { val e = ui.next(); prev = Some(e); e } - override def remove() = prev match { - case Some(e) => - underlying match { - case ms: mutable.Set[a] => - ms remove e - prev = None - case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next must be called at least once before remove") - } - } - } - - case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) { - override def add(elem: A) = { - val sz = underlying.size - underlying += elem - sz < underlying.size - } - override def remove(elem: AnyRef) = - try underlying remove elem.asInstanceOf[A] - catch { case ex: ClassCastException => false } - override def clear() = underlying.clear() - } - - case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] { - - override def size = underlying.size - - def iterator = underlying.iterator - - def contains(elem: A): Boolean = underlying.contains(elem) - - def +=(elem: A): this.type = { underlying add elem; this } - def -=(elem: A): this.type = { underlying remove elem; this } - - override def add(elem: A): Boolean = underlying add elem - override def remove(elem: A): Boolean = underlying remove elem - override def clear() = underlying.clear() - - override def empty = JSetWrapper(new ju.HashSet[A]) - // Note: Clone cannot just call underlying.clone because in Java, only specific collections - // expose clone methods. Generically, they're protected. 
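Note how `SetWrapper.contains` above catches `ClassCastException`, so asking a Java-facing view about a key of the wrong type answers `false` rather than throwing, matching `java.util.Set` semantics. For example, assuming Scala 2.12 with `JavaConverters`:

{{{
import scala.collection.JavaConverters._

val js: java.util.Set[Int] = Set(1, 2, 3).asJava   // backed by a SetWrapper

js.contains(2)        // true
js.contains("two")    // false, not a ClassCastException: java.util.Set.contains takes Object
}}}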
- override def clone() = - new JSetWrapper[A](new ju.LinkedHashSet[A](underlying)) - } - - @SerialVersionUID(1L) - class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] with Serializable { self => - override def size = underlying.size - - override def get(key: AnyRef): B = try { - underlying get key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - - override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] { - def size = self.size - - def iterator = new ju.Iterator[ju.Map.Entry[A, B]] { - val ui = underlying.iterator - var prev : Option[A] = None - - def hasNext = ui.hasNext - - def next() = { - val (k, v) = ui.next() - prev = Some(k) - new ju.Map.Entry[A, B] { - import scala.util.hashing.byteswap32 - def getKey = k - def getValue = v - def setValue(v1 : B) = self.put(k, v1) - override def hashCode = byteswap32(k.##) + (byteswap32(v.##) << 16) - override def equals(other: Any) = other match { - case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue - case _ => false - } - } - } - - override def remove() { - prev match { - case Some(k) => - underlying match { - case mm: mutable.Map[a, _] => - mm remove k - prev = None - case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next must be called at least once before remove") - } - } - } - } - - override def containsKey(key: AnyRef): Boolean = try { - // Note: Subclass of collection.Map with specific key type may redirect generic - // contains to specific contains, which will throw a ClassCastException if the - // wrong type is passed. This is why we need a type cast to A inside a try/catch. - underlying.contains(key.asInstanceOf[A]) - } catch { - case ex: ClassCastException => false - } - } - - case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) { - override def put(k: A, v: B) = underlying.put(k, v) match { - case Some(v1) => v1 - case None => null.asInstanceOf[B] - } - - override def remove(k: AnyRef): B = try { - underlying remove k.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - - override def clear() = underlying.clear() - } - - trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] { - def underlying: ju.Map[A, B] - - override def size = underlying.size - - def get(k: A) = { - val v = underlying get k - if (v != null) - Some(v) - else if (underlying containsKey k) - Some(null.asInstanceOf[B]) - else - None - } - - def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: A): this.type = { underlying remove key; this } - - override def put(k: A, v: B): Option[B] = Option(underlying.put(k, v)) - - override def update(k: A, v: B) { underlying.put(k, v) } - - override def remove(k: A): Option[B] = Option(underlying remove k) - - def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { val e = ui.next(); (e.getKey, e.getValue) } - } - - override def clear() = underlying.clear() - - override def empty: Repr = null.asInstanceOf[Repr] - } - - /** Wraps a Java map as a Scala one. 
If the map is to support concurrent access, - * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized - * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility - * to wrap all non-atomic operations with `underlying.synchronized`. - * This includes `get`, as `java.util.Map`'s API does not allow for an - * atomic `get` when `null` values may be present. - */ - case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { - override def empty = JMapWrapper(new ju.HashMap[A, B]) - } - - class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] { - - override def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - override def remove(k: AnyRef, v: AnyRef) = try { - underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B]) - } catch { - case ex: ClassCastException => - false - } - - override def replace(k: A, v: B): B = underlying.replace(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - override def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) - } - - /** Wraps a concurrent Java map as a Scala one. Single-element concurrent - * access is supported; multi-element operations such as maps and filters - * are not guaranteed to be atomic. - */ - case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] { - override def get(k: A) = Option(underlying get k) - - override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B]) - - def putIfAbsent(k: A, v: B): Option[B] = Option(underlying.putIfAbsent(k, v)) - - def remove(k: A, v: B): Boolean = underlying.remove(k, v) - - def replace(k: A, v: B): Option[B] = Option(underlying.replace(k, v)) - - def replace(k: A, oldvalue: B, newvalue: B): Boolean = - underlying.replace(k, oldvalue, newvalue) - } - - case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] { - def size: Int = underlying.size - def isEmpty: Boolean = underlying.isEmpty - def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator) - def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator) - def get(key: AnyRef) = try { - underlying get key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - def put(key: A, value: B): B = underlying.put(key, value) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - override def remove(key: AnyRef) = try { - underlying remove key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - } - - case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] { - override def size: Int = underlying.size - - def get(k: A) = Option(underlying get k) - - def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: A): this.type = { underlying remove key; this } - - override def put(k: A, v: B): Option[B] = Option(underlying.put(k, v)) - - override def update(k: A, v: B) { underlying.put(k, v) } - - 
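The synchronized-map caveat above deserves emphasis: the wrapper adds no locking of its own, so compound operations must be guarded by the caller. A sketch of what that looks like, assuming Scala 2.12 with `JavaConverters` (`synced` and `counts` are illustrative names):

{{{
import java.util.Collections
import scala.collection.JavaConverters._

val synced = Collections.synchronizedMap(new java.util.HashMap[String, Int]())
val counts = synced.asScala                    // mutable.Map view over the synchronized map

// A read-modify-write is two operations; lock the underlying map around both:
synced.synchronized {
  val next = counts.getOrElse("hits", 0) + 1
  counts("hits") = next
}
}}}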
override def remove(k: A): Option[B] = Option(underlying remove k) - - def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k)) - - override def clear() = underlying.clear() - } - - case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String] - with mutable.Map[String, String] - with mutable.MapLike[String, String, JPropertiesWrapper] { - - override def size = underlying.size - - def get(k: String) = { - val v = underlying get k - if (v != null) Some(v.asInstanceOf[String]) else None - } - - def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: String): this.type = { underlying remove key; this } - - override def put(k: String, v: String): Option[String] = { - val r = underlying.put(k, v) - if (r != null) Some(r.asInstanceOf[String]) else None - } - - override def update(k: String, v: String) { underlying.put(k, v) } - - override def remove(k: String): Option[String] = { - val r = underlying remove k - if (r != null) Some(r.asInstanceOf[String]) else None - } - - def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { - val e = ui.next() - (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) - } - } - - override def clear() = underlying.clear() - - override def empty = JPropertiesWrapper(new ju.Properties) - - def getProperty(key: String) = underlying.getProperty(key) - - def getProperty(key: String, defaultValue: String) = - underlying.getProperty(key, defaultValue) - - def setProperty(key: String, value: String) = - underlying.setProperty(key, value) - } -} - -@SerialVersionUID(0 - 5857859809262781311L) -object Wrappers extends Wrappers with Serializable diff --git a/tests/scala2-library/src/library/scala/collection/convert/package.scala b/tests/scala2-library/src/library/scala/collection/convert/package.scala deleted file mode 100644 index 810d112cd5a5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/convert/package.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -package object convert { - @deprecated("use JavaConverters", since="2.12.0") - val decorateAsJava = new DecorateAsJava { } - @deprecated("use JavaConverters", since="2.12.0") - val decorateAsScala = new DecorateAsScala { } - @deprecated("use JavaConverters", since="2.12.0") - val decorateAll = JavaConverters - - @deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0") - val wrapAsJava = new WrapAsJava { } - @deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0") - val wrapAsScala = new WrapAsScala { } - @deprecated("use JavaConverters or consider ImplicitConversions", since="2.12.0") - val wrapAll = new WrapAsJava with WrapAsScala { } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/BitOperations.scala b/tests/scala2-library/src/library/scala/collection/generic/BitOperations.scala deleted file mode 100644 index 2f460eee1fcf..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/BitOperations.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, 
LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** Some bit operations. - * - * See http://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/ for - * an explanation of unsignedCompare. - */ -private[collection] object BitOperations { - trait Int { - type Int = scala.Int - def zero(i: Int, mask: Int) = (i & mask) == 0 - def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) - def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix - def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) - def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) - def complement(i: Int) = (-1) ^ i - def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) - def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep - def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) - } - object Int extends Int - - trait Long { - type Long = scala.Long - def zero(i: Long, mask: Long) = (i & mask) == 0L - def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) - def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix - def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) - def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) - def complement(i: Long) = (-1L) ^ i - def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) - def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep - def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) - } - object Long extends Long -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/BitSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/BitSetFactory.scala deleted file mode 100644 index 2e3aae31ac9d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/BitSetFactory.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import scala.collection._ -import mutable.Builder - -/** @define coll collection - * @define Coll `Traversable` - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define bitsetCanBuildFrom - * The standard `CanBuildFrom` instance for bitsets. 
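The `unsignedCompare` trick above (from the linked drmaciver post) flips the comparison whenever exactly one operand has its sign bit set, which is what makes it an unsigned comparison. `BitOperations` itself is `private[collection]`, so this is just the same expression written out for illustration:

{{{
// Compare two Ints as if they were unsigned 32-bit values.
def unsignedCompare(i: Int, j: Int): Boolean = (i < j) ^ (i < 0) ^ (j < 0)

unsignedCompare(1, 2)     // true:  1 < 2 as unsigned
unsignedCompare(1, -1)    // true:  -1 is 0xFFFFFFFF, the largest unsigned value
unsignedCompare(-1, 1)    // false
}}}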
- */ -trait BitSetFactory[Coll <: BitSet with BitSetLike[Coll]] { - def empty: Coll - def newBuilder: Builder[Int, Coll] - def apply(elems: Int*): Coll = (empty /: elems) (_ + _) - def bitsetCanBuildFrom = new CanBuildFrom[Coll, Int, Coll] { - def apply(from: Coll) = newBuilder - def apply() = newBuilder - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/CanBuildFrom.scala b/tests/scala2-library/src/library/scala/collection/generic/CanBuildFrom.scala deleted file mode 100644 index 24e5b2a1ddea..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/CanBuildFrom.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.implicitNotFound - -/** A base trait for builder factories. - * - * @tparam From the type of the underlying collection that requests - * a builder to be created. - * @tparam Elem the element type of the collection to be created. - * @tparam To the type of the collection to be created. - * - * @see [[scala.collection.mutable.Builder]] - * @author Martin Odersky - * @author Adriaan Moors - * @since 2.8 - */ -@implicitNotFound(msg = "Cannot construct a collection of type ${To} with elements of type ${Elem} based on a collection of type ${From}.") -trait CanBuildFrom[-From, -Elem, +To] { - - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. - * @return a builder for collections of type `To` with element type `Elem`. - * The collections framework usually arranges things so - * that the created builder will build the same kind of collection - * as `from`. - */ - def apply(from: From): Builder[Elem, To] - - /** Creates a new builder from scratch. - * - * @return a builder for collections of type `To` with element type `Elem`. - * @see scala.collection.breakOut - */ - def apply(): Builder[Elem, To] -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/CanCombineFrom.scala b/tests/scala2-library/src/library/scala/collection/generic/CanCombineFrom.scala deleted file mode 100644 index 7f70b4580aba..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/CanCombineFrom.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.parallel._ - -/** A base trait for parallel builder factories. - * - * @tparam From the type of the underlying collection that requests a - * builder to be created. - * @tparam Elem the element type of the collection to be created. - * @tparam To the type of the collection to be created. 
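`CanBuildFrom` is what lets a generic method return "the same kind of collection it was given": `apply(from)` asks the source collection for a builder of its own kind. A minimal sketch under Scala 2.12 semantics; the `duplicate` helper is purely illustrative:

{{{
import scala.collection.generic.CanBuildFrom
import scala.language.higherKinds

// Build a result of the same collection kind as the input, using the implicit factory.
def duplicate[A, CC[X] <: Traversable[X]](xs: CC[A])(implicit cbf: CanBuildFrom[CC[A], A, CC[A]]): CC[A] = {
  val b = cbf(xs)                    // builder provided by the source collection's companion
  xs.foreach { a => b += a; b += a }
  b.result()
}

duplicate(List(1, 2))          // List(1, 1, 2, 2)
duplicate(Vector("a"))         // Vector("a", "a")
}}}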
- * @since 2.8 - */ -trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel { - def apply(from: From): Combiner[Elem, To] - def apply(): Combiner[Elem, To] -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ClassTagTraversableFactory.scala deleted file mode 100644 index e3db40123dda..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ClassTagTraversableFactory.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.language.higherKinds -import scala.reflect.ClassTag - -/** A template for companion objects of `ClassTagTraversable` and - * subclasses thereof. - * - * @define coll collection - * @define Coll `Traversable` - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ClassTagTraversableFactory[CC[X] <: Traversable[X] with GenericClassTagTraversableTemplate[X, CC]] - extends GenericClassTagCompanion[CC] { - - class GenericCanBuildFrom[A](implicit tag: ClassTag[A]) extends CanBuildFrom[CC[_], A, CC[A]] { - def apply(from: CC[_]) = from.genericClassTagBuilder[A] - def apply = newBuilder[A] - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/Clearable.scala b/tests/scala2-library/src/library/scala/collection/generic/Clearable.scala deleted file mode 100644 index 3c496051c4cb..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/Clearable.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** This trait forms part of collections that can be cleared - * with a clear() call. - * - * @author Paul Phillips - * @version 2.10 - * @since 2.10 - * @define coll clearable collection - * @define Coll `Clearable` - */ -trait Clearable { - /** Clears the $coll's contents. After this operation, the - * $coll is empty. - */ - def clear(): Unit -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/FilterMonadic.scala b/tests/scala2-library/src/library/scala/collection/generic/FilterMonadic.scala deleted file mode 100644 index 8aefbdb92662..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/FilterMonadic.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods - * of trait `TraversableLike`. 
- */ -trait FilterMonadic[+A, +Repr] extends Any { - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That - def foreach[U](f: A => U): Unit - def withFilter(p: A => Boolean): FilterMonadic[A, Repr] -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenMapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/GenMapFactory.scala deleted file mode 100644 index ae3150115fd2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenMapFactory.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import mutable.{Builder, MapBuilder} -import scala.language.higherKinds - -/** A template for companion objects of `Map` and subclasses thereof. - * - * @define coll map - * @define Coll `Map` - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * @see CanBuildFrom - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenMapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A, B]]] { - - /** The type constructor of the collection that can be built by this factory */ - type Coll = CC[_, _] - - /** An empty $Coll */ - def empty[A, B]: CC[A, B] - - /** A collection of type $Coll that contains given key/value bindings. - * @param elems the key/value pairs that make up the $coll - * @tparam A the type of the keys - * @tparam B the type of the associated values - * @return a new $coll consisting key/value pairs given by `elems`. - */ - def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result() - - /** The default builder for $Coll objects. - * @tparam A the type of the keys - * @tparam B the type of the associated values - */ - def newBuilder[A, B]: Builder[(A, B), CC[A, B]] = new MapBuilder[A, B, CC[A, B]](empty[A, B]) - - /** The standard `CanBuildFrom` class for maps. - */ - class MapCanBuildFrom[A, B] extends CanBuildFrom[Coll, (A, B), CC[A, B]] { - def apply(from: Coll) = newBuilder[A, B] - def apply() = newBuilder - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenSeqFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/GenSeqFactory.scala deleted file mode 100644 index 6afbb2e2fb4c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenSeqFactory.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of Seq and subclasses thereof. 
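`GenMapFactory.apply` above is just sugar over `newBuilder`: the varargs form and the explicit builder form produce the same map. Spelled out, under Scala 2.12:

{{{
// Map("a" -> 1, "b" -> 2) is defined as (newBuilder ++= elems).result()
val viaBuilder = {
  val b = Map.newBuilder[String, Int]
  b ++= Seq("a" -> 1, "b" -> 2)
  b.result()
}

viaBuilder == Map("a" -> 1, "b" -> 2)   // true
}}}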
- * - * @since 2.8 - */ -abstract class GenSeqFactory[CC[X] <: GenSeq[X] with GenericTraversableTemplate[X, CC]] -extends GenTraversableFactory[CC] diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/GenSetFactory.scala deleted file mode 100644 index 65404a49918c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenSetFactory.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** A template for companion objects of `Set` and subclasses thereof. - * - * @define coll set - * @define Coll `Set` - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * @see CanBuildFrom - * @define setCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenSetFactory[CC[X] <: GenSet[X] with GenSetLike[X, CC[X]]] - extends GenericCompanion[CC] { - - def newBuilder[A]: Builder[A, CC[A]] - - /** $setCanBuildFromInfo - */ - def setCanBuildFrom[A] = new CanBuildFrom[CC[_], A, CC[A]] { - def apply(from: CC[_]) = from match { - // When building from an existing Set, try to preserve its type: - case from: Set[_] => from.genericBuilder.asInstanceOf[Builder[A, CC[A]]] - case _ => newBuilder[A] - } - def apply() = newBuilder[A] - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenTraversableFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/GenTraversableFactory.scala deleted file mode 100644 index 7c2aa5615c2e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenTraversableFactory.scala +++ /dev/null @@ -1,252 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `Traversable` and subclasses thereof. - * This class provides a set of operations to create `$Coll` objects. - * It is typically inherited by companion objects of subclasses of `Traversable`. - * - * @since 2.8 - * - * @define coll collection - * @define Coll `Traversable` - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * The created value is an instance of class `GenericCanBuildFrom`, - * which forwards calls to create a new builder to the - * `genericBuilder` method of the requesting collection. 
- * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]] -extends GenericCompanion[CC] { - - private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { - override def apply() = newBuilder[Nothing] - } - def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance - - /** A generic implementation of the `CanBuildFrom` trait, which forwards - * all calls to `apply(from)` to the `genericBuilder` method of - * $coll `from`, and which forwards all calls of `apply()` to the - * `newBuilder` method of this factory. - */ - class GenericCanBuildFrom[A] extends CanBuildFrom[CC[_], A, CC[A]] { - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. - * @return the result of invoking the `genericBuilder` method on `from`. - */ - def apply(from: Coll) = from.genericBuilder[A] - - /** Creates a new builder from scratch - * @return the result of invoking the `newBuilder` method of this factory. - */ - def apply() = newBuilder[A] - } - - /** Concatenates all argument collections into a single $coll. - * - * @param xss the collections that are to be concatenated. - * @return the concatenation of all the collections. - */ - def concat[A](xss: Traversable[A]*): CC[A] = { - val b = newBuilder[A] - // At present we're using IndexedSeq as a proxy for "has a cheap size method". - if (xss forall (_.isInstanceOf[IndexedSeq[_]])) - b.sizeHint(xss.map(_.size).sum) - - for (xs <- xss.seq) b ++= xs - b.result() - } - - /** Produces a $coll containing the results of some element computation a number of times. - * @param n the number of elements contained in the $coll. - * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. - */ - def fill[A](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = - tabulate(n1)(_ => fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = - tabulate(n1)(_ => fill(n2, n3)(elem)) - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. 
- */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(_ => fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
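Likewise for the `tabulate` overloads above, a brief sketch in plain Scala 2.12:

```scala
val squares: Vector[Int] = Vector.tabulate(5)(i => i * i)   // Vector(0, 1, 4, 9, 16)

// The multi-arity overloads nest the collection, one level per dimension:
val times: List[List[Int]] = List.tabulate(3, 3)((r, c) => (r + 1) * (c + 1))
// List(List(1, 2, 3), List(2, 4, 6), List(3, 6, 9))
```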
- * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Produces a $coll containing a sequence of increasing of integers. - * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one) - - /** Produces a $coll containing equally spaced values in some integer interval. - * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[T: Integral](start: T, end: T, step: T): CC[T] = { - val num = implicitly[Integral[T]] - import num._ - - if (step == zero) throw new IllegalArgumentException("zero step") - val b = newBuilder[T] - b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false) - var i = start - while (if (step < zero) end < i else i < end) { - b += i - i += step - } - b.result() - } - - /** Produces a $coll containing repeated applications of a function to a start value. - * - * @param start the start value of the $coll - * @param len the number of elements contained in the $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A] = { - val b = newBuilder[A] - if (len > 0) { - b.sizeHint(len) - var acc = start - var i = 1 - b += acc - - while (i < len) { - acc = f(acc) - i += 1 - b += acc - } - } - b.result() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericClassTagCompanion.scala deleted file mode 100644 index a8ac2bf7387d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericClassTagCompanion.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds -import scala.reflect.ClassTag - -/** This class represents companions of classes which require ClassTags - * for their element types. 
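The same goes for `range` and `iterate`, whose implementations are deleted just above; a usage sketch in plain Scala 2.12:

```scala
val evens: List[Int] = List.range(0, 10, 2)           // List(0, 2, 4, 6, 8)
// A zero step is rejected, matching the guard in the implementation above:
// List.range(0, 10, 0)   => IllegalArgumentException("zero step")

val powers: Vector[Int] = Vector.iterate(1, 5)(_ * 2) // Vector(1, 2, 4, 8, 16)
```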
- * - * @author Aleksandar Prokopec - */ -abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] { - protected[this] type Coll = CC[_] - - def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]] - - def empty[A: ClassTag]: CC[A] = newBuilder[A].result() - - def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = { - val b = newBuilder[A] - b ++= elems - b.result() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala deleted file mode 100644 index 090cd729a416..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds -import scala.reflect.ClassTag - -/** This trait represents collections classes which require class - * tags for their element types. - * - * @author Aleksandar Prokopec - * @since 2.8 - */ -trait GenericClassTagTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { - implicit protected[this] val tag: ClassTag[A] - def classTagCompanion: GenericClassTagCompanion[CC] - def genericClassTagBuilder[B](implicit tag: ClassTag[B]): Builder[B, CC[B]] = classTagCompanion.newBuilder[B] - @deprecated("use classTagCompanion instead", "2.10.0") - def classManifestCompanion: GenericClassManifestCompanion[CC] = classTagCompanion - @deprecated("use genericClassTagBuilder instead", "2.10.0") - def genericClassManifestBuilder[B](implicit manifest: ClassManifest[B]): Builder[B, CC[B]] = genericClassTagBuilder[B](manifest) -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericCompanion.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericCompanion.scala deleted file mode 100644 index 67d0a9c7f75a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericCompanion.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** A template class for companion objects of "regular" collection classes - * represent an unconstrained higher-kinded type. Typically - * such classes inherit from trait `GenericTraversableTemplate`. - * @tparam CC The type constructor representing the collection class. - * @see [[scala.collection.generic.GenericTraversableTemplate]] - * @author Martin Odersky - * @since 2.8 - * @define coll collection - * @define Coll `CC` - */ -abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] { - /** The underlying collection type with unknown element type */ - protected[this] type Coll = CC[_] - - /** The default builder for `$Coll` objects. 
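The `GenericClassTagCompanion` pattern above (builders that need a `ClassTag` for the element type) is most familiar from arrays. `Array` is not literally a subclass of this trait, so treat the following only as an analogous Scala 2.12 sketch of why the implicit `ClassTag` parameter exists:

```scala
import scala.reflect.ClassTag

// Building a generic Array needs runtime element-type information.
def pair[A: ClassTag](x: A): Array[A] = Array(x, x)

pair(42)     // Array(42, 42)
pair("hi")   // Array(hi, hi)
```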
- * @tparam A the type of the ${coll}'s elements - */ - def newBuilder[A]: Builder[A, CC[A]] - - /** An empty collection of type `$Coll[A]` - * @tparam A the type of the ${coll}'s elements - */ - def empty[A]: CC[A] = newBuilder[A].result() - - /** Creates a $coll with the specified elements. - * @tparam A the type of the ${coll}'s elements - * @param elems the elements of the created $coll - * @return a new $coll with elements `elems` - */ - def apply[A](elems: A*): CC[A] = { - if (elems.isEmpty) empty[A] - else { - val b = newBuilder[A] - b ++= elems - b.result() - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericOrderedCompanion.scala deleted file mode 100644 index 5b328bff6ca1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericOrderedCompanion.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** This class represents companions of classes which require the ordered trait - * for their element types. - * - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] { - protected[this] type Coll = CC[_] - - def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] - - def empty[A: Ordering]: CC[A] = newBuilder[A].result() - - def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = { - val b = newBuilder[A] - b ++= elems - b.result() - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala deleted file mode 100644 index c1a41ce7c4be..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds - -/** This trait represents collections classes which require - * ordered element types. 
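Because the concrete companion objects (`List`, `Vector`, and so on) all extend `GenericCompanion`, code can be written against the abstraction shown above. A minimal Scala 2.12 sketch; the helper name `buildUpTo` is ours, not part of the library:

```scala
import scala.collection.generic.GenericCompanion

// Build a collection of 1..n through whichever companion is passed in.
def buildUpTo[CC[X] <: Traversable[X]](companion: GenericCompanion[CC], n: Int): CC[Int] = {
  val b = companion.newBuilder[Int]
  var i = 1
  while (i <= n) { b += i; i += 1 }
  b.result()
}

buildUpTo(List, 3)    // List(1, 2, 3)
buildUpTo(Vector, 3)  // Vector(1, 2, 3)
```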
- * - * @author Aleksandar Prokopec - */ -trait GenericOrderedTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { - implicit protected[this] val ord: Ordering[A] - def orderedCompanion: GenericOrderedCompanion[CC] - def genericOrderedBuilder[B](implicit ord: Ordering[B]): Builder[B, CC[B]] = orderedCompanion.newBuilder[B] -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericParCompanion.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericParCompanion.scala deleted file mode 100644 index 432b9135f825..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericParCompanion.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner -import scala.collection.parallel.ParIterable -import scala.collection.parallel.ParMap -import scala.language.higherKinds - -/** A template class for companion objects of parallel collection classes. - * They should be mixed in together with `GenericCompanion` type. - * - * @define Coll `ParIterable` - * @tparam CC the type constructor representing the collection class - * @since 2.8 - */ -trait GenericParCompanion[+CC[X] <: ParIterable[X]] { - /** The default builder for $Coll objects. - */ - def newBuilder[A]: Combiner[A, CC[A]] - - /** The parallel builder for $Coll objects. - */ - def newCombiner[A]: Combiner[A, CC[A]] -} - -trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] { - def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericParTemplate.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericParTemplate.scala deleted file mode 100644 index 44a778a95370..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericParTemplate.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner -import scala.collection.parallel.ParIterable -import scala.collection.parallel.ParMap - -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds - -/** A template trait for collections having a companion. 
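From the user's side, the ordered-companion machinery above shows up as an implicit `Ordering` threaded through the factory methods. A Scala 2.12 usage sketch with `mutable.PriorityQueue`, whose companion is one such ordered factory:

```scala
import scala.collection.mutable

val byValue  = mutable.PriorityQueue(3, 1, 2)   // uses the implicit Ordering[Int]
val byLength = mutable.PriorityQueue("aa", "b", "cccc")(Ordering.by((s: String) => s.length))

byValue.dequeue()   // 3       (largest first under the default ordering)
byLength.dequeue()  // "cccc"  (largest first under the length ordering)
```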
- * - * @tparam A the element type of the collection - * @tparam CC the type constructor representing the collection class - * @author Aleksandar Prokopec - * @since 2.8 - */ -trait GenericParTemplate[+A, +CC[X] <: ParIterable[X]] -extends GenericTraversableTemplate[A, CC] - with HasNewCombiner[A, CC[A] @uncheckedVariance] -{ - def companion: GenericCompanion[CC] with GenericParCompanion[CC] - - protected[this] override def newBuilder: scala.collection.mutable.Builder[A, CC[A]] = newCombiner - - protected[this] override def newCombiner: Combiner[A, CC[A]] = { - val cb = companion.newCombiner[A] - cb - } - - override def genericBuilder[B]: Combiner[B, CC[B]] = genericCombiner[B] - - def genericCombiner[B]: Combiner[B, CC[B]] = { - val cb = companion.newCombiner[B] - cb - } - -} - - -trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericParTemplate[(K, V), ParIterable] -{ - protected[this] override def newCombiner: Combiner[(K, V), CC[K, V]] = { - val cb = mapCompanion.newCombiner[K, V] - cb - } - - def mapCompanion: GenericParMapCompanion[CC] - - def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = { - val cb = mapCompanion.newCombiner[P, Q] - cb - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericSeqCompanion.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericSeqCompanion.scala deleted file mode 100644 index fd1e18a0290b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericSeqCompanion.scala +++ /dev/null @@ -1,16 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -trait GenericSeqCompanion[CC[X] <: Traversable[X]] - extends GenericCompanion[CC] diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericSetTemplate.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericSetTemplate.scala deleted file mode 100644 index 2cadd14948d8..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericSetTemplate.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic -import scala.language.higherKinds -/** - * @since 2.8 - */ -trait GenericSetTemplate[A, +CC[X] <: GenSet[X]] extends GenericTraversableTemplate[A, CC] { - def empty: CC[A] = companion.empty[A] -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/tests/scala2-library/src/library/scala/collection/generic/GenericTraversableTemplate.scala deleted file mode 100644 index bdd91ba7a41e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/GenericTraversableTemplate.scala +++ /dev/null @@ -1,232 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.migration -import 
scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds - -/** A template class for companion objects of ``regular`` collection classes - * that represent an unconstrained higher-kinded type. - * - * @tparam A The type of the collection elements. - * @tparam CC The type constructor representing the collection class. - * @author Martin Odersky - * @since 2.8 - * @define coll collection - * @define Coll Traversable - */ -trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { - - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - */ - def foreach[U](f: A => U): Unit - - /** Selects the first element of this $coll. - * - * @return the first element of this $coll. - * @throws NoSuchElementException if the $coll is empty. - */ - def head: A - - /** Tests whether this $coll is empty. - * - * @return `true` if the $coll contain no elements, `false` otherwise. - */ - def isEmpty: Boolean - - /** The factory companion object that builds instances of class $Coll. - * (or its `Iterable` superclass where class $Coll is not a `Seq`.) - */ - def companion: GenericCompanion[CC] - - /** The builder that builds instances of type $Coll[A] - */ - protected[this] def newBuilder: Builder[A, CC[A]] = companion.newBuilder[A] - - /** The generic builder that builds instances of $Coll - * at arbitrary element types. - */ - def genericBuilder[B]: Builder[B, CC[B]] = companion.newBuilder[B] - - private def sequential: TraversableOnce[A] = this.asInstanceOf[GenTraversableOnce[A]].seq - - /** Converts this $coll of pairs into two collections of the first and second - * half of each pair. - * - * {{{ - * val xs = $Coll( - * (1, "one"), - * (2, "two"), - * (3, "three")).unzip - * // xs == ($Coll(1, 2, 3), - * // $Coll(one, two, three)) - * }}} - * - * @tparam A1 the type of the first half of the element pairs - * @tparam A2 the type of the second half of the element pairs - * @param asPair an implicit conversion which asserts that the element type - * of this $coll is a pair. - * @return a pair of ${coll}s, containing the first, respectively second - * half of each element pair of this $coll. - */ - def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { - val b1 = genericBuilder[A1] - val b2 = genericBuilder[A2] - for (xy <- sequential) { - val (x, y) = asPair(xy) - b1 += x - b2 += y - } - (b1.result(), b2.result()) - } - - /** Converts this $coll of triples into three collections of the first, second, - * and third element of each triple. - * - * {{{ - * val xs = $Coll( - * (1, "one", '1'), - * (2, "two", '2'), - * (3, "three", '3')).unzip3 - * // xs == ($Coll(1, 2, 3), - * // $Coll(one, two, three), - * // $Coll(1, 2, 3)) - * }}} - * - * @tparam A1 the type of the first member of the element triples - * @tparam A2 the type of the second member of the element triples - * @tparam A3 the type of the third member of the element triples - * @param asTriple an implicit conversion which asserts that the element type - * of this $coll is a triple. 
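The `unzip`/`unzip3` operations documented above read most clearly from the call site; a plain Scala 2.12 sketch:

```scala
val pairs = List(1 -> "one", 2 -> "two", 3 -> "three")
val (nums, names) = pairs.unzip
// nums  == List(1, 2, 3)
// names == List("one", "two", "three")

val triples = Vector((1, 'a', true), (2, 'b', false))
val (is, cs, flags) = triples.unzip3
// is == Vector(1, 2), cs == Vector('a', 'b'), flags == Vector(true, false)
```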
- * @return a triple of ${coll}s, containing the first, second, respectively - * third member of each element triple of this $coll. - */ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { - val b1 = genericBuilder[A1] - val b2 = genericBuilder[A2] - val b3 = genericBuilder[A3] - - for (xyz <- sequential) { - val (x, y, z) = asTriple(xyz) - b1 += x - b2 += y - b3 += z - } - (b1.result(), b2.result(), b3.result()) - } - - /** Converts this $coll of traversable collections into - * a $coll formed by the elements of these traversable - * collections. - * - * @tparam B the type of the elements of each traversable collection. - * @param asTraversable an implicit conversion which asserts that the element - * type of this $coll is a `GenTraversable`. - * @return a new $coll resulting from concatenating all element ${coll}s. - * - * @usecase def flatten[B]: $Coll[B] - * - * @inheritdoc - * - * The resulting collection's type will be guided by the - * static type of $coll. For example: - * - * {{{ - * val xs = List( - * Set(1, 2, 3), - * Set(1, 2, 3) - * ).flatten - * // xs == List(1, 2, 3, 1, 2, 3) - * - * val ys = Set( - * List(1, 2, 3), - * List(3, 2, 1) - * ).flatten - * // ys == Set(1, 2, 3) - * }}} - */ - def flatten[B](implicit asTraversable: A => /*<: /*<:= headSize) fail - bs(i) += x - i += 1 - } - if (i != headSize) - fail - } - val bb = genericBuilder[CC[B]] - for (b <- bs) bb += b.result - bb.result() - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/Growable.scala b/tests/scala2-library/src/library/scala/collection/generic/Growable.scala deleted file mode 100644 index 366221ed9bdb..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/Growable.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.annotation.tailrec - -/** This trait forms part of collections that can be augmented - * using a `+=` operator and that can be cleared of all elements using - * a `clear` method. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define coll growable collection - * @define Coll `Growable` - * @define add add - * @define Add add - */ -trait Growable[-A] extends Clearable { - - /** ${Add}s a single element to this $coll. - * - * @param elem the element to $add. - * @return the $coll itself - */ - def +=(elem: A): this.type - - /** ${Add}s two or more elements to this $coll. - * - * @param elem1 the first element to $add. - * @param elem2 the second element to $add. - * @param elems the remaining elements to $add. - * @return the $coll itself - */ - def +=(elem1: A, elem2: A, elems: A*): this.type = this += elem1 += elem2 ++= elems - - /** ${Add}s all elements produced by a TraversableOnce to this $coll. - * - * @param xs the TraversableOnce producing the elements to $add. - * @return the $coll itself. - */ - def ++=(xs: TraversableOnce[A]): this.type = { - @tailrec def loop(xs: scala.collection.LinearSeq[A]) { - if (xs.nonEmpty) { - this += xs.head - loop(xs.tail) - } - } - xs match { - case xs: scala.collection.LinearSeq[A] => loop(xs) - case xs => xs foreach += - } - this - } - - /** Clears the $coll's contents. After this operation, the - * $coll is empty. 
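`flatten` and `transpose` from `GenericTraversableTemplate` above are easiest to grasp by example; a plain Scala 2.12 sketch:

```scala
val xss = List(List(1, 2), List(3, 4), List(5, 6))

xss.flatten     // List(1, 2, 3, 4, 5, 6)
xss.transpose   // List(List(1, 3, 5), List(2, 4, 6))

// transpose requires equally sized inner collections:
// List(List(1), List(2, 3)).transpose   => IllegalArgumentException
```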
- */ - def clear(): Unit -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/HasNewBuilder.scala b/tests/scala2-library/src/library/scala/collection/generic/HasNewBuilder.scala deleted file mode 100644 index aa0ce6698db4..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/HasNewBuilder.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala -package collection -package generic - -import mutable.Builder - -trait HasNewBuilder[+A, +Repr] extends Any { - /** The builder that builds instances of Repr */ - protected[this] def newBuilder: Builder[A, Repr] -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/HasNewCombiner.scala b/tests/scala2-library/src/library/scala/collection/generic/HasNewCombiner.scala deleted file mode 100644 index 99a0722c3d1b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/HasNewCombiner.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner - -/** - * @since 2.8 - */ -trait HasNewCombiner[+T, +Repr] { - protected[this] def newCombiner: Combiner[T, Repr] -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/ImmutableMapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ImmutableMapFactory.scala deleted file mode 100644 index 7d857bf1b4f6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ImmutableMapFactory.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `immutable.Map` and subclasses thereof. 
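The `Growable` contract deleted above (`+=`, `++=`, `clear`) is implemented by most mutable collections; a short Scala 2.12 usage sketch:

```scala
import scala.collection.mutable

val buf = mutable.ArrayBuffer.empty[Int]   // ArrayBuffer is a Growable[Int]
buf += 1
buf += (2, 3)          // the (elem1, elem2, elems*) overload
buf ++= List(4, 5)
// buf == ArrayBuffer(1, 2, 3, 4, 5)
buf.clear()
// buf.isEmpty == true
```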
- * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -abstract class ImmutableMapFactory[CC[A, +B] <: immutable.Map[A, B] with immutable.MapLike[A, B, CC[A, B]]] extends MapFactory[CC] diff --git a/tests/scala2-library/src/library/scala/collection/generic/ImmutableSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ImmutableSetFactory.scala deleted file mode 100644 index a72caf263392..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ImmutableSetFactory.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import mutable.{ Builder, SetBuilder } -import scala.language.higherKinds - -abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]] - extends SetFactory[CC] { - private[collection] def emptyInstance: CC[Any] - override def empty[A] = emptyInstance.asInstanceOf[CC[A]] - def newBuilder[A]: Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty[A]) -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala deleted file mode 100644 index 730e58a5275c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `SortedMap` and subclasses thereof. - * - * @since 2.8 - * @define Coll `SortedMap` - * @define coll sorted map - * @define factoryInfo - * This object provides a set of operations needed to create sorted maps of type `$Coll`. - * @author Martin Odersky - * @version 2.8 - * @define sortedMapCanBuildFromInfo - * The standard `CanBuildFrom` instance for sorted maps - */ -abstract class ImmutableSortedMapFactory[CC[A, B] <: immutable.SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]] extends SortedMapFactory[CC] diff --git a/tests/scala2-library/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala deleted file mode 100644 index 1fd4a8c99d93..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `SortedSet` and subclasses thereof. - * - * @since 2.8 - * @define Coll `immutable.SortedSet` - * @define coll immutable sorted set - * @define factoryInfo - * This object provides a set of operations needed to create sorted sets of type `$Coll`. 
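The sorted-map and sorted-set factories above differ from the plain ones mainly in the implicit `Ordering` they thread through; a Scala 2.12 usage sketch:

```scala
import scala.collection.immutable.{SortedMap, SortedSet}

val byKey = SortedMap(3 -> "c", 1 -> "a", 2 -> "b")
// byKey.keySet.toList == List(1, 2, 3)  (Ordering.Int)

val byLength = SortedSet("pear", "fig", "banana")(Ordering.by((s: String) => s.length))
// iterates "fig", "pear", "banana"
```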
- * @author Martin Odersky - * @version 2.8 - * @define sortedSetCanBuildFromInfo - * The standard `CanBuildFrom` instance for sorted sets - */ -abstract class ImmutableSortedSetFactory[CC[A] <: immutable.SortedSet[A] with SortedSetLike[A, CC[A]]] extends SortedSetFactory[CC] diff --git a/tests/scala2-library/src/library/scala/collection/generic/IndexedSeqFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/IndexedSeqFactory.scala deleted file mode 100644 index ddc0141aa9df..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/IndexedSeqFactory.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import language.higherKinds - -/** A template for companion objects of IndexedSeq and subclasses thereof. - * - * @since 2.11 - */ -abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] { - override def ReusableCBF: GenericCanBuildFrom[Nothing] = - scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]] -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/IsSeqLike.scala b/tests/scala2-library/src/library/scala/collection/generic/IsSeqLike.scala deleted file mode 100644 index 4c857ad1bb10..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/IsSeqLike.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `SeqLike[A, Repr]`. - * - * This type enables simple enrichment of `Seq`s with extension methods which - * can make full use of the mechanics of the Scala collections framework in - * their implementation. - * - * Example usage: - * {{{ - * class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) { - * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = - * r.flatMap(f(_)) - * } - * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A,Repr] = - * new FilterMapImpl(fr.conversion(r)) - * - * val l = List(1, 2, 3, 4, 5) - * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) - * // == List(2, 4) - * }}} - * - * @see [[scala.collection.Seq]] - * @see [[scala.collection.generic.IsTraversableLike]] - */ -trait IsSeqLike[Repr] { - /** The type of elements we can traverse over. */ - type A - /** A conversion from the representation type `Repr` to a `SeqLike[A,Repr]`. 
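A self-contained version of the `IsSeqLike` enrichment sketched in the Scaladoc above, assuming a stock Scala 2.12 classpath; the `FilterMapOps`/`filterMapSyntax` names are ours:

```scala
import scala.collection.SeqLike
import scala.collection.generic.{CanBuildFrom, IsSeqLike}
import scala.language.implicitConversions

object FilterMapDemo {
  final class FilterMapOps[A, Repr](val r: SeqLike[A, Repr]) {
    def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
      r.flatMap(a => f(a).toList)
  }

  // Applies to anything with an IsSeqLike instance: Seq subtypes and String alike.
  implicit def filterMapSyntax[Repr](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapOps[fr.A, Repr] =
    new FilterMapOps(fr.conversion(r))

  def main(args: Array[String]): Unit = {
    println(List(1, 2, 3, 4, 5).filterMap(i => if (i % 2 == 0) Some(i) else None)) // List(2, 4)
    println("scala".filterMap(c => if (c != 'a') Some(c.toUpper) else None))       // SCL
  }
}
```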
*/ - val conversion: Repr => SeqLike[A, Repr] -} - -object IsSeqLike { - import scala.language.higherKinds - - implicit val stringRepr: IsSeqLike[String] { type A = Char } = - new IsSeqLike[String] { - type A = Char - val conversion = implicitly[String => SeqLike[Char, String]] - } - - implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } = - new IsSeqLike[C[A0]] { - type A = A0 - val conversion = conv - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/IsTraversableLike.scala b/tests/scala2-library/src/library/scala/collection/generic/IsTraversableLike.scala deleted file mode 100644 index 22cef555cc0c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/IsTraversableLike.scala +++ /dev/null @@ -1,130 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** A trait which can be used to avoid code duplication when defining extension - * methods that should be applicable both to existing Scala collections (i.e., - * types extending `GenTraversableLike`) as well as other (potentially user-defined) - * types that could be converted to a Scala collection type. This trait - * makes it possible to treat Scala collections and types that can be implicitly - * converted to a collection type uniformly. For example, one can provide - * extension methods that work both on collection types and on `String`s (`String`s - * do not extend `GenTraversableLike`, but can be converted to `GenTraversableLike`) - * - * `IsTraversable` provides two members: - * - * 1. type member `A`, which represents the element type of the target `GenTraversableLike[A, Repr]` - * 1. value member `conversion`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `GenTraversableLike[A, Repr]`. - * - * ===Usage=== - * - * One must provide `IsTraversableLike` as an implicit parameter type of an implicit - * conversion. Its usage is shown below. Our objective in the following example - * is to provide a generic extension method `mapReduce` to any type that extends - * or can be converted to `GenTraversableLike`. In our example, this includes - * `String`. - * - * {{{ - * import scala.collection.GenTraversableLike - * import scala.collection.generic.IsTraversableLike - * - * class ExtensionMethods[A, Repr](coll: GenTraversableLike[A, Repr]) { - * def mapReduce[B](mapper: A => B)(reducer: (B, B) => B): B = { - * val iter = coll.toIterator - * var res = mapper(iter.next()) - * while (iter.hasNext) - * res = reducer(res, mapper(iter.next())) - * res - * } - * } - * - * implicit def withExtensions[Repr](coll: Repr)(implicit traversable: IsTraversableLike[Repr]) = - * new ExtensionMethods(traversable.conversion(coll)) - * - * // See it in action! - * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 - * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 - *}}} - * - * Here, we begin by creating a class `ExtensionMethods` which contains our - * `mapReduce` extension method. Note that `ExtensionMethods` takes a constructor - * argument `coll` of type `GenTraversableLike[A, Repr]`, where `A` represents the - * element type and `Repr` represents (typically) the collection type. 
The - * implementation of `mapReduce` itself is straightforward. - * - * The interesting bit is the implicit conversion `withExtensions`, which - * returns an instance of `ExtensionMethods`. This implicit conversion can - * only be applied if there is an implicit value `traversable` of type - * `IsTraversableLike[Repr]` in scope. Since `IsTraversableLike` provides - * value member `conversion`, which gives us a way to convert between whatever - * type we wish to add an extension method to (in this case, `Repr`) and - * `GenTraversableLike[A, Repr]`, we can now convert `coll` from type `Repr` - * to `GenTraversableLike[A, Repr]`. This allows us to create an instance of - * the `ExtensionMethods` class, which we pass our new - * `GenTraversableLike[A, Repr]` to. - * - * When the `mapReduce` method is called on some type of which it is not - * a member, implicit search is triggered. Because implicit conversion - * `withExtensions` is generic, it will be applied as long as an implicit - * value of type `IsTraversableLike[Repr]` can be found. Given that - * `IsTraversableLike` contains implicit members that return values of type - * `IsTraversableLike`, this requirement is typically satisfied, and the chain - * of interactions described in the previous paragraph is set into action. - * (See the `IsTraversableLike` companion object, which contains a precise - * specification of the available implicits.) - * - * ''Note'': Currently, it's not possible to combine the implicit conversion and - * the class with the extension methods into an implicit class due to - * limitations of type inference. - * - * ===Implementing `IsTraversableLike` for New Types=== - * - * One must simply provide an implicit value of type `IsTraversableLike` - * specific to the new type, or an implicit conversion which returns an - * instance of `IsTraversableLike` specific to the new type. - * - * Below is an example of an implementation of the `IsTraversableLike` trait - * where the `Repr` type is `String`. - * - *{{{ - * implicit val stringRepr: IsTraversableLike[String] { type A = Char } = - * new IsTraversableLike[String] { - * type A = Char - * val conversion = implicitly[String => GenTraversableLike[Char, String]] - * } - *}}} - * - * @author Miles Sabin - * @author J. Suereth - * @since 2.10 - */ -trait IsTraversableLike[Repr] { - /** The type of elements we can traverse over. */ - type A - /** A conversion from the representation type `Repr` to a `GenTraversableLike[A,Repr]`. 
*/ - val conversion: Repr => GenTraversableLike[A, Repr] -} - -object IsTraversableLike { - import scala.language.higherKinds - - implicit val stringRepr: IsTraversableLike[String] { type A = Char } = - new IsTraversableLike[String] { - type A = Char - val conversion = implicitly[String => GenTraversableLike[Char, String]] - } - - implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } = - new IsTraversableLike[C[A0]] { - type A = A0 - val conversion = conv - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/IsTraversableOnce.scala b/tests/scala2-library/src/library/scala/collection/generic/IsTraversableOnce.scala deleted file mode 100644 index 3ee586ae631d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/IsTraversableOnce.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `GenTraversableOnce[A]`. - * - * This type enables simple enrichment of `GenTraversableOnce`s with extension - * methods which can make full use of the mechanics of the Scala collections - * framework in their implementation. - * - * Example usage, - * {{{ - * class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) { - * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = { - * val b = cbf() - * for(e <- r.seq) f(e) foreach (b +=) - * b.result - * } - * } - * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] = - * new FilterMapImpl[fr.A, Repr](fr.conversion(r)) - * - * val l = List(1, 2, 3, 4, 5) - * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) - * // == List(2, 4) - * }}} - * - * @author Miles Sabin - * @author J. Suereth - * @since 2.10 - */ -trait IsTraversableOnce[Repr] { - /** The type of elements we can traverse over. */ - type A - /** A conversion from the representation type `Repr` to a `GenTraversableOnce[A]`. 
*/ - val conversion: Repr => GenTraversableOnce[A] -} - -object IsTraversableOnce { - import scala.language.higherKinds - - implicit val stringRepr: IsTraversableOnce[String] { type A = Char } = - new IsTraversableOnce[String] { - type A = Char - val conversion = implicitly[String => GenTraversableOnce[Char]] - } - - implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } = - new IsTraversableOnce[C[A0]] { - type A = A0 - val conversion = conv - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/IterableForwarder.scala b/tests/scala2-library/src/library/scala/collection/generic/IterableForwarder.scala deleted file mode 100644 index 7f6eb6e131be..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/IterableForwarder.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection._ - -/** This trait implements a forwarder for iterable objects. It forwards - * all calls to a different iterable object, except for - * - * - `toString`, `hashCode`, `equals`, `stringPrefix` - * - `newBuilder`, `view` - * - all calls creating a new iterable object of the same kind - * - * The above methods are forwarded by subclass `IterableProxy`. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("forwarding is inherently unreliable since it is not automated and methods can be forgotten", "2.11.0") -trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] { - - /** The iterable object to which calls are forwarded */ - protected def underlying: Iterable[A] - - // Iterable delegates - // Iterable methods could be printed by cat IterableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def' - - override def iterator: Iterator[A] = underlying.iterator - override def sameElements[B >: A](that: GenIterable[B]): Boolean = underlying.sameElements(that) -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/MapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/MapFactory.scala deleted file mode 100644 index 255d6953030e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/MapFactory.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `Map` and subclasses thereof. - * - * @define coll map - * @define Coll Map - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * @see CanBuildFrom - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. 
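The `MapCanBuildFrom` mentioned above is what keeps key/value transformations inside `Map`; a plain Scala 2.12 sketch:

```scala
val m = Map("a" -> 1, "b" -> 2)

val swapped: Map[Int, String] = m.map(_.swap)   // still a Map, via MapCanBuildFrom
val values: Iterable[Int]     = m.map(_._2)     // non-pair results fall back to Iterable
```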
- * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class MapFactory[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]] extends GenMapFactory[CC] { - - def empty[A, B]: CC[A, B] - -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/MutableMapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/MutableMapFactory.scala deleted file mode 100644 index 14c5b6bac3ce..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/MutableMapFactory.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** A template for companion objects of `mutable.Map` and subclasses thereof. - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -abstract class MutableMapFactory[CC[A, B] <: mutable.Map[A, B] with mutable.MapLike[A, B, CC[A, B]]] - extends MapFactory[CC] { - - /** The default builder for $Coll objects. - * @tparam A the type of the keys - * @tparam B the type of the associated values - */ - override def newBuilder[A, B]: Builder[(A, B), CC[A, B]] = empty[A, B] -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/MutableSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/MutableSetFactory.scala deleted file mode 100644 index 63944657fc29..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/MutableSetFactory.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import mutable.{ Builder, GrowingBuilder } -import scala.language.higherKinds - -abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]] - extends SetFactory[CC] { - - def newBuilder[A]: Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty[A]) -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/MutableSortedMapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/MutableSortedMapFactory.scala deleted file mode 100644 index b6fa933ca804..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/MutableSortedMapFactory.scala +++ /dev/null @@ -1,24 +0,0 @@ -package scala -package collection -package generic - -import scala.language.higherKinds - -/** - * A template for companion objects of `SortedMap` and subclasses thereof. - * - * @tparam CC the type of the collection. - * - * @author Rui Gonçalves - * @since 2.12 - * @version 2.12 - * - * @define Coll `mutable.SortedMap` - * @define coll mutable sorted map - * @define factoryInfo - * This object provides a set of operations needed to create sorted maps of type `$Coll`. 
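For the mutable variants above, the collection is effectively its own builder (`newBuilder` returns `empty`, or wraps it in a `GrowingBuilder`), so construction and later growth both go through `+=`. A Scala 2.12 usage sketch:

```scala
import scala.collection.mutable

val counts = mutable.Map("a" -> 1)   // apply builds via MutableMapFactory's newBuilder
counts += ("b" -> 2)
counts("c") = 3
// counts == Map(a -> 1, b -> 2, c -> 3)

val seen = mutable.Set(1, 2)         // apply goes through a GrowingBuilder (MutableSetFactory)
seen += 3
```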
- * @define sortedMapCanBuildFromInfo - * The standard `CanBuildFrom` instance for sorted maps - */ -abstract class MutableSortedMapFactory[CC[A, B] <: mutable.SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]] - extends SortedMapFactory[CC] diff --git a/tests/scala2-library/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/MutableSortedSetFactory.scala deleted file mode 100644 index 9bb12c231757..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/MutableSortedSetFactory.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.mutable.{ Builder, GrowingBuilder } -import scala.language.higherKinds - -/** - * @define Coll `mutable.SortedSet` - * @define coll mutable sorted set - * - * @author Lucien Pereira - * - */ -abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with SortedSetLike[A, CC[A]] with mutable.Set[A] with mutable.SetLike[A, CC[A]]] extends SortedSetFactory[CC] { - - /** - * mutable.SetBuilder uses '+' which is not a primitive for anything extending mutable.SetLike, - * this causes serious performance issues since each time 'elems = elems + x' - * is evaluated elems is cloned (which is O(n)). - * - * Fortunately GrowingBuilder comes to rescue. - * - */ - override def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty) - -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/OrderedTraversableFactory.scala deleted file mode 100644 index 7657aff2aaa1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/OrderedTraversableFactory.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - -import scala.language.higherKinds - -abstract class OrderedTraversableFactory[CC[X] <: Traversable[X] with GenericOrderedTraversableTemplate[X, CC]] -extends GenericOrderedCompanion[CC] { - - class GenericCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[CC[_], A, CC[A]] { - def apply(from: CC[_]) = from.genericOrderedBuilder[A] - def apply = newBuilder[A] - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/ParFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ParFactory.scala deleted file mode 100644 index 901e9fc239ce..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ParFactory.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.parallel.ParIterable -import scala.language.higherKinds - -/** A template class for companion objects of `ParIterable` and subclasses - * thereof. 
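The `GrowingBuilder` note above is about using the destructive `+=` of the underlying set rather than the copying `+`; from the caller's side it is invisible. A Scala 2.12 usage sketch:

```scala
import scala.collection.mutable

val ss = mutable.SortedSet(3, 1, 2)   // built in-place via a GrowingBuilder
ss += 0                               // in-place insertion, no copy of the whole set
// ss.toList == List(0, 1, 2, 3)

val sm = mutable.SortedMap(2 -> "b", 1 -> "a")
// sm.firstKey == 1
```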
This class extends `TraversableFactory` and provides a set of - * operations to create `$Coll` objects. - * - * @define coll parallel collection - * @define Coll `ParIterable` - * @since 2.8 - */ -abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]] -extends GenTraversableFactory[CC] - with GenericParCompanion[CC] { - - //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C] - - /** A generic implementation of the `CanCombineFrom` trait, which forwards - * all calls to `apply(from)` to the `genericParBuilder` method of the $coll - * `from`, and calls to `apply()` to this factory. - */ - class GenericCanCombineFrom[A] extends GenericCanBuildFrom[A] with CanCombineFrom[CC[_], A, CC[A]] { - override def apply(from: Coll) = from.genericCombiner - override def apply() = newBuilder[A] - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/ParMapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ParMapFactory.scala deleted file mode 100644 index 70797c83e2f8..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ParMapFactory.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.parallel.ParMap -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner -import scala.collection.mutable.Builder -import scala.language.higherKinds - -/** A template class for companion objects of `ParMap` and subclasses thereof. - * This class extends `TraversableFactory` and provides a set of operations - * to create `$Coll` objects. - * - * @define coll parallel map - * @define Coll `ParMap` - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC[X, Y], _]] -extends GenMapFactory[CC] - with GenericParMapCompanion[CC] { - - type MapColl = CC[_, _] - - /** The default builder for $Coll objects. - * @tparam K the type of the keys - * @tparam V the type of the associated values - */ - override def newBuilder[K, V]: Builder[(K, V), CC[K, V]] = newCombiner[K, V] - - /** The default combiner for $Coll objects. 
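Because `ParFactory` extends `GenTraversableFactory`, the parallel companions inherit the same `fill`/`tabulate`/`range` surface seen earlier, while `CanCombineFrom` keeps transformation results parallel. A Scala 2.12 sketch (parallel collections still ship with the 2.12 standard library):

```scala
import scala.collection.parallel.immutable.ParVector

val pv = ParVector.tabulate(8)(i => i * i)   // tabulate comes from GenTraversableFactory
val doubled = pv.map(_ * 2)                  // stays a ParVector via GenericCanCombineFrom
val total = doubled.sum                      // evaluated in parallel
```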
- * @tparam K the type of the keys - * @tparam V the type of the associated values - */ - def newCombiner[K, V]: Combiner[(K, V), CC[K, V]] - - class CanCombineFromMap[K, V] extends CanCombineFrom[CC[_, _], (K, V), CC[K, V]] { - def apply(from: MapColl) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]] - def apply() = newCombiner[K, V] - } - -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/ParSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/ParSetFactory.scala deleted file mode 100644 index 1341ddcb3846..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/ParSetFactory.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner -import scala.collection.parallel.ParSet -import scala.collection.parallel.ParSetLike -import scala.language.higherKinds - -/** - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] with GenericParTemplate[X, CC]] - extends GenSetFactory[CC] - with GenericParCompanion[CC] -{ - def newBuilder[A]: Combiner[A, CC[A]] = newCombiner[A] - - def newCombiner[A]: Combiner[A, CC[A]] - - class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] { - override def apply(from: Coll) = from.genericCombiner[A] - override def apply() = newCombiner[A] - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/SeqFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/SeqFactory.scala deleted file mode 100644 index 35cce11a79aa..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/SeqFactory.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic -import scala.language.higherKinds - -/** A template for companion objects of Seq and subclasses thereof. - * - * @since 2.8 - */ -abstract class SeqFactory[CC[X] <: Seq[X] with GenericTraversableTemplate[X, CC]] -extends GenSeqFactory[CC] with TraversableFactory[CC] { - - /** This method is called in a pattern match { case Seq(...) => }. - * - * @param x the selector value - * @return sequence wrapped in an option, if this is a Seq, otherwise none - */ - def unapplySeq[A](x: CC[A]): Some[CC[A]] = Some(x) - -} - diff --git a/tests/scala2-library/src/library/scala/collection/generic/SeqForwarder.scala b/tests/scala2-library/src/library/scala/collection/generic/SeqForwarder.scala deleted file mode 100644 index cee93d2ddbcf..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/SeqForwarder.scala +++ /dev/null @@ -1,59 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic -import scala.collection._ -import scala.collection.immutable.Range - -/** This class implements a forwarder for sequences. 
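`SeqFactory.unapplySeq` above is what backs `case Seq(...)` patterns; a plain Scala 2.12 sketch:

```scala
def describe(xs: Seq[Int]): String = xs match {
  case Seq()                 => "empty"
  case Seq(only)             => s"just $only"
  case Seq(first, rest @ _*) => s"$first, then ${rest.size} more"
}

describe(Vector())        // "empty"
describe(List(1))         // "just 1"
describe(List(1, 2, 3))   // "1, then 2 more"
```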
It forwards - * all calls to a different sequence object except for - * - * - `toString`, `hashCode`, `equals`, `stringPrefix` - * - `newBuilder`, `view`, `toSeq` - * - all calls creating a new sequence of the same kind - * - * The above methods are forwarded by subclass `SeqProxy`. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") -trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] { - - protected override def underlying: Seq[A] - - override def length: Int = underlying.length - override def apply(idx: Int): A = underlying.apply(idx) - override def lengthCompare(len: Int): Int = underlying lengthCompare len - override def isDefinedAt(x: Int): Boolean = underlying isDefinedAt x - override def segmentLength(p: A => Boolean, from: Int): Int = underlying.segmentLength(p, from) - override def prefixLength(p: A => Boolean) = underlying prefixLength p - override def indexWhere(p: A => Boolean): Int = underlying indexWhere p - override def indexWhere(p: A => Boolean, from: Int): Int = underlying.indexWhere(p, from) - override def indexOf[B >: A](elem: B): Int = underlying indexOf elem - override def indexOf[B >: A](elem: B, from: Int): Int = underlying.indexOf(elem, from) - override def lastIndexOf[B >: A](elem: B): Int = underlying lastIndexOf elem - override def lastIndexOf[B >: A](elem: B, end: Int): Int = underlying.lastIndexOf(elem, end) - override def lastIndexWhere(p: A => Boolean): Int = underlying lastIndexWhere p - override def lastIndexWhere(p: A => Boolean, end: Int): Int = underlying.lastIndexWhere(p, end) - override def reverseIterator: Iterator[A] = underlying.reverseIterator - override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = underlying.startsWith(that, offset) - override def startsWith[B](that: GenSeq[B]): Boolean = underlying startsWith that - override def endsWith[B](that: GenSeq[B]): Boolean = underlying endsWith that - override def indexOfSlice[B >: A](that: GenSeq[B]): Int = underlying indexOfSlice that - override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = underlying.indexOfSlice(that, from) - override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying lastIndexOfSlice that - override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end) - override def containsSlice[B](that: GenSeq[B]): Boolean = underlying containsSlice that - override def contains[A1 >: A](elem: A1): Boolean = underlying contains elem - override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p) - override def indices: Range = underlying.indices -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/SetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/SetFactory.scala deleted file mode 100644 index 5e50844cc9f5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/SetFactory.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import scala.language.higherKinds - -abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]] - extends GenSetFactory[CC] with GenericSeqCompanion[CC] diff 
--git a/tests/scala2-library/src/library/scala/collection/generic/Shrinkable.scala b/tests/scala2-library/src/library/scala/collection/generic/Shrinkable.scala deleted file mode 100644 index dea5bb7217c5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/Shrinkable.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** This trait forms part of collections that can be reduced - * using a `-=` operator. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define coll shrinkable collection - * @define Coll `Shrinkable` - */ -trait Shrinkable[-A] { - - /** Removes a single element from this $coll. - * - * @param elem the element to remove. - * @return the $coll itself - */ - def -=(elem: A): this.type - - /** Removes two or more elements from this $coll. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return the $coll itself - */ - def -=(elem1: A, elem2: A, elems: A*): this.type = { - this -= elem1 - this -= elem2 - this --= elems - } - - /** Removes all elements produced by an iterator from this $coll. - * - * @param xs the iterator producing the elements to remove. - * @return the $coll itself - */ - def --=(xs: TraversableOnce[A]): this.type = { xs foreach -= ; this } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/Signalling.scala b/tests/scala2-library/src/library/scala/collection/generic/Signalling.scala deleted file mode 100644 index 021d289c9da6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/Signalling.scala +++ /dev/null @@ -1,176 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import java.util.concurrent.atomic.AtomicInteger - -/** - * A message interface serves as a unique interface to the - * part of the collection capable of receiving messages from - * a different task. - * - * One example of use of this is the `find` method, which can use the - * signalling interface to inform worker threads that an element has - * been found and no further search is necessary. - * - * @author prokopec - * - * @define abortflag - * Abort flag being true means that a worker can abort and produce whatever result, - * since its result will not affect the final result of computation. An example - * of operations using this are `find`, `forall` and `exists` methods. - * - * @define indexflag - * The index flag holds an integer which carries some operation-specific meaning. For - * instance, `takeWhile` operation sets the index flag to the position of the element - * where the predicate fails. Other workers may check this index against the indices - * they are working on and return if this index is smaller than their index. Examples - * of operations using this are `takeWhile`, `dropWhile`, `span` and `indexOf`. - */ -trait Signalling { - /** - * Checks whether an abort signal has been issued. 
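// Illustrative sketch of the Shrinkable contract above (`-=`, the vararg `-=`, and `--=`),
// using scala.collection.mutable.Buffer, which mixes in Shrinkable in Scala 2.12.
// Object and value names are made up for illustration.
object ShrinkableDemo {
  import scala.collection.mutable
  def main(args: Array[String]): Unit = {
    val buf = mutable.Buffer(1, 2, 2, 3, 4, 5)
    buf -= 2            // removes one occurrence of 2: now 1, 2, 3, 4, 5
    buf -= (1, 3)       // vararg overload removes both: now 2, 4, 5
    buf --= List(4, 5)  // removes every element produced by the collection: now 2
    println(buf)
  }
}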
- * - * $abortflag - * @return the state of the abort - */ - def isAborted: Boolean - - /** - * Sends an abort signal to other workers. - * - * $abortflag - */ - def abort(): Unit - - /** - * Returns the value of the index flag. - * - * $indexflag - * @return the value of the index flag - */ - def indexFlag: Int - - /** - * Sets the value of the index flag. - * - * $indexflag - * @param f the value to which the index flag is set. - */ - def setIndexFlag(f: Int) - - /** - * Sets the value of the index flag if argument is greater than current value. - * This method does this atomically. - * - * $indexflag - * @param f the value to which the index flag is set - */ - def setIndexFlagIfGreater(f: Int) - - /** - * Sets the value of the index flag if argument is lesser than current value. - * This method does this atomically. - * - * $indexflag - * @param f the value to which the index flag is set - */ - def setIndexFlagIfLesser(f: Int) - - /** - * A read only tag specific to the signalling object. It is used to give - * specific workers information on the part of the collection being operated on. - */ - def tag: Int -} - -/** - * This signalling implementation returns default values and ignores received signals. - */ -class DefaultSignalling extends Signalling with VolatileAbort { - def indexFlag = -1 - def setIndexFlag(f: Int) {} - def setIndexFlagIfGreater(f: Int) {} - def setIndexFlagIfLesser(f: Int) {} - - def tag = -1 -} - -/** - * An object that returns default values and ignores received signals. - */ -object IdleSignalling extends DefaultSignalling - -/** - * A mixin trait that implements abort flag behaviour using volatile variables. - */ -trait VolatileAbort extends Signalling { - @volatile private var abortflag = false - override def isAborted = abortflag - override def abort() = abortflag = true -} - -/** - * A mixin trait that implements index flag behaviour using atomic integers. - * The `setIndex` operation is wait-free, while conditional set operations `setIndexIfGreater` - * and `setIndexIfLesser` are lock-free and support only monotonic changes. - */ -trait AtomicIndexFlag extends Signalling { - private val intflag: AtomicInteger = new AtomicInteger(-1) - abstract override def indexFlag = intflag.get - abstract override def setIndexFlag(f: Int) = intflag.set(f) - abstract override def setIndexFlagIfGreater(f: Int) = { - var loop = true - do { - val old = intflag.get - if (f <= old) loop = false - else if (intflag.compareAndSet(old, f)) loop = false - } while (loop) - } - abstract override def setIndexFlagIfLesser(f: Int) = { - var loop = true - do { - val old = intflag.get - if (f >= old) loop = false - else if (intflag.compareAndSet(old, f)) loop = false - } while (loop) - } -} - -/** - * An implementation of the signalling interface using delegates. - */ -trait DelegatedSignalling extends Signalling { - /** - * A delegate that method calls are redirected to. - */ - var signalDelegate: Signalling - - def isAborted = signalDelegate.isAborted - def abort() = signalDelegate.abort() - - def indexFlag = signalDelegate.indexFlag - def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f) - def setIndexFlagIfGreater(f: Int) = signalDelegate.setIndexFlagIfGreater(f) - def setIndexFlagIfLesser(f: Int) = signalDelegate.setIndexFlagIfLesser(f) - - def tag = signalDelegate.tag -} - -/** - * Class implementing delegated signalling. 
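// Illustrative sketch of the lock-free, monotonic update used by AtomicIndexFlag above:
// setIndexFlagIfGreater retries a compareAndSet until the stored value is already >= f
// or the CAS succeeds. Names below are made up for illustration.
import java.util.concurrent.atomic.AtomicInteger

object MonotonicFlagDemo {
  private val flag = new AtomicInteger(-1)

  // Mirrors AtomicIndexFlag.setIndexFlagIfGreater: the value only ever increases.
  def setIfGreater(f: Int): Unit = {
    var loop = true
    while (loop) {
      val old = flag.get
      if (f <= old) loop = false                        // a larger value is already stored
      else if (flag.compareAndSet(old, f)) loop = false // we won the race
      // otherwise: lost the race, re-read and retry
    }
  }

  def main(args: Array[String]): Unit = {
    Seq(3, 7, 5).foreach(setIfGreater)
    println(flag.get) // 7 -- updates are monotonic, the later 5 is ignored
  }
}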
- */ -class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling - -/** - * Class implementing delegated signalling, but having its own distinct `tag`. - */ -class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg) diff --git a/tests/scala2-library/src/library/scala/collection/generic/Sizing.scala b/tests/scala2-library/src/library/scala/collection/generic/Sizing.scala deleted file mode 100644 index 73584ce82e6a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/Sizing.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** A trait for objects which have a size. - */ -trait Sizing { - def size: Int -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/SliceInterval.scala b/tests/scala2-library/src/library/scala/collection/generic/SliceInterval.scala deleted file mode 100644 index 82acdd13716f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/SliceInterval.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** A container for the endpoints of a collection slice. - * The constructor is private to enforce the invariants: - * from >= 0, until >= 0, from <= until. - */ -private[collection] class SliceInterval private (val from: Int, val until: Int) { - // The width of this slice from end to end. This is the - // maximum size of the collection slice, but the collection - // need not have this many (or any) elements. Since - // from <= until is a constructor invariant, we don't have to - // check for negative values. - def width = until - from - - /** Returns a new SliceInterval with endpoints calculated in - * terms of the original collection. - * Example: - * {{{ - * val coll = (1 to 100).view.slice(10, 30).slice(1, 3) - * // the second call to slice causes the interval to - * // be recalculated: the result is SliceInterval(11, 13). 
- * }}} - */ - def recalculate(_from: Int, _until: Int): SliceInterval = { - val lo = _from max 0 - val elems = scala.math.min(_until - lo, width) - val start = from + lo - - if (elems <= 0) new SliceInterval(from, from) - else new SliceInterval(start, start + elems) - } - def recalculate(interval: SliceInterval): SliceInterval = - recalculate(interval.from, interval.until) -} - -object SliceInterval { - def apply(from: Int, until: Int) = { - val lo = from max 0 - val hi = until max 0 - - if (hi <= lo) new SliceInterval(lo, lo) - else new SliceInterval(lo, hi) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/Sorted.scala b/tests/scala2-library/src/library/scala/collection/generic/Sorted.scala deleted file mode 100644 index b2e63daabaa9..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/Sorted.scala +++ /dev/null @@ -1,113 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -/** Any collection (including maps) whose keys (or elements) are ordered. - * - * @author Sean McDirmid - * @since 2.8 - */ -trait Sorted[K, +This <: Sorted[K, This]] { - def ordering : Ordering[K] - - /** The current collection */ - protected def repr: This - - /** return as a projection the set of keys in this collection */ - def keySet: SortedSet[K] - - /** Returns the first key of the collection. */ - def firstKey: K - - /** Returns the last key of the collection. */ - def lastKey: K - - /** Comparison function that orders keys. */ - def compare(k0: K, k1: K): Int = ordering.compare(k0, k1) - - /** Creates a ranged projection of this collection. Any mutations in the - * ranged projection will update this collection and vice versa. - * - * Note: keys are not guaranteed to be consistent between this collection - * and the projection. This is the case for buffers where indexing is - * relative to the projection. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * `None` if there is no lower bound. - * @param until The upper-bound (exclusive) of the ranged projection. - * `None` if there is no upper bound. - */ - def rangeImpl(from: Option[K], until: Option[K]): This - - /** Creates a ranged projection of this collection with no upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - */ - def from(from: K): This = rangeImpl(Some(from), None) - - /** Creates a ranged projection of this collection with no lower-bound. - * - * @param until The upper-bound (exclusive) of the ranged projection. - */ - def until(until: K): This = rangeImpl(None, Some(until)) - - /** Creates a ranged projection of this collection with both a lower-bound - * and an upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * @param until The upper-bound (exclusive) of the ranged projection. - */ - def range(from: K, until: K): This = rangeImpl(Some(from), Some(until)) - - /** Create a range projection of this collection with no lower-bound. - * @param to The upper-bound (inclusive) of the ranged projection. 
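// Illustrative sketch of the endpoint arithmetic documented for SliceInterval above.
// SliceInterval itself is private[collection], so recalculate is re-derived here as a
// plain function; the (from, until) pairs are the ones from the documented example.
object SliceIntervalDemo {
  // Compose an outer slice (from, until) with an inner slice (_from, _until),
  // expressed in the coordinates of the original collection.
  def recalculate(from: Int, until: Int, _from: Int, _until: Int): (Int, Int) = {
    val width = until - from
    val lo    = _from max 0
    val elems = math.min(_until - lo, width)
    val start = from + lo
    if (elems <= 0) (from, from) else (start, start + elems)
  }

  def main(args: Array[String]): Unit = {
    println(recalculate(10, 30, 1, 3))                        // (11,13), as in the example above
    println((1 to 100).view.slice(10, 30).slice(1, 3).toList) // List(12, 13) -- indices 11 and 12
  }
}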
- */ - def to(to: K): This = { - val i = keySet.from(to).iterator - if (i.isEmpty) return repr - val next = i.next() - if (compare(next, to) == 0) - if (i.isEmpty) repr - else until(i.next()) - else - until(next) - } - - /** - * Creates an iterator over all the keys(or elements) contained in this - * collection greater than or equal to `start` - * according to the ordering of this collection. x.keysIteratorFrom(y) - * is equivalent to but often more efficient than - * x.from(y).keysIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def keysIteratorFrom(start: K): Iterator[K] - - protected def hasAll(j: Iterator[K]): Boolean = { - val i = keySet.iterator - if (i.isEmpty) return j.isEmpty - - var in = i.next() - while (j.hasNext) { - val jn = j.next() - while ({ - val n = compare(jn, in) - if (n == 0) false - else if (n < 0) return false - else if (!i.hasNext) return false - else true - }) in = i.next() - } - true - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/SortedMapFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/SortedMapFactory.scala deleted file mode 100644 index afa11e9ab140..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/SortedMapFactory.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import mutable.{Builder, MapBuilder} -import scala.language.higherKinds - -/** A template for companion objects of mutable.Map and subclasses thereof. - * - * @since 2.8 - */ -abstract class SortedMapFactory[CC[A, B] <: SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]] { - - type Coll = CC[_, _] - - def empty[A, B](implicit ord: Ordering[A]): CC[A, B] - - def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = (newBuilder[A, B](ord) ++= elems).result() - - def newBuilder[A, B](implicit ord: Ordering[A]): Builder[(A, B), CC[A, B]] = - new MapBuilder[A, B, CC[A, B]](empty(ord)) - - class SortedMapCanBuildFrom[A, B](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, (A, B), CC[A, B]] { - def apply(from: Coll) = newBuilder[A, B](ord) - def apply() = newBuilder[A, B] - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/SortedSetFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/SortedSetFactory.scala deleted file mode 100644 index c734830e0b2c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/SortedSetFactory.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package generic - -import mutable.{Builder, SetBuilder} -import scala.language.higherKinds - -/** A template for companion objects of Set and subclasses thereof. 
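// Illustrative sketch of the ranged projections declared by Sorted above (from / until /
// range / to), using immutable.SortedSet from the same Scala 2.12 library. Values are
// made up for illustration.
object SortedProjectionDemo {
  import scala.collection.immutable.SortedSet
  def main(args: Array[String]): Unit = {
    val s = SortedSet(1, 3, 5, 7, 9)
    println(s.from(5))     // TreeSet(5, 7, 9)   -- lower bound inclusive
    println(s.until(5))    // TreeSet(1, 3)      -- upper bound exclusive
    println(s.range(3, 8)) // TreeSet(3, 5, 7)
    println(s.to(5))       // TreeSet(1, 3, 5)   -- upper bound inclusive
  }
}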
- * - * @since 2.8 - */ -abstract class SortedSetFactory[CC[A] <: SortedSet[A] with SortedSetLike[A, CC[A]]] { - type Coll = CC[_] - - def empty[A](implicit ord: Ordering[A]): CC[A] - - def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = (newBuilder[A](ord) ++= elems).result() - - def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty) - - implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord) - - class SortedSetCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, A, CC[A]] { - def apply(from: Coll) = newBuilder[A](ord) - def apply() = newBuilder[A](ord) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/Subtractable.scala b/tests/scala2-library/src/library/scala/collection/generic/Subtractable.scala deleted file mode 100644 index 32a900029654..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/Subtractable.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - - -/** This trait represents collection-like objects that can be reduced - * using a '+' operator. It defines variants of `-` and `--` - * as convenience methods in terms of single-element removal `-`. - * @tparam A the type of the elements of the $coll. - * @tparam Repr the type of the $coll itself - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define coll collection - * @define Coll Subtractable - */ -trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self => - - /** The representation object of type `Repr` which contains the collection's elements - */ - protected def repr: Repr - - /** Creates a new $coll from this $coll with an element removed. - * @param elem the element to remove - * @return a new collection that contains all elements of the current $coll - * except one less occurrence of `elem`. - */ - def -(elem: A): Repr - - /** Creates a new $coll from this $coll with some elements removed. - * - * This method takes two or more elements to be removed. Another overloaded - * variant of this method handles the case where a single element is - * removed. - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new $coll that contains all elements of the current $coll - * except one less occurrence of each of the given elements. - */ - def -(elem1: A, elem2: A, elems: A*): Repr = - this - elem1 - elem2 -- elems - - /** Creates a new $coll from this $coll by removing all elements of another - * collection. - * - * @param xs the collection containing the removed elements. - * @return a new $coll that contains all elements of the current $coll - * except one less occurrence of each of the elements of `elems`. 
- */ - def --(xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ - _) -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/TraversableFactory.scala b/tests/scala2-library/src/library/scala/collection/generic/TraversableFactory.scala deleted file mode 100644 index ad6d8fd1982f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/TraversableFactory.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `Traversable` and subclasses thereof. - * This class provides a set of operations to create `$Coll` objects. - * It is typically inherited by companion objects of subclasses of `Traversable`. - * - * @since 2.8 - * - * @define coll collection - * @define Coll Traversable - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * The created value is an instance of class `GenericCanBuildFrom`, - * which forwards calls to create a new builder to the - * `genericBuilder` method of the requesting collection. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -trait TraversableFactory[CC[X] <: Traversable[X] with GenericTraversableTemplate[X, CC]] - extends GenTraversableFactory[CC] with GenericSeqCompanion[CC] - diff --git a/tests/scala2-library/src/library/scala/collection/generic/TraversableForwarder.scala b/tests/scala2-library/src/library/scala/collection/generic/TraversableForwarder.scala deleted file mode 100644 index b94507d6ef5b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/generic/TraversableForwarder.scala +++ /dev/null @@ -1,79 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package generic - -import scala.collection._ -import mutable.{ Buffer, StringBuilder } -import immutable.{ List, Stream } -import scala.reflect.ClassTag - -/** This trait implements a forwarder for traversable objects. It forwards - * all calls to a different traversable, except for: - * - * - `toString`, `hashCode`, `equals`, `stringPrefix` - * - `newBuilder`, `view` - * - * All calls creating a new traversable of the same kind. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - */ -@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") -trait TraversableForwarder[+A] extends Traversable[A] { - /** The traversable object to which calls are forwarded. 
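// Illustrative sketch of the Subtractable operations above: `-` removes one element,
// the vararg `-` removes several, and `--` folds single-element removal over another
// collection, which is exactly the `(repr /: xs.seq)(_ - _)` shape shown. Values are
// made up for illustration.
object SubtractableDemo {
  def main(args: Array[String]): Unit = {
    val s = Set(1, 2, 3, 4, 5)
    println(s - 3)                         // contains 1, 2, 4, 5
    println(s - (1, 2))                    // contains 3, 4, 5
    println(s -- List(2, 4))               // contains 1, 3, 5
    println(List(2, 4).foldLeft(s)(_ - _)) // same result -- what `--` expands to
  }
}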
*/ - protected def underlying: Traversable[A] - - override def foreach[U](f: A => U): Unit = underlying foreach f - override def isEmpty: Boolean = underlying.isEmpty - override def nonEmpty: Boolean = underlying.nonEmpty - override def size: Int = underlying.size - override def hasDefiniteSize = underlying.hasDefiniteSize - override def forall(p: A => Boolean): Boolean = underlying forall p - override def exists(p: A => Boolean): Boolean = underlying exists p - override def count(p: A => Boolean): Int = underlying count p - override def find(p: A => Boolean): Option[A] = underlying find p - override def foldLeft[B](z: B)(op: (B, A) => B): B = underlying.foldLeft(z)(op) - override def /: [B](z: B)(op: (B, A) => B): B = underlying./:(z)(op) - override def foldRight[B](z: B)(op: (A, B) => B): B = underlying.foldRight(z)(op) - override def :\ [B](z: B)(op: (A, B) => B): B = underlying.:\(z)(op) - override def reduceLeft[B >: A](op: (B, A) => B): B = underlying.reduceLeft(op) - override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = underlying.reduceLeftOption(op) - override def reduceRight[B >: A](op: (A, B) => B): B = underlying.reduceRight(op) - override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = underlying.reduceRightOption(op) - override def sum[B >: A](implicit num: Numeric[B]): B = underlying sum num - override def product[B >: A](implicit num: Numeric[B]): B = underlying product num - override def min[B >: A](implicit cmp: Ordering[B]): A = underlying min cmp - override def max[B >: A](implicit cmp: Ordering[B]): A = underlying max cmp - override def head: A = underlying.head - override def headOption: Option[A] = underlying.headOption - override def last: A = underlying.last - override def lastOption: Option[A] = underlying.lastOption - override def copyToBuffer[B >: A](dest: Buffer[B]) = underlying.copyToBuffer(dest) - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = underlying.copyToArray(xs, start, len) - override def copyToArray[B >: A](xs: Array[B], start: Int) = underlying.copyToArray(xs, start) - override def copyToArray[B >: A](xs: Array[B]) = underlying.copyToArray(xs) - override def toArray[B >: A: ClassTag]: Array[B] = underlying.toArray - override def toList: List[A] = underlying.toList - override def toIterable: Iterable[A] = underlying.toIterable - override def toSeq: Seq[A] = underlying.toSeq - override def toIndexedSeq = underlying.toIndexedSeq - override def toBuffer[B >: A] = underlying.toBuffer - override def toStream: Stream[A] = underlying.toStream - override def toSet[B >: A]: immutable.Set[B] = underlying.toSet - override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = underlying.toMap(ev) - override def mkString(start: String, sep: String, end: String): String = underlying.mkString(start, sep, end) - override def mkString(sep: String): String = underlying.mkString(sep) - override def mkString: String = underlying.mkString - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = underlying.addString(b, start, sep, end) - override def addString(b: StringBuilder, sep: String): StringBuilder = underlying.addString(b, sep) - override def addString(b: StringBuilder): StringBuilder = underlying.addString(b) -} diff --git a/tests/scala2-library/src/library/scala/collection/generic/package.scala b/tests/scala2-library/src/library/scala/collection/generic/package.scala deleted file mode 100644 index 015c3455db9e..000000000000 --- 
a/tests/scala2-library/src/library/scala/collection/generic/package.scala +++ /dev/null @@ -1,17 +0,0 @@ -package scala -package collection - -import scala.language.higherKinds - -package object generic { - type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To] - - @deprecated("use ClassTagTraversableFactory instead", "2.10.0") - type ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]] = ClassTagTraversableFactory[CC] - - @deprecated("use GenericClassTagCompanion instead", "2.10.0") - type GenericClassManifestCompanion[+CC[X] <: Traversable[X]] = GenericClassTagCompanion[CC] - - @deprecated("use GenericClassTagTraversableTemplate instead", "2.10.0") - type GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] = GenericClassTagTraversableTemplate[A, CC] -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/BitSet.scala b/tests/scala2-library/src/library/scala/collection/immutable/BitSet.scala deleted file mode 100644 index ecf3326c7f95..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/BitSet.scala +++ /dev/null @@ -1,166 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import BitSetLike.{LogWL, updateArray} -import mutable.Builder - -/** A class for immutable bitsets. - * $bitsetinfo - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_bitsets "Scala's Collection Library overview"]] - * section on `Immutable BitSets` for more information. - * - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -@SerialVersionUID(1611436763290191562L) -abstract class BitSet extends scala.collection.AbstractSet[Int] - with SortedSet[Int] - with scala.collection.BitSet - with BitSetLike[BitSet] - with Serializable { - override def empty = BitSet.empty - - protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) - - /** Update word at index `idx`; enlarge set if `idx` outside range of set. - */ - protected def updateWord(idx: Int, w: Long): BitSet - - /** Adds element to bitset, returning a new set. - */ - def + (elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) this - else { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - } - } - - /** Removes element from bitset, returning a new set - */ - def - (elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - } else this - } -} - -/** $factoryInfo - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -object BitSet extends BitSetFactory[BitSet] { - /** The empty bitset */ - val empty: BitSet = new BitSet1(0L) - - private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) - - /** A builder that takes advantage of mutable BitSets. 
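// Illustrative sketch of the word/bit arithmetic used by the immutable BitSet above:
// an element n lives in word n >> 6 (LogWL), and since Long shifts are taken mod 64,
// `1L << n` already selects its bit within that word. Values are made up.
object BitSetWordsDemo {
  import scala.collection.immutable.BitSet
  def main(args: Array[String]): Unit = {
    val bs = BitSet(1, 2, 64, 70)
    println(bs.toBitMask.toList.map(_.toBinaryString))
    // List(110, 1000001) -- word 0 holds 1 and 2, word 1 holds 64 and 70
    println(64 >> 6)                                 // 1 -- word index of element 64
    println(java.lang.Long.toBinaryString(1L << 70)) // 1000000 -- bit 70 & 63 = 6 of its word
    println(BitSet.fromBitMask(Array(6L, 65L)))      // BitSet(1, 2, 64, 70)
  }
}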
*/ - def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] { - private[this] val b = new mutable.BitSet - def += (x: Int) = { b += x; this } - def clear() = b.clear() - def result() = b.toImmutable - } - - /** $bitsetCanBuildFrom */ - implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else { - val a = new Array[Long](len) - Array.copy(elems, 0, a, 0, len) - new BitSetN(a) - } - } - - /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. - */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else new BitSetN(elems) - } - - @SerialVersionUID(2260107458435649300L) - class BitSet1(val elems: Long) extends BitSet { - protected def nwords = 1 - protected def word(idx: Int) = if (idx == 0) elems else 0L - protected def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet1(w) - else if (idx == 1) createSmall(elems, w) - else fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) - override def head: Int = - if (elems == 0L) throw new NoSuchElementException("Empty BitSet") - else java.lang.Long.numberOfTrailingZeros(elems) - override def tail: BitSet = - if (elems == 0L) throw new NoSuchElementException("Empty BitSet") - else new BitSet1(elems - java.lang.Long.lowestOneBit(elems)) - } - - class BitSet2(val elems0: Long, elems1: Long) extends BitSet { - protected def nwords = 2 - protected def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L - protected def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet2(w, elems1) - else if (idx == 1) createSmall(elems0, w) - else fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) - override def head: Int = - if (elems0 == 0L) { - if (elems1 == 0) throw new NoSuchElementException("Empty BitSet") - 64 + java.lang.Long.numberOfTrailingZeros(elems1) - } - else java.lang.Long.numberOfTrailingZeros(elems0) - override def tail: BitSet = - if (elems0 == 0L) { - if (elems1 == 0L) throw new NoSuchElementException("Empty BitSet") - createSmall(elems0, elems1 - java.lang.Long.lowestOneBit(elems1)) - } - else new BitSet2(elems0 - java.lang.Long.lowestOneBit(elems0), elems1) - } - - /** The implementing class for bit sets with elements >= 128 (exceeding - * the capacity of two long values). The constructor wraps an existing - * bit mask without copying, thus exposing a mutable part of the internal - * implementation. Care needs to be taken not to modify the exposed - * array. 
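// Illustrative sketch of the head/tail bit tricks in BitSet1/BitSet2 above:
// numberOfTrailingZeros finds the smallest element in a word, and subtracting
// lowestOneBit removes exactly that element. Values are made up for illustration.
object BitSetHeadTailDemo {
  def main(args: Array[String]): Unit = {
    val elems = 0x28L                                    // bits 3 and 5 set: the set {3, 5}
    println(java.lang.Long.numberOfTrailingZeros(elems)) // 3 -- the head
    val rest = elems - java.lang.Long.lowestOneBit(elems)
    println(java.lang.Long.toBinaryString(rest))         // 100000 -- only 5 remains (the tail)

    val bs = scala.collection.immutable.BitSet(3, 5)
    println((bs.head, bs.tail))                          // (3, BitSet(5))
  }
}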
- */ - class BitSetN(val elems: Array[Long]) extends BitSet { - protected def nwords = elems.length - protected def word(idx: Int) = if (idx < nwords) elems(idx) else 0L - protected def updateWord(idx: Int, w: Long): BitSet = fromBitMaskNoCopy(updateArray(elems, idx, w)) - override def tail: BitSet = { - val n = nwords - var i = 0 - while (i < n) { - val wi = word(i) - if (wi != 0L) return fromBitMaskNoCopy(updateArray(elems, i, wi - java.lang.Long.lowestOneBit(wi))) - i += 1 - } - throw new NoSuchElementException("Empty BitSet") - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/DefaultMap.scala b/tests/scala2-library/src/library/scala/collection/immutable/DefaultMap.scala deleted file mode 100644 index e9b277b9c412..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/DefaultMap.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -/** A default map which implements the `+` and `-` - * methods of maps. It does so using the default builder for - * maps defined in the `Map` object. - * Instances that inherit from `DefaultMap[A, B]` still have to - * define: - * - * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * }}} - * - * It refers back to the original map. - * - * It might also be advisable to override `foreach` or - * `size` if efficient implementations can be found. - * - * @tparam A the type of the keys contained in this map. - * @tparam B the type of the values associated with the keys. - * - * @since 2.8 - */ -trait DefaultMap[A, +B] extends Map[A, B] { self => - - /** A default implementation which creates a new immutable map. - */ - override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = { - val b = Map.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } - - /** A default implementation which creates a new immutable map. - */ - override def - (key: A): Map[A, B] = { - val b = newBuilder - for (kv <- this ; if kv._1 != key) b += kv - b.result() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/HashMap.scala b/tests/scala2-library/src/library/scala/collection/immutable/HashMap.scala deleted file mode 100644 index a99e60882e72..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/HashMap.scala +++ /dev/null @@ -1,625 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import scala.annotation.unchecked.{ uncheckedVariance=> uV } -import parallel.immutable.ParHashMap - -/** This class implements immutable maps using a hash trie. - * - * '''Note:''' The builder of this hash map may return specialized representations for small maps. - * - * @tparam A the type of the keys contained in this hash map. - * @tparam B the type of the values associated with the keys. 
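// Illustrative sketch of the DefaultMap contract above: a read-only Map can be defined
// by supplying only `get` and `iterator`, while `+` and `-` fall back to builder-based
// copies. Assumes the Scala 2.12 library these sources come from; names are made up.
object DefaultMapDemo {
  class SquareMap(keys: Set[Int]) extends scala.collection.immutable.DefaultMap[Int, Int] {
    def get(key: Int): Option[Int] = if (keys(key)) Some(key * key) else None
    def iterator: Iterator[(Int, Int)] = keys.iterator.map(k => k -> k * k)
  }

  def main(args: Array[String]): Unit = {
    val m = new SquareMap(Set(1, 2, 3))
    println(m.get(2))        // Some(4)
    println(m + (10 -> 100)) // a new map with 10 -> 100 added, built via the default `+`
    println(m - 2)           // a new map without key 2, built via the default `-`
  }
}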
- * - * @author Martin Odersky - * @author Tiark Rompf - * @version 2.8 - * @since 2.3 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash_tries "Scala's Collection Library overview"]] - * section on `Hash Tries` for more information. - * @define Coll `immutable.HashMap` - * @define coll immutable hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(2L) -sealed class HashMap[A, +B] extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, HashMap[A, B]] - with Serializable - with CustomParallelizable[(A, B), ParHashMap[A, B]] -{ - import HashMap.{nullToEmpty, bufferSize} - - override def size: Int = 0 - - override def empty = HashMap.empty[A, B] - - def iterator: Iterator[(A,B)] = Iterator.empty - - override def foreach[U](f: ((A, B)) => U): Unit = () - - def get(key: A): Option[B] = - get0(key, computeHash(key), 0) - - override final def contains(key: A): Boolean = - contains0(key, computeHash(key), 0) - - override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] = - updated0(key, computeHash(key), 0, value, null, null) - - override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] = - updated0(kv._1, computeHash(kv._1), 0, kv._2, kv, null) - - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): HashMap[A, B1] = - this + elem1 + elem2 ++ elems - - def - (key: A): HashMap[A, B] = - removed0(key, computeHash(key), 0) - - override def tail: HashMap[A, B] = this - head._1 - - override def filter(p: ((A, B)) => Boolean) = { - val buffer = new Array[HashMap[A, B]](bufferSize(size)) - nullToEmpty(filter0(p, false, 0, buffer, 0)) - } - - override def filterNot(p: ((A, B)) => Boolean) = { - val buffer = new Array[HashMap[A, B]](bufferSize(size)) - nullToEmpty(filter0(p, true, 0, buffer, 0)) - } - - protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = null - - protected def elemHashCode(key: A) = key.## - - protected final def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) - } - - private[collection] def computeHash(key: A) = improve(elemHashCode(key)) - - import HashMap.{Merger, MergeFunction, liftMerger} - - private[collection] def get0(key: A, hash: Int, level: Int): Option[B] = None - protected def contains0(key: A, hash: Int, level: Int): Boolean = false - private[collection] def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = - new HashMap.HashMap1(key, hash, value, kv) - - protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this - - protected def writeReplace(): AnyRef = new HashMap.SerializationProxy(this) - - def split: Seq[HashMap[A, B]] = Seq(this) - - /** Creates a new map which is the merge of this and the argument hash map. - * - * Uses the specified collision resolution function if two keys are the same. - * The collision resolution function will always take the first argument from - * `this` hash map and the second from `that`. - * - * The `merged` method is on average more performant than doing a traversal and reconstructing a - * new immutable hash map from scratch, or `++`. 
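// Illustrative sketch of the `merged` operation described above: when both maps contain
// a key, the merge function sees this map's pair first and the other map's pair second,
// and a null merge function keeps the pair from the receiver. Assumes the Scala 2.12
// immutable.HashMap from these sources; keys and values are made up.
object MergedDemo {
  import scala.collection.immutable.HashMap
  def main(args: Array[String]): Unit = {
    val a = HashMap("x" -> 1, "y" -> 2)
    val b = HashMap("y" -> 10, "z" -> 3)
    val summed = a.merged(b) { case ((k, v1), (_, v2)) => (k, v1 + v2) }
    println(summed)            // x -> 1, y -> 12, z -> 3 (in hash order)
    println(a.merged(b)(null)) // null merge function: pairs from `a` win on collision
  }
}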
- * - * @tparam B1 the value type of the other hash map - * @param that the other hash map - * @param mergef the merge function or null if the first key-value pair is to be picked - */ - def merged[B1 >: B](that: HashMap[A, B1])(mergef: MergeFunction[A, B1]): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef)) - - protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that - - override def par = ParHashMap.fromTrie(this) - -} - -/** $factoryInfo - * @define Coll `immutable.HashMap` - * @define coll immutable hash map - * - * @author Tiark Rompf - * @since 2.3 - */ -object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { - - private[collection] abstract class Merger[A, B] { - def apply(kv1: (A, B), kv2: (A, B)): (A, B) - def invert: Merger[A, B] - } - - private type MergeFunction[A1, B1] = ((A1, B1), (A1, B1)) => (A1, B1) - - private def liftMerger[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = - if (mergef == null) defaultMerger.asInstanceOf[Merger[A1, B1]] else liftMerger0(mergef) - - private[this] val defaultMerger : Merger[Any, Any] = liftMerger0((a,b) => a) - - private[this] def liftMerger0[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = new Merger[A1, B1] { - self => - def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv1, kv2) - val invert: Merger[A1, B1] = new Merger[A1, B1] { - def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv2, kv1) - def invert: Merger[A1, B1] = self - } - } - - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]] - - private object EmptyHashMap extends HashMap[Any, Nothing] { - override def head: (Any, Nothing) = throw new NoSuchElementException("Empty Map") - override def tail: HashMap[Any, Nothing] = throw new NoSuchElementException("Empty Map") - } - - // utility method to create a HashTrieMap from two leaf HashMaps (HashMap1 or HashMapCollision1) with non-colliding hash code) - private def makeHashTrieMap[A, B](hash0:Int, elem0:HashMap[A, B], hash1:Int, elem1:HashMap[A, B], level:Int, size:Int) : HashTrieMap[A, B] = { - val index0 = (hash0 >>> level) & 0x1f - val index1 = (hash1 >>> level) & 0x1f - if(index0 != index1) { - val bitmap = (1 << index0) | (1 << index1) - val elems = new Array[HashMap[A,B]](2) - if(index0 < index1) { - elems(0) = elem0 - elems(1) = elem1 - } else { - elems(0) = elem1 - elems(1) = elem0 - } - new HashTrieMap[A, B](bitmap, elems, size) - } else { - val elems = new Array[HashMap[A,B]](1) - val bitmap = (1 << index0) - elems(0) = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size) - new HashTrieMap[A, B](bitmap, elems, size) - } - } - - @deprecatedInheritance("This class will be made final in a future release.", "2.12.2") - class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] { - override def size = 1 - - private[collection] def getKey = key - private[collection] def getHash = hash - private[collection] def computeHashFor(k: A) = computeHash(k) - - override def get0(key: A, hash: Int, level: Int): Option[B] = - if (hash == this.hash && key == this.key) Some(value) else None - - override protected def contains0(key: A, hash: Int, level: Int): Boolean = - hash == this.hash && key == this.key - private[collection] override def 
updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = - if (hash == this.hash && key == this.key ) { - if (merger eq null) { - if (this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this - else new HashMap1(key, hash, value, kv) - } else { - val nkv = merger(this.ensurePair, if(kv != null) kv else (key, value)) - new HashMap1(nkv._1, hash, nkv._2, nkv) - } - } else { - if (hash != this.hash) { - // they have different hashes, but may collide at this level - find a level at which they don't - val that = new HashMap1[A, B1](key, hash, value, kv) - makeHashTrieMap[A,B1](this.hash, this, hash, that, level, 2) - } else { - // 32-bit hash collision (rare, but not impossible) - new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value)) - } - } - - override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = - if (hash == this.hash && key == this.key) HashMap.empty[A,B] else this - - override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = - if (negate ^ p(ensurePair)) this else null - - override def iterator: Iterator[(A,B)] = Iterator(ensurePair) - override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair) - // this method may be called multiple times in a multithreaded environment, but that's ok - private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv } - protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = { - that.updated0(key, hash, level, value, kv, merger.invert) - } - } - - private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV]) - extends HashMap[A, B @uV] { - // assert(kvs.size > 1) - - override def size = kvs.size - - override def get0(key: A, hash: Int, level: Int): Option[B] = - if (hash == this.hash) kvs.get(key) else None - - override protected def contains0(key: A, hash: Int, level: Int): Boolean = - hash == this.hash && kvs.contains(key) - - private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = - if (hash == this.hash) { - if ((merger eq null) || !kvs.contains(key)) new HashMapCollision1(hash, kvs.updated(key, value)) - else new HashMapCollision1(hash, kvs + merger((key, kvs(key)), kv)) - } else { - val that = new HashMap1(key, hash, value, kv) - makeHashTrieMap(this.hash, this, hash, that, level, size + 1) - } - - override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = - if (hash == this.hash) { - val kvs1 = kvs - key - kvs1.size match { - case 0 => - HashMap.empty[A,B] - case 1 => - val kv = kvs1.head - new HashMap1(kv._1,hash,kv._2,kv) - case x if x == kvs.size => - this - case _ => - new HashMapCollision1(hash, kvs1) - } - } else this - - override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = { - val kvs1 = if(negate) kvs.filterNot(p) else kvs.filter(p) - kvs1.size match { - case 0 => - null - case 1 => - val kv@(k,v) = kvs1.head - new HashMap1(k, hash, v, kv) - case x if x == kvs.size => - this - case _ => - new HashMapCollision1(hash, kvs1) - } - } - - override def iterator: Iterator[(A,B)] = kvs.iterator - override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f) - override def split: Seq[HashMap[A, B]] = { - val 
(x, y) = kvs.splitAt(kvs.size / 2) - def newhm(lm: ListMap[A, B @uV]) = new HashMapCollision1(hash, lm) - List(newhm(x), newhm(y)) - } - protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = { - // this can be made more efficient by passing the entire ListMap at once - var m = that - for (p <- kvs) m = m.updated0(p._1, this.hash, level, p._2, p, merger) - m - } - } - - @deprecatedInheritance("This class will be made final in a future release.", "2.12.2") - class HashTrieMap[A, +B]( - private[collection] val bitmap: Int, - private[collection] val elems: Array[HashMap[A, B @uV]], - private[collection] val size0: Int - ) extends HashMap[A, B @uV] { - - // assert(Integer.bitCount(bitmap) == elems.length) - // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieMap[_,_]])) - - override def size = size0 - - override def get0(key: A, hash: Int, level: Int): Option[B] = { - // Note: this code is duplicated with `contains0` - val index = (hash >>> level) & 0x1f - if (bitmap == - 1) { - elems(index).get0(key, hash, level + 5) - } else { - val mask = (1 << index) - if ((bitmap & mask) != 0) { - val offset = Integer.bitCount(bitmap & (mask - 1)) - elems(offset).get0(key, hash, level + 5) - } else { - None - } - } - } - - override protected def contains0(key: A, hash: Int, level: Int): Boolean = { - // Note: this code is duplicated from `get0` - val index = (hash >>> level) & 0x1f - if (bitmap == - 1) { - elems(index).contains0(key, hash, level + 5) - } else { - val mask = (1 << index) - if ((bitmap & mask) != 0) { - val offset = Integer.bitCount(bitmap & (mask - 1)) - elems(offset).contains0(key, hash, level + 5) - } else { - false - } - } - } - - private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask - 1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.updated0(key, hash, level + 5, value, kv, merger) - if(subNew eq sub) this else { - val elemsNew = new Array[HashMap[A,B1]](elems.length) - Array.copy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) - } - } else { - val elemsNew = new Array[HashMap[A,B1]](elems.length + 1) - Array.copy(elems, 0, elemsNew, 0, offset) - elemsNew(offset) = new HashMap1(key, hash, value, kv) - Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset) - new HashTrieMap(bitmap | mask, elemsNew, size + 1) - } - } - - override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask - 1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.removed0(key, hash, level + 5) - if (subNew eq sub) this - else if (subNew.isEmpty) { - val bitmapNew = bitmap ^ mask - if (bitmapNew != 0) { - val elemsNew = new Array[HashMap[A,B]](elems.length - 1) - Array.copy(elems, 0, elemsNew, 0, offset) - Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1) - val sizeNew = size - sub.size - // if we have only one child, which is not a HashTrieSet but a self-contained set like - // HashSet1 or HashSetCollision1, return the child instead - if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]]) - elemsNew(0) - else 
- new HashTrieMap(bitmapNew, elemsNew, sizeNew) - } else - HashMap.empty[A,B] - } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[_,_]]) { - subNew - } else { - val elemsNew = new Array[HashMap[A,B]](elems.length) - Array.copy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - val sizeNew = size + (subNew.size - sub.size) - new HashTrieMap(bitmap, elemsNew, sizeNew) - } - } else { - this - } - } - - override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = { - // current offset - var offset = offset0 - // result size - var rs = 0 - // bitmap for kept elems - var kept = 0 - // loop over all elements - var i = 0 - while (i < elems.length) { - val result = elems(i).filter0(p, negate, level + 5, buffer, offset) - if (result ne null) { - buffer(offset) = result - offset += 1 - // add the result size - rs += result.size - // mark the bit i as kept - kept |= (1 << i) - } - i += 1 - } - if (offset == offset0) { - // empty - null - } else if (rs == size0) { - // unchanged - this - } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieMap[A, B]]) { - // leaf - buffer(offset0) - } else { - // we have to return a HashTrieMap - val length = offset - offset0 - val elems1 = new Array[HashMap[A, B]](length) - System.arraycopy(buffer, offset0, elems1, 0, length) - val bitmap1 = if (length == elems.length) { - // we can reuse the original bitmap - bitmap - } else { - // calculate new bitmap by keeping just bits in the kept bitmask - keepBits(bitmap, kept) - } - new HashTrieMap(bitmap1, elems1, rs) - } - } - - override def iterator: Iterator[(A, B)] = new TrieIterator[(A, B)](elems.asInstanceOf[Array[Iterable[(A, B)]]]) { - final override def getElem(cc: AnyRef): (A, B) = cc.asInstanceOf[HashMap1[A, B]].ensurePair - } - - override def foreach[U](f: ((A, B)) => U): Unit = { - var i = 0 - while (i < elems.length) { - elems(i).foreach(f) - i += 1 - } - } - - private def posOf(n: Int, bm: Int) = { - var left = n - var i = -1 - var b = bm - while (left >= 0) { - i += 1 - if ((b & 1) != 0) left -= 1 - b = b >>> 1 - } - i - } - - override def split: Seq[HashMap[A, B]] = if (size == 1) Seq(this) else { - val nodesize = Integer.bitCount(bitmap) - if (nodesize > 1) { - val splitpoint = nodesize / 2 - val bitsplitpoint = posOf(nodesize / 2, bitmap) - val bm1 = bitmap & (-1 << bitsplitpoint) - val bm2 = bitmap & (-1 >>> (32 - bitsplitpoint)) - - val (e1, e2) = elems.splitAt(splitpoint) - val hm1 = new HashTrieMap(bm1, e1, e1.foldLeft(0)(_ + _.size)) - val hm2 = new HashTrieMap(bm2, e2, e2.foldLeft(0)(_ + _.size)) - - List(hm1, hm2) - } else elems(0).split - } - - protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that match { - case hm: HashMap1[A, B] => - this.updated0(hm.key, hm.hash, level, hm.value.asInstanceOf[B1], hm.kv, merger) - case hm: HashTrieMap[_, _] => - val that = hm.asInstanceOf[HashTrieMap[A, B1]] - val thiselems = this.elems - val thatelems = that.elems - var thisbm = this.bitmap - var thatbm = that.bitmap - - // determine the necessary size for the array - val subcount = Integer.bitCount(thisbm | thatbm) - - // construct a new array of appropriate size - val merged = new Array[HashMap[A, B1]](subcount) - - // run through both bitmaps and add elements to it - var i = 0 - var thisi = 0 - var thati = 0 - var totalelems = 0 - while (i < subcount) { - val thislsb = thisbm ^ (thisbm & (thisbm - 1)) - val 
thatlsb = thatbm ^ (thatbm & (thatbm - 1)) - - // collision - if (thislsb == thatlsb) { - val m = thiselems(thisi).merge0(thatelems(thati), level + 5, merger) - totalelems += m.size - merged(i) = m - thisbm = thisbm & ~thislsb - thatbm = thatbm & ~thatlsb - thati += 1 - thisi += 1 - } else { - // condition below is due to 2 things: - // 1) no unsigned int compare on JVM - // 2) 0 (no lsb) should always be greater in comparison - if (unsignedCompare(thislsb - 1, thatlsb - 1)) { - val m = thiselems(thisi) - totalelems += m.size - merged(i) = m - thisbm = thisbm & ~thislsb - thisi += 1 - } - else { - val m = thatelems(thati) - totalelems += m.size - merged(i) = m - thatbm = thatbm & ~thatlsb - thati += 1 - } - } - i += 1 - } - - new HashTrieMap[A, B1](this.bitmap | that.bitmap, merged, totalelems) - case hm: HashMapCollision1[_, _] => that.merge0(this, level, merger.invert) - case hm: HashMap[_, _] => this - case _ => sys.error("section supposed to be unreachable.") - } - } - - /** - * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection - * @param size the maximum size of the collection to be generated - * @return the maximum buffer size - */ - @inline private def bufferSize(size: Int): Int = (size + 6) min (32 * 7) - - /** - * In many internal operations the empty map is represented as null for performance reasons. This method converts - * null to the empty map for use in public methods - */ - @inline private def nullToEmpty[A, B](m: HashMap[A, B]): HashMap[A, B] = if (m eq null) empty[A, B] else m - - /** - * Utility method to keep a subset of all bits in a given bitmap - * - * Example - * bitmap (binary): 00000001000000010000000100000001 - * keep (binary): 1010 - * result (binary): 00000001000000000000000100000000 - * - * @param bitmap the bitmap - * @param keep a bitmask containing which bits to keep - * @return the original bitmap with all bits where keep is not 1 set to 0 - */ - private def keepBits(bitmap: Int, keep: Int): Int = { - var result = 0 - var current = bitmap - var kept = keep - while (kept != 0) { - // lowest remaining bit in current - val lsb = current ^ (current & (current - 1)) - if ((kept & 1) != 0) { - // mark bit in result bitmap - result |= lsb - } - // clear lowest remaining one bit in abm - current &= ~lsb - // look at the next kept bit - kept >>>= 1 - } - result - } - - @SerialVersionUID(2L) - private class SerializationProxy[A,B](@transient private var orig: HashMap[A, B]) extends Serializable { - private def writeObject(out: java.io.ObjectOutputStream) { - val s = orig.size - out.writeInt(s) - for ((k,v) <- orig) { - out.writeObject(k) - out.writeObject(v) - } - } - - private def readObject(in: java.io.ObjectInputStream) { - orig = empty - val s = in.readInt() - for (i <- 0 until s) { - val key = in.readObject().asInstanceOf[A] - val value = in.readObject().asInstanceOf[B] - orig = orig.updated(key, value) - } - } - - private def readResolve(): AnyRef = orig - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/HashSet.scala b/tests/scala2-library/src/library/scala/collection/immutable/HashSet.scala deleted file mode 100644 index 9db79c911da6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/HashSet.scala +++ /dev/null @@ -1,1052 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - 
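// Illustrative sketch re-deriving the keepBits example documented above (the method
// itself is private to HashMap): bit i of `keep` decides whether the i-th set bit of
// `bitmap` survives. The inputs are the ones from the documented binary example.
object KeepBitsDemo {
  def keepBits(bitmap: Int, keep: Int): Int = {
    var result  = 0
    var current = bitmap
    var kept    = keep
    while (kept != 0) {
      val lsb = current ^ (current & (current - 1)) // lowest remaining set bit
      if ((kept & 1) != 0) result |= lsb            // keep it if the next keep-bit is 1
      current &= ~lsb
      kept >>>= 1
    }
    result
  }

  def main(args: Array[String]): Unit = {
    val bitmap = Integer.parseInt("00000001000000010000000100000001", 2)
    val keep   = Integer.parseInt("1010", 2)
    println(Integer.toBinaryString(keepBits(bitmap, keep))) // 1000000000000000100000000
  }
}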
-package scala -package collection -package immutable - -import generic._ -import scala.collection.parallel.immutable.ParHashSet -import scala.collection.GenSet -import scala.annotation.tailrec - -/** This class implements immutable sets using a hash trie. - * - * '''Note:''' The builder of this hash set may return specialized representations for small sets. - * - * @tparam A the type of the elements contained in this hash set. - * - * @author Martin Odersky - * @author Tiark Rompf - * @version 2.8 - * @since 2.3 - * @define Coll `immutable.HashSet` - * @define coll immutable hash set - */ -@SerialVersionUID(2L) -sealed class HashSet[A] extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, HashSet] - with SetLike[A, HashSet[A]] - with CustomParallelizable[A, ParHashSet[A]] - with Serializable -{ - import HashSet.{nullToEmpty, bufferSize, LeafHashSet} - - override def companion: GenericCompanion[HashSet] = HashSet - - //class HashSet[A] extends Set[A] with SetLike[A, HashSet[A]] { - - override def par = ParHashSet.fromTrie(this) - - override def size: Int = 0 - - override def empty = HashSet.empty[A] - - def iterator: Iterator[A] = Iterator.empty - - override def foreach[U](f: A => U): Unit = () - - def contains(e: A): Boolean = get0(e, computeHash(e), 0) - - override def subsetOf(that: GenSet[A]) = that match { - case that:HashSet[A] => - // call the specialized implementation with a level of 0 since both this and that are top-level hash sets - subsetOf0(that, 0) - case _ => - // call the generic implementation - super.subsetOf(that) - } - - /** - * A specialized implementation of subsetOf for when both this and that are HashSet[A] and we can take advantage - * of the tree structure of both operands and the precalculated hashcodes of the HashSet1 instances. - * @param that the other set - * @param level the level of this and that hashset - * The purpose of level is to keep track of how deep we are in the tree. - * We need this information for when we arrive at a leaf and have to call get0 on that - * The value of level is 0 for a top-level HashSet and grows in increments of 5 - * @return true if all elements of this set are contained in that set - */ - protected def subsetOf0(that: HashSet[A], level: Int) = { - // The default implementation is for the empty set and returns true because the empty set is a subset of all sets - true - } - - override def + (e: A): HashSet[A] = updated0(e, computeHash(e), 0) - - override def + (elem1: A, elem2: A, elems: A*): HashSet[A] = - this + elem1 + elem2 ++ elems - - override def union(that: GenSet[A]): HashSet[A] = that match { - case that: HashSet[A] => - val buffer = new Array[HashSet[A]](bufferSize(this.size + that.size)) - nullToEmpty(union0(that, 0, buffer, 0)) - case _ => super.union(that) - } - - override def intersect(that: GenSet[A]): HashSet[A] = that match { - case that: HashSet[A] => - val buffer = new Array[HashSet[A]](bufferSize(this.size min that.size)) - nullToEmpty(intersect0(that, 0, buffer, 0)) - case _ => super.intersect(that) - } - - override def diff(that: GenSet[A]): HashSet[A] = that match { - case that: HashSet[A] => - val buffer = new Array[HashSet[A]](bufferSize(this.size)) - nullToEmpty(diff0(that, 0, buffer, 0)) - case _ => super.diff(that) - } - - /** - * Union with a leaf HashSet at a given level. - * @param that a leaf HashSet - * @param level the depth in the tree. We need this when we have to create a branch node on top of this and that - * @return The union of this and that at the given level. 
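`subsetOf` above only switches to the structural `subsetOf0` walk when both operands are hash tries; from user code the result is the same either way. A hedged usage sketch against today's `scala.collection.immutable.HashSet`, assuming its behaviour matches the deleted copy:

```scala
import scala.collection.immutable.{HashSet, TreeSet}

object SubsetDemo {
  def main(args: Array[String]): Unit = {
    val small = HashSet(1, 2, 3)
    val large = HashSet((1 to 100): _*)

    // Both operands are hash tries, so the specialized structural walk applies.
    assert(small.subsetOf(large))
    assert(!large.subsetOf(small))

    // A non-HashSet operand takes the generic element-by-element path,
    // with the same observable result.
    assert(small.subsetOf(TreeSet(1, 2, 3, 4)))
  }
}
```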
Unless level is zero, the result is not a self-contained - * HashSet but needs to be stored at the correct depth - */ - private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = { - // the default implementation is for the empty set, so we just return that - that - } - - /** - * Union with a HashSet at a given level - * @param that a HashSet - * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree - * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes - * @param offset0 the first offset into the buffer in which we are allowed to write - * @return The union of this and that at the given level. Unless level is zero, the result is not a self-contained - * HashSet but needs to be stored at the correct depth - */ - private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - // the default implementation is for the empty set, so we just return that - that - } - - /** - * Intersection with another hash set at a given level - * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree - * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes - * @param offset0 the first offset into the buffer in which we are allowed to write - * @return The intersection of this and that at the given level. Unless level is zero, the result is not a - * self-contained HashSet but needs to be stored at the correct depth - */ - private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - // the default implementation is for the empty set, so we just return the empty set - null - } - - /** - * Diff with another hash set at a given level - * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree - * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes - * @param offset0 the first offset into the buffer in which we are allowed to write - * @return The diff of this and that at the given level. 
Unless level is zero, the result is not a - * self-contained HashSet but needs to be stored at the correct depth - */ - private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - // the default implementation is for the empty set, so we just return the empty set - null - } - - def - (e: A): HashSet[A] = - nullToEmpty(removed0(e, computeHash(e), 0)) - - override def tail: HashSet[A] = this - head - - override def filter(p: A => Boolean) = { - val buffer = new Array[HashSet[A]](bufferSize(size)) - nullToEmpty(filter0(p, false, 0, buffer, 0)) - } - - override def filterNot(p: A => Boolean) = { - val buffer = new Array[HashSet[A]](bufferSize(size)) - nullToEmpty(filter0(p, true, 0, buffer, 0)) - } - - protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = null - - protected def elemHashCode(key: A) = key.## - - protected final def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) - } - - private[collection] def computeHash(key: A) = improve(elemHashCode(key)) - - protected def get0(key: A, hash: Int, level: Int): Boolean = false - - private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = - new HashSet.HashSet1(key, hash) - - protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this - - protected def writeReplace(): AnyRef = new HashSet.SerializationProxy(this) - - override def toSet[B >: A]: Set[B] = this.asInstanceOf[HashSet[B]] -} - -/** $factoryInfo - * @define Coll `immutable.HashSet` - * @define coll immutable hash set - * - * @author Tiark Rompf - * @since 2.3 - * @define Coll `immutable.HashSet` - * @define coll immutable hash set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -object HashSet extends ImmutableSetFactory[HashSet] { - - /** $setCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A] - - private object EmptyHashSet extends HashSet[Any] { - override def head: Any = throw new NoSuchElementException("Empty Set") - override def tail: HashSet[Any] = throw new NoSuchElementException("Empty Set") - } - private[collection] def emptyInstance: HashSet[Any] = EmptyHashSet - - // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash code) - private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = { - val index0 = (hash0 >>> level) & 0x1f - val index1 = (hash1 >>> level) & 0x1f - if(index0 != index1) { - val bitmap = (1 << index0) | (1 << index1) - val elems = new Array[HashSet[A]](2) - if(index0 < index1) { - elems(0) = elem0 - elems(1) = elem1 - } else { - elems(0) = elem1 - elems(1) = elem0 - } - new HashTrieSet[A](bitmap, elems, elem0.size + elem1.size) - } else { - val elems = new Array[HashSet[A]](1) - val bitmap = (1 << index0) - val child = makeHashTrieSet(hash0, elem0, hash1, elem1, level + 5) - elems(0) = child - new HashTrieSet[A](bitmap, elems, child.size) - } - } - - /** - * Common superclass of HashSet1 and HashSetCollision1, which are the two possible leaves of the Trie - */ - private[HashSet] sealed abstract class LeafHashSet[A] extends HashSet[A] { - private[HashSet] def hash:Int - } - - class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends LeafHashSet[A] { - override def size = 1 - - override protected def 
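Every lookup above first scrambles `hashCode` through `improve` so that clustered hash codes (for example consecutive `Int`s) do not pile into neighbouring trie buckets. The sketch copies that mixer unchanged; the surrounding object and printout are illustrative only:

```scala
object ImproveDemo {
  // Copy of the deleted bit-mixing step applied to every element hashCode
  // before it is used to address the trie.
  def improve(hcode: Int): Int = {
    var h: Int = hcode + ~(hcode << 9)
    h = h ^ (h >>> 14)
    h = h + (h << 4)
    h ^ (h >>> 10)
  }

  def main(args: Array[String]): Unit = {
    // An Int's hashCode is the Int itself, so consecutive keys would all land
    // in neighbouring level-0 buckets; after mixing they spread out.
    for (i <- 0 to 4) {
      val mixed = improve(i)
      println(f"key=$i raw bucket=${i & 0x1f}%2d mixed bucket=${mixed & 0x1f}%2d")
    }
  }
}
```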
get0(key: A, hash: Int, level: Int): Boolean = - (hash == this.hash && key == this.key) - - override protected def subsetOf0(that: HashSet[A], level: Int) = { - // check if that contains this.key - // we use get0 with our key and hash at the correct level instead of calling contains, - // which would not work since that might not be a top-level HashSet - // and in any case would be inefficient because it would require recalculating the hash code - that.get0(key, hash, level) - } - - override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash && key == this.key) this - else { - if (hash != this.hash) { - makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level) - } else { - // 32-bit hash collision (rare, but not impossible) - new HashSetCollision1(hash, ListSet.empty + this.key + key) - } - } - - override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match { - case that if that.hash != this.hash => - // different hash code, so there is no need to investigate further. - // Just create a branch node containing the two. - makeHashTrieSet(this.hash, this, that.hash, that, level) - case that: HashSet1[A] => - if (this.key == that.key) { - this - } else { - // 32-bit hash collision (rare, but not impossible) - new HashSetCollision1[A](hash, ListSet.empty + this.key + that.key) - } - case that: HashSetCollision1[A] => - val ks1 = that.ks + key - // Could use eq check (faster) if ListSet was guaranteed to return itself - if (ks1.size == that.ks.size) { - that - } else { - new HashSetCollision1[A](hash, ks1) - } - } - - override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int) = { - // switch to the Leaf version of union - // we can exchange the arguments because union is symmetrical - that.union0(this, level) - } - - override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (that.get0(key, hash, level)) this else null - - override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (that.get0(key, hash, level)) null else this - - override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash && key == this.key) null else this - - override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (negate ^ p(key)) this else null - - override def iterator: Iterator[A] = Iterator(key) - override def foreach[U](f: A => U): Unit = f(key) - } - - private[immutable] class HashSetCollision1[A](private[HashSet] val hash: Int, val ks: ListSet[A]) extends LeafHashSet[A] { - - override def size = ks.size - - override protected def get0(key: A, hash: Int, level: Int): Boolean = - if (hash == this.hash) ks.contains(key) else false - - override protected def subsetOf0(that: HashSet[A], level: Int) = { - // we have to check each element - // we use get0 with our hash at the correct level instead of calling contains, - // which would not work since that might not be a top-level HashSet - // and in any case would be inefficient because it would require recalculating the hash code - ks.forall(key => that.get0(key, hash, level)) - } - - override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash) new HashSetCollision1(hash, ks + key) - else makeHashTrieSet(this.hash, 
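`makeHashTrieSet` and `updated0` above address children by slicing the improved hash into 5-bit chunks, one chunk per level. A small sketch of that index computation (`indexAt` and the sample hash are made up for illustration):

```scala
object LevelIndexDemo {
  // The same index computation the deleted trie nodes use at each level:
  // take five bits of the hash, starting at bit `level`.
  def indexAt(hash: Int, level: Int): Int = (hash >>> level) & 0x1f

  def main(args: Array[String]): Unit = {
    val hash = 0x2C71AE53 // an arbitrary example hash code
    for (level <- 0 to 30 by 5)
      println(f"level=$level%2d index=${indexAt(hash, level)}%2d")

    // Two hashes that agree on the first chunk but differ later share the
    // level-0 child and only split deeper down, which is exactly what
    // makeHashTrieSet recurses to resolve.
    val h0 = 0x00000001
    val h1 = 0x00000021 // same low 5 bits, different next chunk
    assert(indexAt(h0, 0) == indexAt(h1, 0))
    assert(indexAt(h0, 5) != indexAt(h1, 5))
  }
}
```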
this, hash, new HashSet1(key, hash), level) - - override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match { - case that if that.hash != this.hash => - // different hash code, so there is no need to investigate further. - // Just create a branch node containing the two. - makeHashTrieSet(this.hash, this, that.hash, that, level) - case that: HashSet1[A] => - val ks1 = ks + that.key - // Could use eq check (faster) if ListSet was guaranteed to return itself - if (ks1.size == ks.size) { - this - } else { - // create a new HashSetCollision with the existing hash - // we don't have to check for size=1 because union is never going to remove elements - new HashSetCollision1[A](hash, ks1) - } - case that: HashSetCollision1[A] => - val ks1 = this.ks ++ that.ks - ks1.size match { - case size if size == this.ks.size => - // could this check be made faster by doing an eq check? - // I am not sure we can rely on ListSet returning itself when all elements are already in the set, - // so it seems unwise to rely on it. - this - case size if size == that.ks.size => - // we have to check this as well, since we don't want to create a new instance if this is a subset of that - that - case _ => - // create a new HashSetCollision with the existing hash - // we don't have to check for size=1 because union is never going to remove elements - new HashSetCollision1[A](hash, ks1) - } - } - - override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { - case that: LeafHashSet[A] => - // switch to the simpler Tree/Leaf implementation - this.union0(that, level) - case that: HashTrieSet[A] => - // switch to the simpler Tree/Leaf implementation - // we can swap this and that because union is symmetrical - that.union0(this, level) - case _ => this - } - - override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - // filter the keys, taking advantage of the fact that we know their hash code - val ks1 = ks.filter(that.get0(_, hash, level)) - ks1.size match { - case 0 => - // the empty set - null - case size if size == this.size => - // unchanged - // We do this check first since even if the result is of size 1 since - // it is preferable to return the existing set for better structural sharing - this - case size if size == that.size => - // the other set - // We do this check first since even if the result is of size 1 since - // it is preferable to return the existing set for better structural sharing - that - case 1 => - // create a new HashSet1 with the hash we already know - new HashSet1(ks1.head, hash) - case _ => - // create a new HashSetCollision with the hash we already know and the new keys - new HashSetCollision1(hash, ks1) - } - } - - override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - val ks1 = ks.filterNot(that.get0(_, hash, level)) - ks1.size match { - case 0 => - // the empty set - null - case size if size == this.size => - // unchanged - // We do this check first since even if the result is of size 1 since - // it is preferable to return the existing set for better structural sharing - this - case 1 => - // create a new HashSet1 with the hash we already know - new HashSet1(ks1.head, hash) - case _ => - // create a new HashSetCollision with the hash we already know and the new keys - new HashSetCollision1(hash, ks1) - } - } - - override protected def 
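`HashSetCollision1` above exists only for distinct keys whose full 32-bit hashes collide; the public API hides it completely. A hedged sketch with a deliberately degenerate key type (`BadKey` is invented for the demo) showing the observable behaviour:

```scala
import scala.collection.immutable.HashSet

object CollisionDemo {
  // A key type whose instances all share one hash code, forcing the set to
  // fall back to a collision bucket internally (a ListSet in the deleted code).
  final case class BadKey(name: String) {
    override def hashCode: Int = 42
  }

  def main(args: Array[String]): Unit = {
    val s = HashSet(BadKey("a"), BadKey("b"), BadKey("c"))
    assert(s.size == 3)                          // all keys kept despite colliding hashes
    assert(s.contains(BadKey("b")))              // lookup degrades to a scan of the bucket
    assert(!(s - BadKey("b")).contains(BadKey("b")))
  }
}
```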
removed0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash) { - val ks1 = ks - key - ks1.size match { - case 0 => - // the empty set - null - case 1 => - // create a new HashSet1 with the hash we already know - new HashSet1(ks1.head, hash) - case size if size == ks.size => - // Should only have HSC1 if size > 1 - this - case _ => - // create a new HashSetCollision with the hash we already know and the new keys - new HashSetCollision1(hash, ks1) - } - } else this - - override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - val ks1 = if(negate) ks.filterNot(p) else ks.filter(p) - ks1.size match { - case 0 => - null - case 1 => - new HashSet1(ks1.head, hash) - case x if x == ks.size => - this - case _ => - new HashSetCollision1(hash, ks1) - } - } - - override def iterator: Iterator[A] = ks.iterator - override def foreach[U](f: A => U): Unit = ks.foreach(f) - - private def writeObject(out: java.io.ObjectOutputStream) { - // this cannot work - reading things in might produce different - // hash codes and remove the collision. however this is never called - // because no references to this class are ever handed out to client code - // and HashTrieSet serialization takes care of the situation - sys.error("cannot serialize an immutable.HashSet where all items have the same 32-bit hash code") - //out.writeObject(kvs) - } - - private def readObject(in: java.io.ObjectInputStream) { - sys.error("cannot deserialize an immutable.HashSet where all items have the same 32-bit hash code") - //kvs = in.readObject().asInstanceOf[ListSet[A]] - //hash = computeHash(kvs.) - } - - } - - /** - * A branch node of the HashTrieSet with at least one and up to 32 children. - * - * @param bitmap encodes which element corresponds to which child - * @param elems the up to 32 children of this node. - * the number of children must be identical to the number of 1 bits in bitmap - * @param size0 the total number of elements. This is stored just for performance reasons. - * @tparam A the type of the elements contained in this hash set. - * - * How levels work: - * - * When looking up or adding elements, the part of the hashcode that is used to address the children array depends - * on how deep we are in the tree. This is accomplished by having a level parameter in all internal methods - * that starts at 0 and increases by 5 (32 = 2^5) every time we go deeper into the tree. - * - * hashcode (binary): 00000000000000000000000000000000 - * level=0 (depth=0) ^^^^^ - * level=5 (depth=1) ^^^^^ - * level=10 (depth=2) ^^^^^ - * ... - * - * Be careful: a non-toplevel HashTrieSet is not a self-contained set, so e.g. calling contains on it will not work! - * It relies on its depth in the Trie for which part of a hash to use to address the children, but this information - * (the level) is not stored due to storage efficiency reasons but has to be passed explicitly! - * - * How bitmap and elems correspond: - * - * A naive implementation of a HashTrieSet would always have an array of size 32 for children and leave the unused - * children empty (null). But that would be very wasteful regarding memory. Instead, only non-empty children are - * stored in elems, and the bitmap is used to encode which elem corresponds to which child bucket. The lowest 1 bit - * corresponds to the first element, the second-lowest to the second, etc. 
- * - * bitmap (binary): 00010000000000000000100000000000 - * elems: [a,b] - * children: ---b----------------a----------- - */ - class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int) - extends HashSet[A] { - assert(Integer.bitCount(bitmap) == elems.length) - // assertion has to remain disabled until scala/bug#6197 is solved - // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieSet[_]])) - - override def size = size0 - - override protected def get0(key: A, hash: Int, level: Int): Boolean = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - if (bitmap == - 1) { - elems(index & 0x1f).get0(key, hash, level + 5) - } else if ((bitmap & mask) != 0) { - val offset = Integer.bitCount(bitmap & (mask-1)) - elems(offset).get0(key, hash, level + 5) - } else - false - } - - override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask-1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.updated0(key, hash, level + 5) - if (sub eq subNew) this - else { - val elemsNew = new Array[HashSet[A]](elems.length) - Array.copy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size)) - } - } else { - val elemsNew = new Array[HashSet[A]](elems.length + 1) - Array.copy(elems, 0, elemsNew, 0, offset) - elemsNew(offset) = new HashSet1(key, hash) - Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset) - val bitmapNew = bitmap | mask - new HashTrieSet(bitmapNew, elemsNew, size + 1) - } - } - - override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = { - val index = (that.hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask - 1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val sub1 = sub.union0(that, level + 5) - if (sub eq sub1) this - else { - val elems1 = new Array[HashSet[A]](elems.length) - Array.copy(elems, 0, elems1, 0, elems.length) - elems1(offset) = sub1 - new HashTrieSet(bitmap, elems1, size + (sub1.size - sub.size)) - } - } else { - val elems1 = new Array[HashSet[A]](elems.length + 1) - Array.copy(elems, 0, elems1, 0, offset) - elems1(offset) = that - Array.copy(elems, offset, elems1, offset + 1, elems.length - offset) - val bitmap1 = bitmap | mask - new HashTrieSet(bitmap1, elems1, size + that.size) - } - } - - override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { - case that if that eq this => - // shortcut for when that is this - // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" - // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B - // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking - // at these nodes. 
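The scaladoc above shows buckets 11 and 28 occupied with `elems = [a, b]`; `get0`/`updated0` recover the array slot with `Integer.bitCount(bitmap & (mask - 1))`, that is, by counting the occupied buckets below the one being addressed. A sketch of just that step (`offsetOf` is an illustrative name):

```scala
object SparseIndexDemo {
  // Position of bucket `index` inside the compressed child array:
  // count the occupied buckets strictly below it.
  def offsetOf(bitmap: Int, index: Int): Int =
    Integer.bitCount(bitmap & ((1 << index) - 1))

  def main(args: Array[String]): Unit = {
    // From the deleted scaladoc: buckets 11 and 28 are occupied, elems = [a, b].
    val bitmap = (1 << 11) | (1 << 28)

    assert((bitmap & (1 << 11)) != 0 && offsetOf(bitmap, 11) == 0) // bucket 11 -> elems(0) = a
    assert((bitmap & (1 << 28)) != 0 && offsetOf(bitmap, 28) == 1) // bucket 28 -> elems(1) = b
    assert((bitmap & (1 << 5)) == 0)                               // bucket 5 is empty
  }
}
```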
- this - case that: LeafHashSet[A] => - // when that is a leaf, we can switch to the simpler Tree/Leaf implementation - this.union0(that, level) - case that: HashTrieSet[A] => - val a = this.elems - var abm = this.bitmap - var ai = 0 - - val b = that.elems - var bbm = that.bitmap - var bi = 0 - - // fetch a new temporary array that is guaranteed to be big enough (32 elements) - var offset = offset0 - var rs = 0 - - // loop as long as there are bits left in either abm or bbm - while ((abm | bbm) != 0) { - // lowest remaining bit in abm - val alsb = abm ^ (abm & (abm - 1)) - // lowest remaining bit in bbm - val blsb = bbm ^ (bbm & (bbm - 1)) - if (alsb == blsb) { - val sub1 = a(ai).union0(b(bi), level + 5, buffer, offset) - rs += sub1.size - buffer(offset) = sub1 - offset += 1 - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb - ai += 1 - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb - bi += 1 - } else if (unsignedCompare(alsb - 1, blsb - 1)) { - // alsb is smaller than blsb, or alsb is set and blsb is 0 - // in any case, alsb is guaranteed to be set here! - val sub1 = a(ai) - rs += sub1.size - buffer(offset) = sub1 - offset += 1 - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb - ai += 1 - } else { - // blsb is smaller than alsb, or blsb is set and alsb is 0 - // in any case, blsb is guaranteed to be set here! - val sub1 = b(bi) - rs += sub1.size - buffer(offset) = sub1 - offset += 1 - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb - bi += 1 - } - } - if (rs == this.size) { - // if the result would be identical to this, we might as well return this - this - } else if (rs == that.size) { - // if the result would be identical to that, we might as well return that - that - } else { - // we don't have to check whether the result is a leaf, since union will only make the set larger - // and this is not a leaf to begin with. - val length = offset - offset0 - val elems = new Array[HashSet[A]](length) - System.arraycopy(buffer, offset0, elems, 0, length) - new HashTrieSet(this.bitmap | that.bitmap, elems, rs) - } - case _ => this - } - - override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { - case that if that eq this => - // shortcut for when that is this - // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" - // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B - // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking - // at these nodes! 
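`union0` above walks two bitmaps in ascending unsigned bit order, consuming the smaller remaining bit each time so the merged child array stays sorted by bucket. A simplified sketch of that walk over bare bitmaps, without the child arrays (`mergePlan` and its labels are illustrative):

```scala
object BitmapMergeDemo {
  private def unsignedLess(i: Int, j: Int): Boolean = (i < j) ^ (i < 0) ^ (j < 0)

  // Walk the union of two bitmaps in ascending bucket order and report, for each
  // occupied bucket, whether it came from the left map, the right map, or both.
  // This mirrors the shape of the deleted union0 loop.
  def mergePlan(left: Int, right: Int): Seq[String] = {
    var a = left
    var b = right
    val out = Seq.newBuilder[String]
    while ((a | b) != 0) {
      val alsb = a ^ (a & (a - 1)) // lowest remaining bit of a (0 once a is exhausted)
      val blsb = b ^ (b & (b - 1)) // lowest remaining bit of b
      if (alsb == blsb) {
        out += s"bucket ${Integer.numberOfTrailingZeros(alsb)}: both"
        a &= ~alsb; b &= ~blsb
      } else if (unsignedLess(alsb - 1, blsb - 1)) {
        // subtracting 1 makes an exhausted side (lsb 0) compare as largest
        out += s"bucket ${Integer.numberOfTrailingZeros(alsb)}: left only"
        a &= ~alsb
      } else {
        out += s"bucket ${Integer.numberOfTrailingZeros(blsb)}: right only"
        b &= ~blsb
      }
    }
    out.result()
  }

  def main(args: Array[String]): Unit = {
    // Prints: bucket 2: left only / bucket 7: both / bucket 30: right only
    mergePlan((1 << 2) | (1 << 7), (1 << 7) | (1 << 30)).foreach(println)
  }
}
```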
- this - case that: LeafHashSet[A] => - // when that is a leaf, we can switch to the simpler Tree/Leaf implementation - // it is OK to swap the arguments because intersect is symmetric - // (we can't do this in case of diff, which is not symmetric) - that.intersect0(this, level, buffer, offset0) - case that: HashTrieSet[A] => - val a = this.elems - var abm = this.bitmap - var ai = 0 - - val b = that.elems - var bbm = that.bitmap - var bi = 0 - - // if the bitmasks do not overlap, the result is definitely empty so we can abort here - if ((abm & bbm) == 0) - return null - - // fetch a new temporary array that is guaranteed to be big enough (32 elements) - var offset = offset0 - var rs = 0 - var rbm = 0 - - // loop as long as there are bits left that are set in both abm and bbm - while ((abm & bbm) != 0) { - // highest remaining bit in abm - val alsb = abm ^ (abm & (abm - 1)) - // highest remaining bit in bbm - val blsb = bbm ^ (bbm & (bbm - 1)) - if (alsb == blsb) { - val sub1 = a(ai).intersect0(b(bi), level + 5, buffer, offset) - if (sub1 ne null) { - rs += sub1.size - rbm |= alsb - buffer(offset) = sub1 - offset += 1 - } - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb - ai += 1 - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb - bi += 1 - } else if (unsignedCompare(alsb - 1, blsb - 1)) { - // alsb is smaller than blsb, or alsb is set and blsb is 0 - // in any case, alsb is guaranteed to be set here! - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb - ai += 1 - } else { - // blsb is smaller than alsb, or blsb is set and alsb is 0 - // in any case, blsb is guaranteed to be set here! - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb - bi += 1 - } - } - - if (rbm == 0) { - // if the result bitmap is empty, the result is the empty set - null - } else if (rs == size0) { - // if the result has the same number of elements as this, it must be identical to this, - // so we might as well return this - this - } else if (rs == that.size0) { - // if the result has the same number of elements as that, it must be identical to that, - // so we might as well return that - that - } else { - val length = offset - offset0 - if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) - buffer(offset0) - else { - val elems = new Array[HashSet[A]](length) - System.arraycopy(buffer, offset0, elems, 0, length) - new HashTrieSet[A](rbm, elems, rs) - } - } - case _ => null - } - - override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { - case that if that eq this => - // shortcut for when that is this - // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" - // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B - // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking - // at these nodes! 
- null - case that: HashSet1[A] => - removed0(that.key, that.hash, level) - case that: HashTrieSet[A] => - val a = this.elems - var abm = this.bitmap - var ai = 0 - - val b = that.elems - var bbm = that.bitmap - var bi = 0 - - // fetch a new temporary array that is guaranteed to be big enough (32 elements) - var offset = offset0 - var rs = 0 - var rbm = 0 - - // loop until there are no more bits in abm - while(abm!=0) { - // highest remaining bit in abm - val alsb = abm ^ (abm & (abm - 1)) - // highest remaining bit in bbm - val blsb = bbm ^ (bbm & (bbm - 1)) - if (alsb == blsb) { - val sub1 = a(ai).diff0(b(bi), level + 5, buffer, offset) - if (sub1 ne null) { - rs += sub1.size - rbm |= alsb - buffer(offset) = sub1 - offset += 1 - } - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb; ai += 1 - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb; bi += 1 - } else if (unsignedCompare(alsb - 1, blsb - 1)) { - // alsb is smaller than blsb, or alsb is set and blsb is 0 - // in any case, alsb is guaranteed to be set here! - val sub1 = a(ai) - rs += sub1.size - rbm |= alsb - buffer(offset) = sub1; offset += 1 - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb; ai += 1 - } else { - // blsb is smaller than alsb, or blsb is set and alsb is 0 - // in any case, blsb is guaranteed to be set here! - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb; bi += 1 - } - } - if (rbm == 0) { - null - } else if (rs == this.size0) { - // if the result has the same number of elements as this, it must be identical to this, - // so we might as well return this - this - } else { - val length = offset - offset0 - if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) - buffer(offset0) - else { - val elems = new Array[HashSet[A]](length) - System.arraycopy(buffer, offset0, elems, 0, length) - new HashTrieSet[A](rbm, elems, rs) - } - } - case that: HashSetCollision1[A] => - // we remove the elements using removed0 so we can use the fact that we know the hash of all elements - // to be removed - @tailrec def removeAll(s:HashSet[A], r:ListSet[A]) : HashSet[A] = - if(r.isEmpty || (s eq null)) s - else removeAll(s.removed0(r.head, that.hash, level), r.tail) - removeAll(this, that.ks) - case _ => this - } - - override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask-1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.removed0(key, hash, level + 5) - if (sub eq subNew) this - else if (subNew eq null) { - val bitmapNew = bitmap ^ mask - if (bitmapNew != 0) { - val elemsNew = new Array[HashSet[A]](elems.length - 1) - Array.copy(elems, 0, elemsNew, 0, offset) - Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1) - val sizeNew = size - sub.size - // if we have only one child, which is not a HashTrieSet but a self-contained set like - // HashSet1 or HashSetCollision1, return the child instead - if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieSet[_]]) - elemsNew(0) - else - new HashTrieSet(bitmapNew, elemsNew, sizeNew) - } else - null - } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieSet[_]]) { - subNew - } else { - val elemsNew = new Array[HashSet[A]](elems.length) - Array.copy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - val sizeNew = size + (subNew.size - sub.size) - new 
HashTrieSet(bitmap, elemsNew, sizeNew) - } - } else { - this - } - } - - override protected def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match { - case that: HashTrieSet[A] if this.size0 <= that.size0 => - // create local mutable copies of members - var abm = this.bitmap - val a = this.elems - var ai = 0 - val b = that.elems - var bbm = that.bitmap - var bi = 0 - if ((abm & bbm) == abm) { - // I tried rewriting this using tail recursion, but the generated java byte code was less than optimal - while(abm!=0) { - // highest remaining bit in abm - val alsb = abm ^ (abm & (abm - 1)) - // highest remaining bit in bbm - val blsb = bbm ^ (bbm & (bbm - 1)) - // if both trees have a bit set at the same position, we need to check the subtrees - if (alsb == blsb) { - // we are doing a comparison of a child of this with a child of that, - // so we have to increase the level by 5 to keep track of how deep we are in the tree - if (!a(ai).subsetOf0(b(bi), level + 5)) - return false - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb; ai += 1 - } - // clear lowermost remaining one bit in bbm and increase the b index - // we must do this in any case - bbm &= ~blsb; bi += 1 - } - true - } else { - // the bitmap of this contains more one bits than the bitmap of that, - // so this can not possibly be a subset of that - false - } - case _ => - // if the other set is a HashTrieSet but has less elements than this, it can not be a subset - // if the other set is a HashSet1, we can not be a subset of it because we are a HashTrieSet with at least two children (see assertion) - // if the other set is a HashSetCollision1, we can not be a subset of it because we are a HashTrieSet with at least two different hash codes - // if the other set is the empty set, we are not a subset of it because we are not empty - false - } - - override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - // current offset - var offset = offset0 - // result size - var rs = 0 - // bitmap for kept elems - var kept = 0 - // loop over all elements - var i = 0 - while (i < elems.length) { - val result = elems(i).filter0(p, negate, level + 5, buffer, offset) - if (result ne null) { - buffer(offset) = result - offset += 1 - // add the result size - rs += result.size - // mark the bit i as kept - kept |= (1 << i) - } - i += 1 - } - if (offset == offset0) { - // empty - null - } else if (rs == size0) { - // unchanged - this - } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) { - // leaf - buffer(offset0) - } else { - // we have to return a HashTrieSet - val length = offset - offset0 - val elems1 = new Array[HashSet[A]](length) - System.arraycopy(buffer, offset0, elems1, 0, length) - val bitmap1 = if (length == elems.length) { - // we can reuse the original bitmap - bitmap - } else { - // calculate new bitmap by keeping just bits in the kept bitmask - keepBits(bitmap, kept) - } - new HashTrieSet(bitmap1, elems1, rs) - } - } - - override def iterator = new TrieIterator[A](elems.asInstanceOf[Array[Iterable[A]]]) { - final override def getElem(cc: AnyRef): A = cc.asInstanceOf[HashSet1[A]].key - } - - override def foreach[U](f: A => U): Unit = { - var i = 0 - while (i < elems.length) { - elems(i).foreach(f) - i += 1 - } - } - } - - /** - * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection - * @param size the maximum size of 
the collection to be generated - * @return the maximum buffer size - */ - @inline private def bufferSize(size: Int): Int = (size + 6) min (32 * 7) - - /** - * In many internal operations the empty set is represented as null for performance reasons. This method converts - * null to the empty set for use in public methods - */ - @inline private def nullToEmpty[A](s: HashSet[A]): HashSet[A] = if (s eq null) empty[A] else s - - /** - * Utility method to keep a subset of all bits in a given bitmap - * - * Example - * bitmap (binary): 00000001000000010000000100000001 - * keep (binary): 1010 - * result (binary): 00000001000000000000000100000000 - * - * @param bitmap the bitmap - * @param keep a bitmask containing which bits to keep - * @return the original bitmap with all bits where keep is not 1 set to 0 - */ - private def keepBits(bitmap: Int, keep: Int): Int = { - var result = 0 - var current = bitmap - var kept = keep - while (kept != 0) { - // lowest remaining bit in current - val lsb = current ^ (current & (current - 1)) - if ((kept & 1) != 0) { - // mark bit in result bitmap - result |= lsb - } - // clear lowest remaining one bit in abm - current &= ~lsb - // look at the next kept bit - kept >>>= 1 - } - result - } - - // unsigned comparison - @inline private[this] def unsignedCompare(i: Int, j: Int) = - (i < j) ^ (i < 0) ^ (j < 0) - - @SerialVersionUID(2L) private class SerializationProxy[A,B](@transient private var orig: HashSet[A]) extends Serializable { - private def writeObject(out: java.io.ObjectOutputStream) { - val s = orig.size - out.writeInt(s) - for (e <- orig) { - out.writeObject(e) - } - } - - private def readObject(in: java.io.ObjectInputStream) { - orig = empty - val s = in.readInt() - for (i <- 0 until s) { - val e = in.readObject().asInstanceOf[A] - orig = orig + e - } - } - - private def readResolve(): AnyRef = orig - } - -} - diff --git a/tests/scala2-library/src/library/scala/collection/immutable/IndexedSeq.scala b/tests/scala2-library/src/library/scala/collection/immutable/IndexedSeq.scala deleted file mode 100644 index 06a44b2bf3fd..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/IndexedSeq.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package immutable - -import generic._ -import mutable.{ArrayBuffer, Builder} - -/** A subtrait of `collection.IndexedSeq` which represents indexed sequences - * that are guaranteed immutable. - * $indexedSeqInfo - */ -trait IndexedSeq[+A] extends Seq[A] - with scala.collection.IndexedSeq[A] - with GenericTraversableTemplate[A, IndexedSeq] - with IndexedSeqLike[A, IndexedSeq[A]] { - override def companion: GenericCompanion[IndexedSeq] = IndexedSeq - - /** Returns this $coll as an indexed sequence. - * - * A new indexed sequence will not be built; lazy collections will stay lazy. - */ - @deprecatedOverriding("Immutable indexed sequences should do nothing on toIndexedSeq except cast themselves as an indexed sequence.", "2.11.0") - override def toIndexedSeq: IndexedSeq[A] = this - override def seq: IndexedSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. 
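`unsignedCompare`, restated above for `HashSet`, builds an unsigned less-than from signed comparisons; the deleted code predates routine use of `Integer.compareUnsigned`. A sketch checking the identity against the JDK method on a few boundary values (the demo object is illustrative):

```scala
object UnsignedCompareDemo {
  // Copy of the deleted helper: unsigned i < j using only signed operations.
  def unsignedCompare(i: Int, j: Int): Boolean = (i < j) ^ (i < 0) ^ (j < 0)

  def main(args: Array[String]): Unit = {
    val samples = Seq(0, 1, 5, Int.MaxValue, Int.MinValue, -1, -5)
    for (i <- samples; j <- samples) {
      // Agrees with the JDK's unsigned comparison on every pair.
      assert(unsignedCompare(i, j) == (Integer.compareUnsigned(i, j) < 0))
    }
    println(unsignedCompare(1, -1)) // true: -1 is 0xFFFFFFFF, the largest unsigned value
    println(unsignedCompare(-1, 1)) // false
  }
}
```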
- * @define coll indexed sequence - * @define Coll `IndexedSeq` - */ -object IndexedSeq extends IndexedSeqFactory[IndexedSeq] { - class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable { - def length = buf.length - def apply(idx: Int) = buf.apply(idx) - } - def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A] - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/IntMap.scala b/tests/scala2-library/src/library/scala/collection/immutable/IntMap.scala deleted file mode 100644 index 4ce7ed3d093a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/IntMap.scala +++ /dev/null @@ -1,449 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import scala.collection.generic.{ CanBuildFrom, BitOperations } -import scala.collection.mutable.{ Builder, MapBuilder } -import scala.annotation.tailrec - -/** Utility class for integer maps. - * @author David MacIver - */ -private[immutable] object IntMapUtils extends BitOperations.Int { - def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) - - def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) - else IntMap.Bin(p, m, t2, t1) - } - - def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { - case (left, IntMap.Nil) => left - case (IntMap.Nil, right) => right - case (left, right) => IntMap.Bin(prefix, mask, left, right) - } -} - -import IntMapUtils._ - -/** A companion object for integer maps. - * - * @define Coll `IntMap` - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. - * @since 2.7 - */ -object IntMap { - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[IntMap[A], (Int, B), IntMap[B]] = new CanBuildFrom[IntMap[A], (Int, B), IntMap[B]] { - def apply(from: IntMap[A]): Builder[(Int, B), IntMap[B]] = apply() - def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B]) - } - - def empty[T] : IntMap[T] = IntMap.Nil - - def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) - - def apply[T](elems: (Int, T)*): IntMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - - private[immutable] case object Nil extends IntMap[Nothing] { - // Important! Without this equals method in place, an infinite - // loop from Map.equals => size => pattern-match-on-Nil => equals - // develops. Case objects and custom equality don't mix without - // careful handling. 
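`IntMapUtils.branchMask`/`join` above implement the big-endian Patricia-trie step: branch on the highest bit where two keys disagree. The sketch below reimplements that decision with plain JDK calls (`prefixAbove` is a simplified stand-in for the `BitOperations` helpers, not the exact deleted code):

```scala
object PatriciaJoinDemo {
  // Highest bit at which the two keys differ: this becomes the Bin's branching mask.
  def branchMask(i: Int, j: Int): Int = Integer.highestOneBit(i ^ j)

  // Keep only the bits strictly above the branching bit: the prefix shared by both keys.
  def prefixAbove(key: Int, mask: Int): Int = key & ~(mask | (mask - 1))

  def main(args: Array[String]): Unit = {
    val (k1, k2) = (5, 7)      // 0b101 and 0b111
    val m = branchMask(k1, k2) // they first differ at bit 1, so m = 0b010
    assert(m == 2)
    assert(prefixAbove(k1, m) == 4 && prefixAbove(k2, m) == 4) // common prefix 0b100

    // The key with a zero at the branching bit goes to the left subtree,
    // mirroring the `zero(p1, m)` test in the deleted join.
    val k1GoesLeft = (k1 & m) == 0
    println(s"branch mask = $m, common prefix = ${prefixAbove(k1, m)}, 5 goes left = $k1GoesLeft")
  }
}
```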
- override def equals(that : Any) = that match { - case _: this.type => true - case _: IntMap[_] => false // The only empty IntMaps are eq Nil - case _ => super.equals(that) - } - } - - private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{ - def withValue[S](s: S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] - else IntMap.Tip(key, s) - } - private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { - def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] - else IntMap.Bin[S](prefix, mask, left, right) - } - } - -} - -import IntMap._ - -// Iterator over a non-empty IntMap. -private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] { - - // Basically this uses a simple stack to emulate conversion over the tree. However - // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and - // one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack - // depth is 33 and - var index = 0 - var buffer = new Array[AnyRef](33) - - def pop = { - index -= 1 - buffer(index).asInstanceOf[IntMap[V]] - } - - def push(x: IntMap[V]) { - buffer(index) = x.asInstanceOf[AnyRef] - index += 1 - } - push(it) - - /** - * What value do we assign to a tip? - */ - def valueOf(tip: IntMap.Tip[V]): T - - def hasNext = index != 0 - final def next: T = - pop match { - case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => { - push(right) - valueOf(t) - } - case IntMap.Bin(_, _, left, right) => { - push(right) - push(left) - next - } - case t@IntMap.Tip(_, _) => valueOf(t) - // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap - // and don't return an IntMapIterator for IntMap.Nil. - case IntMap.Nil => sys.error("Empty maps not allowed as subtrees") - } -} - -private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) { - def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value) -} - -private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) { - def valueOf(tip: IntMap.Tip[V]) = tip.value -} - -private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) { - def valueOf(tip: IntMap.Tip[V]) = tip.key -} - -import IntMap._ - -/** Specialised immutable map structure for integer keys, based on - * [[http://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] - * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. - * - * '''Note:''' This class is as of 2.8 largely superseded by HashMap. - * - * @tparam T type of the values associated with integer keys. - * - * @since 2.7 - * @define Coll `immutable.IntMap` - * @define coll immutable integer map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed abstract class IntMap[+T] extends AbstractMap[Int, T] - with Map[Int, T] - with MapLike[Int, T, IntMap[T]] { - - override def empty: IntMap[T] = IntMap.Nil - - override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Int, T)] - foreach(buffer += _) - buffer.toList - } - - /** - * Iterator over key, value pairs of the map in unsigned order of the keys. - * - * @return an iterator over pairs of integer keys and corresponding values. 
- */ - def iterator: Iterator[(Int, T)] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapEntryIterator(this) - } - - /** - * Loops over the key, value pairs of the map in unsigned order of the keys. - */ - override final def foreach[U](f: ((Int, T)) => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } - case IntMap.Tip(key, value) => f((key, value)) - case IntMap.Nil => - } - - override def keysIterator: Iterator[Int] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapKeyIterator(this) - } - - /** - * Loop over the keys of the map. The same as `keys.foreach(f)`, but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachKey(f: Int => Unit): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } - case IntMap.Tip(key, _) => f(key) - case IntMap.Nil => - } - - override def valuesIterator: Iterator[T] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapValueIterator(this) - } - - /** - * Loop over the values of the map. The same as `values.foreach(f)`, but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachValue(f: T => Unit): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } - case IntMap.Tip(_, value) => f(value) - case IntMap.Nil => - } - - override def stringPrefix = "IntMap" - - override def isEmpty = this == IntMap.Nil - - override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => { - val (newleft, newright) = (left.filter(f), right.filter(f)) - if ((left eq newleft) && (right eq newright)) this - else bin(prefix, mask, newleft, newright) - } - case IntMap.Tip(key, value) => - if (f((key, value))) this - else IntMap.Nil - case IntMap.Nil => IntMap.Nil - } - - def transform[S](f: (Int, T) => S): IntMap[S] = this match { - case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) - case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) - case IntMap.Nil => IntMap.Nil - } - - final override def size: Int = this match { - case IntMap.Nil => 0 - case IntMap.Tip(_, _) => 1 - case IntMap.Bin(_, _, left, right) => left.size + right.size - } - - final def get(key: Int): Option[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) - case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None - case IntMap.Nil => None - } - - final override def getOrElse[S >: T](key: Int, default: => S): S = this match { - case IntMap.Nil => default - case IntMap.Tip(key2, value) => if (key == key2) value else default - case IntMap.Bin(prefix, mask, left, right) => - if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) - } - - final override def apply(key: Int): T = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) - case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found") - case IntMap.Nil => sys.error("key not found") - } - - def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) - - override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) 
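The accessors above visit entries in unsigned key order, which is observable from user code because negative keys sort after all non-negative ones. A hedged usage sketch against the library's `scala.collection.immutable.IntMap`, assuming its behaviour still matches this deleted copy:

```scala
import scala.collection.immutable.IntMap

object IntMapUsageDemo {
  def main(args: Array[String]): Unit = {
    val m = IntMap(3 -> "three", -1 -> "minus one", 0 -> "zero")

    assert(m.get(3) == Some("three"))
    assert(m.getOrElse(99, "missing") == "missing")

    // Keys are visited in unsigned order, so -1 (0xFFFFFFFF) comes last.
    assert(m.keysIterator.toList == List(0, 3, -1))

    // updated returns a new map; the original is untouched.
    val m2 = m.updated(3, "III")
    assert(m(3) == "three" && m2(3) == "III")
  }
}
```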
IntMap.Bin(prefix, mask, left.updated(key, value), right) - else IntMap.Bin(prefix, mask, left, right.updated(key, value)) - case IntMap.Tip(key2, value2) => - if (key == key2) IntMap.Tip(key, value) - else join(key, IntMap.Tip(key, value), key2, this) - case IntMap.Nil => IntMap.Tip(key, value) - } - - /** - * Updates the map, using the provided function to resolve conflicts if the key is already present. - * - * Equivalent to: - * {{{ - * this.get(key) match { - * case None => this.update(key, value) - * case Some(oldvalue) => this.update(key, f(oldvalue, value) - * } - * }}} - * - * @tparam S The supertype of values in this `LongMap`. - * @param key The key to update - * @param value The value to use if there is no conflict - * @param f The function used to resolve conflicts. - * @return The updated map. - */ - def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right) - else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) - case IntMap.Tip(key2, value2) => - if (key == key2) IntMap.Tip(key, f(value2, value)) - else join(key, IntMap.Tip(key, value), key2, this) - case IntMap.Nil => IntMap.Tip(key, value) - } - - def - (key: Int): IntMap[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) this - else if (zero(key, mask)) bin(prefix, mask, left - key, right) - else bin(prefix, mask, left, right - key) - case IntMap.Tip(key2, _) => - if (key == key2) IntMap.Nil - else this - case IntMap.Nil => IntMap.Nil - } - - /** - * A combined transform and filter function. Returns an `IntMap` such that - * for each `(key, value)` mapping in this map, if `f(key, value) == None` - * the map contains no mapping for key, and if `f(key, value)`. - * - * @tparam S The type of the values in the resulting `LongMap`. - * @param f The transforming function. - * @return The modified map. - */ - def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => - val newleft = left.modifyOrRemove(f) - val newright = right.modifyOrRemove(f) - if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]] - else bin(prefix, mask, newleft, newright) - case IntMap.Tip(key, value) => f(key, value) match { - case None => - IntMap.Nil - case Some(value2) => - //hack to preserve sharing - if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]] - else IntMap.Tip(key, value2) - } - case IntMap.Nil => - IntMap.Nil - } - - /** - * Forms a union map with that map, using the combining function to resolve conflicts. - * - * @tparam S The type of values in `that`, a supertype of values in `this`. - * @param that The map to form a union with. - * @param f The function used to resolve conflicts between two mappings. - * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
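`updateWith` and `modifyOrRemove` above fold conflict resolution and transform-plus-filter into a single trie traversal. A hedged usage sketch, assuming the library's `immutable.IntMap` still exposes these methods with the deleted signatures:

```scala
import scala.collection.immutable.IntMap

object IntMapUpdateDemo {
  def main(args: Array[String]): Unit = {
    val counts = IntMap(1 -> 2, 2 -> 5)

    // updateWith: insert if absent, otherwise combine old and new values with f.
    val bumped = counts
      .updateWith(1, 1, (old: Int, incoming: Int) => old + incoming) // key present: 2 + 1
      .updateWith(7, 1, (old: Int, incoming: Int) => old + incoming) // key absent: just 1
    assert(bumped == IntMap(1 -> 3, 2 -> 5, 7 -> 1))

    // modifyOrRemove: transform values, dropping entries for which f returns None.
    val onlyBig = bumped.modifyOrRemove((k, v) => if (v >= 3) Some(v * 10) else None)
    assert(onlyBig == IntMap(1 -> 30, 2 -> 50))
  }
}
```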
- */ - def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{ - case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed - else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1) - else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f)) - } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed - else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2) - else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f)) - } - else { - if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) - else join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed - } - case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) - case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) - case (IntMap.Nil, x) => x - case (x, IntMap.Nil) => x - } - - /** - * Forms the intersection of these two maps with a combining function. The - * resulting map is a map that has only keys present in both maps and has - * values produced from the original mappings by combining them with `f`. - * - * @tparam S The type of values in `that`. - * @tparam R The type of values in the resulting `LongMap`. - * @param that The map to intersect with. - * @param f The combining function. - * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. - */ - def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match { - case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) IntMap.Nil - else if (zero(p2, m1)) l1.intersectionWith(that, f) - else r1.intersectionWith(that, f) - } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) - else { - if (!hasMatch(p1, p2, m2)) IntMap.Nil - else if (zero(p1, m2)) this.intersectionWith(l2, f) - else this.intersectionWith(r2, f) - } - case (IntMap.Tip(key, value), that) => that.get(key) match { - case None => IntMap.Nil - case Some(value2) => IntMap.Tip(key, f(key, value, value2)) - } - case (_, IntMap.Tip(key, value)) => this.get(key) match { - case None => IntMap.Nil - case Some(value2) => IntMap.Tip(key, f(key, value2, value)) - } - case (_, _) => IntMap.Nil - } - - /** - * Left biased intersection. Returns the map that has all the same mappings - * as this but only for keys which are present in the other map. - * - * @tparam R The type of values in `that`. - * @param that The map to intersect with. - * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. - */ - def intersection[R](that: IntMap[R]): IntMap[T] = - this.intersectionWith(that, (key: Int, value: T, value2: R) => value) - - def ++[S >: T](that: IntMap[S]) = - this.unionWith[S](that, (key, x, y) => y) - - /** - * The entry with the lowest key value considered in unsigned order. - */ - @tailrec - final def firstKey: Int = this match { - case Bin(_, _, l, r) => l.firstKey - case Tip(k, v) => k - case IntMap.Nil => sys.error("Empty set") - } - - /** - * The entry with the highest key value considered in unsigned order. 
- */ - @tailrec - final def lastKey: Int = this match { - case Bin(_, _, l, r) => r.lastKey - case Tip(k, v) => k - case IntMap.Nil => sys.error("Empty set") - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Iterable.scala b/tests/scala2-library/src/library/scala/collection/immutable/Iterable.scala deleted file mode 100644 index df322396d0c6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Iterable.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import mutable.Builder -import parallel.immutable.ParIterable - -/** A base trait for iterable collections that are guaranteed immutable. - * $iterableInfo - * - * @define Coll `immutable.Iterable` - * @define coll immutable iterable collection - */ -trait Iterable[+A] extends Traversable[A] -// with GenIterable[A] - with scala.collection.Iterable[A] - with GenericTraversableTemplate[A, Iterable] - with IterableLike[A, Iterable[A]] - with Parallelizable[A, ParIterable[A]] -{ - override def companion: GenericCompanion[Iterable] = Iterable - protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `immutable.IterableLike` gets introduced, please move this there! - override def seq: Iterable[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - * @define Coll `immutable.Iterable` - * @define coll immutable iterable collection - */ -object Iterable extends TraversableFactory[Iterable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Iterable[A]] = new mutable.ListBuffer -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/LinearSeq.scala b/tests/scala2-library/src/library/scala/collection/immutable/LinearSeq.scala deleted file mode 100644 index 2109bd5211ca..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/LinearSeq.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import mutable.Builder - -/** A subtrait of `collection.LinearSeq` which represents sequences that - * are guaranteed immutable. - * $linearSeqInfo - */ -trait LinearSeq[+A] extends Seq[A] - with scala.collection.LinearSeq[A] - with GenericTraversableTemplate[A, LinearSeq] - with LinearSeqLike[A, LinearSeq[A]] { - override def companion: GenericCompanion[LinearSeq] = LinearSeq - override def seq: LinearSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. 
- * @define coll immutable linear sequence - * @define Coll `immutable.LinearSeq` - */ -object LinearSeq extends SeqFactory[LinearSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, LinearSeq[A]] = new mutable.ListBuffer -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/List.scala b/tests/scala2-library/src/library/scala/collection/immutable/List.scala deleted file mode 100644 index 195e53c8bfe2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/List.scala +++ /dev/null @@ -1,503 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import mutable.{Builder, ListBuffer} -import scala.annotation.tailrec -import java.io.{ObjectOutputStream, ObjectInputStream} - -/** A class for immutable linked lists representing ordered collections - * of elements of type `A`. - * - * This class comes with two implementing case classes `scala.Nil` - * and `scala.::` that implement the abstract members `isEmpty`, - * `head` and `tail`. - * - * This class is optimal for last-in-first-out (LIFO), stack-like access patterns. If you need another access - * pattern, for example, random access or FIFO, consider using a collection more suited to this than `List`. - * - * $usesMutableState - * - * ==Performance== - * '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list. - * This includes the index-based lookup of elements, `length`, `append` and `reverse`. - * - * '''Space:''' `List` implements '''structural sharing''' of the tail list. This means that many operations are either - * zero- or constant-memory cost. - * {{{ - * val mainList = List(3, 2, 1) - * val with4 = 4 :: mainList // re-uses mainList, costs one :: instance - * val with42 = 42 :: mainList // also re-uses mainList, cost one :: instance - * val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList - * }}} - * - * @example {{{ - * // Make a list via the companion object factory - * val days = List("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday") - * - * // Make a list element-by-element - * val when = "AM" :: "PM" :: List() - * - * // Pattern match - * days match { - * case firstDay :: otherDays => - * println("The first day of the week is: " + firstDay) - * case List() => - * println("There don't seem to be any week days.") - * } - * }}} - * - * @note The functional list is characterized by persistence and structural sharing, thus offering considerable - * performance and space consumption benefits in some scenarios if used correctly. - * However, note that objects having multiple references into the same functional list (that is, - * objects that rely on structural sharing), will be serialized and deserialized with multiple lists, one for - * each reference to it. I.e. structural sharing is lost after serialization/deserialization. - * - * @author Martin Odersky and others - * @version 2.8 - * @since 1.0 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lists "Scala's Collection Library overview"]] - * section on `Lists` for more information. 
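The note above about structural sharing being lost across serialization can be checked with a short round trip. The helper below is an illustrative sketch (names and element values are made up; the final comment restates the behaviour described in the note):

{{{
import java.io._

val shared = List(2, 1)
val xs = 3 :: shared
val ys = 4 :: shared
assert(xs.tail eq ys.tail)      // the tail is shared before serialization

def roundTrip[A <: AnyRef](a: A): A = {
  val buf = new ByteArrayOutputStream()
  val out = new ObjectOutputStream(buf)
  out.writeObject(a); out.close()
  val in = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
  in.readObject().asInstanceOf[A]
}

val (xs2, ys2) = roundTrip((xs, ys))
assert(xs2 == xs && ys2 == ys)  // the element values survive the round trip
// xs2.tail and ys2.tail are rebuilt as two separate lists: the sharing is gone.
}}}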
- * - * @define coll list - * @define Coll `List` - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `List[B]` because an implicit of type `CanBuildFrom[List, B, That]` - * is defined in object `List`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `List`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-6084104484083858598L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -sealed abstract class List[+A] extends AbstractSeq[A] - with LinearSeq[A] - with Product - with GenericTraversableTemplate[A, List] - with LinearSeqOptimized[A, List[A]] - with scala.Serializable { - override def companion: GenericCompanion[List] = List - - def isEmpty: Boolean - def head: A - def tail: List[A] - - // New methods in List - - /** Adds an element at the beginning of this list. - * @param x the element to prepend. - * @return a list which contains `x` as first element and - * which continues with this list. - * - * @usecase def ::(x: A): List[A] - * @inheritdoc - * - * Example: - * {{{1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)}}} - */ - def ::[B >: A] (x: B): List[B] = - new scala.collection.immutable.::(x, this) - - /** Adds the elements of a given list in front of this list. - * @param prefix The list elements to prepend. - * @return a list resulting from the concatenation of the given - * list `prefix` and this list. - * - * @usecase def :::(prefix: List[A]): List[A] - * @inheritdoc - * - * Example: - * {{{List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)}}} - */ - def :::[B >: A](prefix: List[B]): List[B] = - if (isEmpty) prefix - else if (prefix.isEmpty) this - else (new ListBuffer[B] ++= prefix).prependToList(this) - - /** Adds the elements of a given list in reverse order in front of this list. - * `xs reverse_::: ys` is equivalent to - * `xs.reverse ::: ys` but is more efficient. - * - * @param prefix the prefix to reverse and then prepend - * @return the concatenation of the reversed prefix and the current list. - * - * @usecase def reverse_:::(prefix: List[A]): List[A] - * @inheritdoc - */ - def reverse_:::[B >: A](prefix: List[B]): List[B] = { - var these: List[B] = this - var pres = prefix - while (!pres.isEmpty) { - these = pres.head :: these - pres = pres.tail - } - these - } - - /** Builds a new list by applying a function to all elements of this list. - * Like `xs map f`, but returns `xs` unchanged if function - * `f` maps all elements to themselves (as determined by `eq`). - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a list resulting from applying the given function - * `f` to each element of this list and collecting the results. - * - * @usecase def mapConserve(f: A => A): List[A] - * @inheritdoc - */ - /*@inline*/ final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = { - // Note to developers: there exists a duplication between this function and `reflect.internal.util.Collections#map2Conserve`. - // If any successful optimization attempts or other changes are made, please rehash them there too. 
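The mapConserve loop that follows is easiest to motivate by its observable behaviour; a small sketch (the list contents are arbitrary):

{{{
val xs: List[String] = List("a", "b", "c")

// If f maps every element to itself (by reference), the very same list comes back.
val same = xs.mapConserve(x => x)
assert(same eq xs)

// Otherwise a new list is built for the changed elements.
val upper = xs.mapConserve(_.toUpperCase)
assert(upper == List("A", "B", "C"))
}}}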
- @tailrec - def loop(mappedHead: List[B] = Nil, mappedLast: ::[B], unchanged: List[A], pending: List[A]): List[B] = - if (pending.isEmpty) { - if (mappedHead eq null) unchanged - else { - mappedLast.tl = unchanged - mappedHead - } - } - else { - val head0 = pending.head - val head1 = f(head0) - - if (head1 eq head0.asInstanceOf[AnyRef]) - loop(mappedHead, mappedLast, unchanged, pending.tail) - else { - var xc = unchanged - var mappedHead1: List[B] = mappedHead - var mappedLast1: ::[B] = mappedLast - while (xc ne pending) { - val next = new ::[B](xc.head, Nil) - if (mappedHead1 eq null) mappedHead1 = next - if (mappedLast1 ne null) mappedLast1.tl = next - mappedLast1 = next - xc = xc.tail - } - val next = new ::(head1, Nil) - if (mappedHead1 eq null) mappedHead1 = next - if (mappedLast1 ne null) mappedLast1.tl = next - mappedLast1 = next - val tail0 = pending.tail - loop(mappedHead1, mappedLast1, tail0, tail0) - - } - } - loop(null, null, this, this) - } - - // Overridden methods from IterableLike and SeqLike or overloaded variants of such methods - - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = - if (bf eq List.ReusableCBF) (this ::: that.seq.toList).asInstanceOf[That] - else super.++(that) - - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[List[A], B, That]): That = bf match { - case _: List.GenericCanBuildFrom[_] => (elem :: this).asInstanceOf[That] - case _ => super.+:(elem)(bf) - } - - override def toList: List[A] = this - - override def take(n: Int): List[A] = if (isEmpty || n <= 0) Nil else { - val h = new ::(head, Nil) - var t = h - var rest = tail - var i = 1 - while ({if (rest.isEmpty) return this; i < n}) { - i += 1 - val nx = new ::(rest.head, Nil) - t.tl = nx - t = nx - rest = rest.tail - } - h - } - - override def drop(n: Int): List[A] = { - var these = this - var count = n - while (!these.isEmpty && count > 0) { - these = these.tail - count -= 1 - } - these - } - - /** - * @example {{{ - * // Given a list - * val letters = List('a','b','c','d','e') - * - * // `slice` returns all elements beginning at index `from` and afterwards, - * // up until index `until` (excluding index `until`.) 
- * letters.slice(1,3) // Returns List('b','c') - * }}} - */ - override def slice(from: Int, until: Int): List[A] = { - val lo = scala.math.max(from, 0) - if (until <= lo || isEmpty) Nil - else this drop lo take (until - lo) - } - - override def takeRight(n: Int): List[A] = { - @tailrec - def loop(lead: List[A], lag: List[A]): List[A] = lead match { - case Nil => lag - case _ :: tail => loop(tail, lag.tail) - } - loop(drop(n), this) - } - - // dropRight is inherited from LinearSeq - - override def splitAt(n: Int): (List[A], List[A]) = { - val b = new ListBuffer[A] - var i = 0 - var these = this - while (!these.isEmpty && i < n) { - i += 1 - b += these.head - these = these.tail - } - (b.toList, these) - } - - final override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That = { - if (bf eq List.ReusableCBF) { - if (this eq Nil) Nil.asInstanceOf[That] else { - val h = new ::[B](f(head), Nil) - var t: ::[B] = h - var rest = tail - while (rest ne Nil) { - val nx = new ::(f(rest.head), Nil) - t.tl = nx - t = nx - rest = rest.tail - } - h.asInstanceOf[That] - } - } - else super.map(f) - } - - final override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[List[A], B, That]): That = { - if (bf eq List.ReusableCBF) { - if (this eq Nil) Nil.asInstanceOf[That] else { - var rest = this - var h: ::[B] = null - // Special case for first element - do { - val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied) - if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) h = new ::(x.asInstanceOf[B], Nil) - rest = rest.tail - if (rest eq Nil) return (if (h eq null ) Nil else h).asInstanceOf[That] - } while (h eq null) - var t = h - // Remaining elements - do { - val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied) - if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) { - val nx = new ::(x.asInstanceOf[B], Nil) - t.tl = nx - t = nx - } - rest = rest.tail - } while (rest ne Nil) - h.asInstanceOf[That] - } - } - else super.collect(pf) - } - - final override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = { - if (bf eq List.ReusableCBF) { - if (this eq Nil) Nil.asInstanceOf[That] else { - var rest = this - var found = false - var h: ::[B] = null - var t: ::[B] = null - while (rest ne Nil) { - f(rest.head).seq.foreach{ b => - if (!found) { - h = new ::(b, Nil) - t = h - found = true - } - else { - val nx = new ::(b, Nil) - t.tl = nx - t = nx - } - } - rest = rest.tail - } - (if (!found) Nil else h).asInstanceOf[That] - } - } - else super.flatMap(f) - } - - @inline final override def takeWhile(p: A => Boolean): List[A] = { - val b = new ListBuffer[A] - var these = this - while (!these.isEmpty && p(these.head)) { - b += these.head - these = these.tail - } - b.toList - } - - @inline final override def dropWhile(p: A => Boolean): List[A] = { - @tailrec - def loop(xs: List[A]): List[A] = - if (xs.isEmpty || !p(xs.head)) xs - else loop(xs.tail) - - loop(this) - } - - @inline final override def span(p: A => Boolean): (List[A], List[A]) = { - val b = new ListBuffer[A] - var these = this - while (!these.isEmpty && p(these.head)) { - b += these.head - these = these.tail - } - (b.toList, these) - } - - // Overridden with an implementation identical to the inherited one (at this time) - // solely so it can be finalized and thus inlinable. 
- @inline final override def foreach[U](f: A => U) { - var these = this - while (!these.isEmpty) { - f(these.head) - these = these.tail - } - } - - override def reverse: List[A] = { - var result: List[A] = Nil - var these = this - while (!these.isEmpty) { - result = these.head :: result - these = these.tail - } - result - } - - override def foldRight[B](z: B)(op: (A, B) => B): B = - reverse.foldLeft(z)((right, left) => op(left, right)) - - override def stringPrefix = "List" - - override def toStream : Stream[A] = - if (isEmpty) Stream.Empty - else new Stream.Cons(head, tail.toStream) - - // Create a proxy for Java serialization that allows us to avoid mutation - // during deserialization. This is the Serialization Proxy Pattern. - protected final def writeReplace(): AnyRef = new List.SerializationProxy(this) -} - -/** The empty list. - * - * @author Martin Odersky - * @version 1.0, 15/07/2003 - * @since 2.8 - */ -@SerialVersionUID(0 - 8256821097970055419L) -case object Nil extends List[Nothing] { - override def isEmpty = true - override def head: Nothing = - throw new NoSuchElementException("head of empty list") - override def tail: List[Nothing] = - throw new UnsupportedOperationException("tail of empty list") - // Removal of equals method here might lead to an infinite recursion similar to IntMap.equals. - override def equals(that: Any) = that match { - case that1: scala.collection.GenSeq[_] => that1.isEmpty - case _ => false - } -} - -/** A non empty list characterized by a head and a tail. - * @param head the first element of the list - * @param tl the list containing the remaining elements of this list after the first one. - * @tparam B the type of the list elements. - * @author Martin Odersky - * @version 1.0, 15/07/2003 - * @since 2.8 - */ -@SerialVersionUID(509929039250432923L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] { - override def tail : List[B] = tl - override def isEmpty: Boolean = false -} - -/** $factoryInfo - * @define coll list - * @define Coll `List` - */ -object List extends SeqFactory[List] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A] - - override def empty[A]: List[A] = Nil - - override def apply[A](xs: A*): List[A] = xs.toList - - private[collection] val partialNotApplied = new Function1[Any, Any] { def apply(x: Any): Any = this } - - @SerialVersionUID(1L) - private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable { - - private def writeObject(out: ObjectOutputStream) { - out.defaultWriteObject() - var xs: List[A] = orig - while (!xs.isEmpty) { - out.writeObject(xs.head) - xs = xs.tail - } - out.writeObject(ListSerializeEnd) - } - - // Java serialization calls this before readResolve during deserialization. - // Read the whole list and store it in `orig`. 
- private def readObject(in: ObjectInputStream) { - in.defaultReadObject() - val builder = List.newBuilder[A] - while (true) in.readObject match { - case ListSerializeEnd => - orig = builder.result() - return - case a => - builder += a.asInstanceOf[A] - } - } - - // Provide the result stored in `orig` for Java serialization - private def readResolve(): AnyRef = orig - } -} - -/** Only used for list serialization */ -@SerialVersionUID(0L - 8476791151975527571L) -private[scala] case object ListSerializeEnd diff --git a/tests/scala2-library/src/library/scala/collection/immutable/ListMap.scala b/tests/scala2-library/src/library/scala/collection/immutable/ListMap.scala deleted file mode 100644 index 589f8bbba94f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/ListMap.scala +++ /dev/null @@ -1,166 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import scala.annotation.tailrec - -/** - * $factoryInfo - * - * Note that each element insertion takes O(n) time, which means that creating a list map with - * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of - * elements. - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]] - * section on `List Maps` for more information. - * @since 1 - * @define Coll ListMap - * @define coll list map - */ -object ListMap extends ImmutableMapFactory[ListMap] { - - /** - * $mapCanBuildFromInfo - */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] = - new MapCanBuildFrom[A, B] - - def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]] - - @SerialVersionUID(-8256686706655863282L) - private object EmptyListMap extends ListMap[Any, Nothing] -} - -/** - * This class implements immutable maps using a list-based data structure. List map iterators and - * traversal methods visit key-value pairs in the order whey were first inserted. - * - * Entries are stored internally in reversed insertion order, which means the newest key is at the - * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` - * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes - * this collection suitable only for a small number of elements. - * - * Instances of `ListMap` represent empty maps; they can be either created by calling the - * constructor directly, or by applying the function `ListMap.empty`. 
- * - * @tparam A the type of the keys contained in this list map - * @tparam B the type of the values associated with the keys - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.0, 01/01/2007 - * @since 1 - * @define Coll ListMap - * @define coll list map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(301002838095710379L) -sealed class ListMap[A, +B] extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, ListMap[A, B]] - with Serializable { - - override def empty = ListMap.empty - - override def size: Int = 0 - override def isEmpty: Boolean = true - - def get(key: A): Option[B] = None - - override def updated[B1 >: B](key: A, value: B1): ListMap[A, B1] = new Node[B1](key, value) - - def +[B1 >: B](kv: (A, B1)): ListMap[A, B1] = new Node[B1](kv._1, kv._2) - def -(key: A): ListMap[A, B] = this - - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] = - if (xs.isEmpty) this - else ((repr: ListMap[A, B1]) /: xs) (_ + _) - - def iterator: Iterator[(A, B)] = { - def reverseList = { - var curr: ListMap[A, B] = this - var res: List[(A, B)] = Nil - while (!curr.isEmpty) { - res = (curr.key, curr.value) :: res - curr = curr.next - } - res - } - reverseList.iterator - } - - protected def key: A = throw new NoSuchElementException("key of empty map") - protected def value: B = throw new NoSuchElementException("value of empty map") - protected def next: ListMap[A, B] = throw new NoSuchElementException("next of empty map") - - override def stringPrefix = "ListMap" - - /** - * Represents an entry in the `ListMap`. - */ - @SerialVersionUID(-6453056603889598734L) - protected class Node[B1 >: B](override protected val key: A, - override protected val value: B1) extends ListMap[A, B1] with Serializable { - - override def size: Int = sizeInternal(this, 0) - - @tailrec private[this] def sizeInternal(cur: ListMap[A, B1], acc: Int): Int = - if (cur.isEmpty) acc - else sizeInternal(cur.next, acc + 1) - - override def isEmpty: Boolean = false - - override def apply(k: A): B1 = applyInternal(this, k) - - @tailrec private[this] def applyInternal(cur: ListMap[A, B1], k: A): B1 = - if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k) - else if (k == cur.key) cur.value - else applyInternal(cur.next, k) - - override def get(k: A): Option[B1] = getInternal(this, k) - - @tailrec private[this] def getInternal(cur: ListMap[A, B1], k: A): Option[B1] = - if (cur.isEmpty) None - else if (k == cur.key) Some(cur.value) - else getInternal(cur.next, k) - - override def contains(k: A): Boolean = containsInternal(this, k) - - @tailrec private[this] def containsInternal(cur: ListMap[A, B1], k: A): Boolean = - if(cur.isEmpty) false - else if (k == cur.key) true - else containsInternal(cur.next, k) - - override def updated[B2 >: B1](k: A, v: B2): ListMap[A, B2] = { - val m = this - k - new m.Node[B2](k, v) - } - - override def +[B2 >: B1](kv: (A, B2)): ListMap[A, B2] = { - val m = this - kv._1 - new m.Node[B2](kv._1, kv._2) - } - - override def -(k: A): ListMap[A, B1] = removeInternal(k, this, Nil) - - @tailrec private[this] def removeInternal(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] = - if (cur.isEmpty) acc.last - else if (k == cur.key) (cur.next /: acc) { case (t, h) => new t.Node(h.key, h.value) } - else removeInternal(k, cur.next, cur :: acc) - - override protected def next: ListMap[A, B1] = ListMap.this - - override def last: (A, B1) = (key, value) - override def init: ListMap[A, B1] = next 
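A short sketch of the behaviour described above, with hypothetical keys: iteration follows insertion order, the newest entry is cheap to reach, and re-adding a key moves it to the end because the old binding is removed first.

{{{
import scala.collection.immutable.ListMap

val m = ListMap("one" -> 1, "two" -> 2, "three" -> 3)
assert(m.toList == List("one" -> 1, "two" -> 2, "three" -> 3)) // insertion order
assert(m.last == ("three" -> 3))                               // newest entry, O(1)

// `+` on an existing key first removes the old binding, so the key ends up last.
val m2 = m + ("one" -> 10)
assert(m2.toList == List("two" -> 2, "three" -> 3, "one" -> 10))
}}}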
- } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/ListSet.scala b/tests/scala2-library/src/library/scala/collection/immutable/ListSet.scala deleted file mode 100644 index d9795e9161f0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/ListSet.scala +++ /dev/null @@ -1,136 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import scala.annotation.tailrec - -/** - * $factoryInfo - * - * Note that each element insertion takes O(n) time, which means that creating a list set with - * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of - * elements. - * - * @since 1 - * @define Coll ListSet - * @define coll list set - */ -object ListSet extends ImmutableSetFactory[ListSet] { - - /** - * $setCanBuildFromInfo - */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = - setCanBuildFrom[A] - - @SerialVersionUID(5010379588739277132L) - private object EmptyListSet extends ListSet[Any] - private[collection] def emptyInstance: ListSet[Any] = EmptyListSet -} - -/** - * This class implements immutable sets using a list-based data structure. List set iterators and - * traversal methods visit elements in the order whey were first inserted. - * - * Elements are stored internally in reversed insertion order, which means the newest element is at - * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and - * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which - * makes this collection suitable only for a small number of elements. - * - * Instances of `ListSet` represent empty sets; they can be either created by calling the - * constructor directly, or by applying the function `ListSet.empty`. - * - * @tparam A the type of the elements contained in this list set - * - * @author Matthias Zenger - * @version 1.0, 09/07/2003 - * @since 1 - * @define Coll ListSet - * @define coll list set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-8417059026623606218L) -sealed class ListSet[A] extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, ListSet] - with SetLike[A, ListSet[A]] - with Serializable { - - override def companion: GenericCompanion[ListSet] = ListSet - - override def size: Int = 0 - override def isEmpty: Boolean = true - - def contains(elem: A): Boolean = false - - def +(elem: A): ListSet[A] = new Node(elem) - def -(elem: A): ListSet[A] = this - - override def ++(xs: GenTraversableOnce[A]): ListSet[A] = - if (xs.isEmpty) this - else (repr /: xs) (_ + _) - - def iterator: Iterator[A] = { - def reverseList = { - var curr: ListSet[A] = this - var res: List[A] = Nil - while (!curr.isEmpty) { - res = curr.elem :: res - curr = curr.next - } - res - } - reverseList.iterator - } - - protected def elem: A = throw new NoSuchElementException("elem of empty set") - protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") - - override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]] - - override def stringPrefix = "ListSet" - - /** - * Represents an entry in the `ListSet`. 
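ListSet behaves analogously to ListMap; a brief sketch with arbitrary elements:

{{{
import scala.collection.immutable.ListSet

val s = ListSet(3, 1, 2) + 1          // adding an existing element is a no-op
assert(s.toList == List(3, 1, 2))     // iteration follows insertion order
assert((s - 1).toList == List(3, 2))  // removal rebuilds the O(n) backing list
}}}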
- */ - @SerialVersionUID(-787710309854855049L) - protected class Node(override protected val elem: A) extends ListSet[A] with Serializable { - - override def size = sizeInternal(this, 0) - - @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = - if (n.isEmpty) acc - else sizeInternal(n.next, acc + 1) - - override def isEmpty: Boolean = false - - override def contains(e: A) = containsInternal(this, e) - - @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = - !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) - - override def +(e: A): ListSet[A] = if (contains(e)) this else new Node(e) - - override def -(e: A): ListSet[A] = removeInternal(e, this, Nil) - - @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = - if (cur.isEmpty) acc.last - else if (k == cur.elem) (cur.next /: acc) { case (t, h) => new t.Node(h.elem) } - else removeInternal(k, cur.next, cur :: acc) - - override protected def next: ListSet[A] = ListSet.this - - override def last: A = elem - override def init: ListSet[A] = next - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/LongMap.scala b/tests/scala2-library/src/library/scala/collection/immutable/LongMap.scala deleted file mode 100644 index 8f3b100ad148..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/LongMap.scala +++ /dev/null @@ -1,436 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import scala.collection.generic.{ CanBuildFrom, BitOperations } -import scala.collection.mutable.{ Builder, MapBuilder } -import scala.annotation.tailrec - -/** Utility class for long maps. - * @author David MacIver - */ -private[immutable] object LongMapUtils extends BitOperations.Long { - def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) - - def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) LongMap.Bin(p, m, t1, t2) - else LongMap.Bin(p, m, t2, t1) - } - - def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match { - case (left, LongMap.Nil) => left - case (LongMap.Nil, right) => right - case (left, right) => LongMap.Bin(prefix, mask, left, right) - } -} - -import LongMapUtils._ - -/** A companion object for long maps. - * - * @define Coll `LongMap` - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. - * @since 2.7 - */ -object LongMap { - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[LongMap[A], (Long, B), LongMap[B]] = new CanBuildFrom[LongMap[A], (Long, B), LongMap[B]] { - def apply(from: LongMap[A]): Builder[(Long, B), LongMap[B]] = apply() - def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B]) - } - - def empty[T]: LongMap[T] = LongMap.Nil - def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) - def apply[T](elems: (Long, T)*): LongMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - - private[immutable] case object Nil extends LongMap[Nothing] { - // Important, don't remove this! See IntMap for explanation. 
- override def equals(that : Any) = that match { - case (that: AnyRef) if (this eq that) => true - case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil - case that => super.equals(that) - } - } - - private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] { - def withValue[S](s: S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]] - else LongMap.Tip(key, s) - } - private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] { - def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]] - else LongMap.Bin[S](prefix, mask, left, right) - } - } -} - -// Iterator over a non-empty LongMap. -private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] { - - // Basically this uses a simple stack to emulate conversion over the tree. However - // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and - // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack - // depth is 65 - var index = 0 - var buffer = new Array[AnyRef](65) - - def pop() = { - index -= 1 - buffer(index).asInstanceOf[LongMap[V]] - } - - def push(x: LongMap[V]) { - buffer(index) = x.asInstanceOf[AnyRef] - index += 1 - } - push(it) - - /** - * What value do we assign to a tip? - */ - def valueOf(tip: LongMap.Tip[V]): T - - def hasNext = index != 0 - final def next: T = - pop() match { - case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => { - push(right) - valueOf(t) - } - case LongMap.Bin(_, _, left, right) => { - push(right) - push(left) - next - } - case t@LongMap.Tip(_, _) => valueOf(t) - // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap - // and don't return an LongMapIterator for LongMap.Nil. - case LongMap.Nil => sys.error("Empty maps not allowed as subtrees") - } -} - -private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){ - def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value) -} - -private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){ - def valueOf(tip: LongMap.Tip[V]) = tip.value -} - -private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){ - def valueOf(tip: LongMap.Tip[V]) = tip.key -} - -/** - * Specialised immutable map structure for long keys, based on - * Fast Mergeable Long Maps - * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. - * - * Note: This class is as of 2.8 largely superseded by HashMap. - * - * @tparam T type of the values associated with the long keys. - * - * @since 2.7 - * @define Coll `immutable.LongMap` - * @define coll immutable long integer map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed abstract class LongMap[+T] -extends AbstractMap[Long, T] - with Map[Long, T] - with MapLike[Long, T, LongMap[T]] { - - override def empty: LongMap[T] = LongMap.Nil - - override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Long, T)] - foreach(buffer += _) - buffer.toList - } - - /** - * Iterator over key, value pairs of the map in unsigned order of the keys. - * - * @return an iterator over pairs of long keys and corresponding values. 
- */ - def iterator: Iterator[(Long, T)] = this match { - case LongMap.Nil => Iterator.empty - case _ => new LongMapEntryIterator(this) - } - - /** - * Loops over the key, value pairs of the map in unsigned order of the keys. - */ - override final def foreach[U](f: ((Long, T)) => U): Unit = this match { - case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } - case LongMap.Tip(key, value) => f((key, value)) - case LongMap.Nil => - } - - override def keysIterator: Iterator[Long] = this match { - case LongMap.Nil => Iterator.empty - case _ => new LongMapKeyIterator(this) - } - - /** - * Loop over the keys of the map. The same as keys.foreach(f), but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachKey(f: Long => Unit): Unit = this match { - case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } - case LongMap.Tip(key, _) => f(key) - case LongMap.Nil => - } - - override def valuesIterator: Iterator[T] = this match { - case LongMap.Nil => Iterator.empty - case _ => new LongMapValueIterator(this) - } - - /** - * Loop over the values of the map. The same as values.foreach(f), but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachValue(f: T => Unit): Unit = this match { - case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } - case LongMap.Tip(_, value) => f(value) - case LongMap.Nil => - } - - override def stringPrefix = "LongMap" - - override def isEmpty = this == LongMap.Nil - - override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match { - case LongMap.Bin(prefix, mask, left, right) => { - val (newleft, newright) = (left.filter(f), right.filter(f)) - if ((left eq newleft) && (right eq newright)) this - else bin(prefix, mask, newleft, newright) - } - case LongMap.Tip(key, value) => - if (f((key, value))) this - else LongMap.Nil - case LongMap.Nil => LongMap.Nil - } - - def transform[S](f: (Long, T) => S): LongMap[S] = this match { - case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) - case t@LongMap.Tip(key, value) => t.withValue(f(key, value)) - case LongMap.Nil => LongMap.Nil - } - - final override def size: Int = this match { - case LongMap.Nil => 0 - case LongMap.Tip(_, _) => 1 - case LongMap.Bin(_, _, left, right) => left.size + right.size - } - - final def get(key: Long): Option[T] = this match { - case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) - case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None - case LongMap.Nil => None - } - - final override def getOrElse[S >: T](key: Long, default: => S): S = this match { - case LongMap.Nil => default - case LongMap.Tip(key2, value) => if (key == key2) value else default - case LongMap.Bin(prefix, mask, left, right) => - if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) - } - - final override def apply(key: Long): T = this match { - case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) - case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found") - case LongMap.Nil => sys.error("key not found") - } - - def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) - - override def updated[S >: T](key: Long, value: S): LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, 
this) - else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right) - else LongMap.Bin(prefix, mask, left, right.updated(key, value)) - case LongMap.Tip(key2, value2) => - if (key == key2) LongMap.Tip(key, value) - else join(key, LongMap.Tip(key, value), key2, this) - case LongMap.Nil => LongMap.Tip(key, value) - } - - /** - * Updates the map, using the provided function to resolve conflicts if the key is already present. - * - * Equivalent to - * {{{ - * this.get(key) match { - * case None => this.update(key, value) - * case Some(oldvalue) => this.update(key, f(oldvalue, value) - * } - * }}} - * - * @tparam S The supertype of values in this `LongMap`. - * @param key The key to update. - * @param value The value to use if there is no conflict. - * @param f The function used to resolve conflicts. - * @return The updated map. - */ - def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right) - else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) - case LongMap.Tip(key2, value2) => - if (key == key2) LongMap.Tip(key, f(value2, value)) - else join(key, LongMap.Tip(key, value), key2, this) - case LongMap.Nil => LongMap.Tip(key, value) - } - - def -(key: Long): LongMap[T] = this match { - case LongMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) this - else if (zero(key, mask)) bin(prefix, mask, left - key, right) - else bin(prefix, mask, left, right - key) - case LongMap.Tip(key2, _) => - if (key == key2) LongMap.Nil - else this - case LongMap.Nil => LongMap.Nil - } - - /** - * A combined transform and filter function. Returns an `LongMap` such that - * for each `(key, value)` mapping in this map, if `f(key, value) == None` - * the map contains no mapping for key, and if `f(key, value)`. - * - * @tparam S The type of the values in the resulting `LongMap`. - * @param f The transforming function. - * @return The modified map. - */ - def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => { - val newleft = left.modifyOrRemove(f) - val newright = right.modifyOrRemove(f) - if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]] - else bin(prefix, mask, newleft, newright) - } - case LongMap.Tip(key, value) => f(key, value) match { - case None => LongMap.Nil - case Some(value2) => - //hack to preserve sharing - if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]] - else LongMap.Tip(key, value2) - } - case LongMap.Nil => LongMap.Nil - } - - /** - * Forms a union map with that map, using the combining function to resolve conflicts. - * - * @tparam S The type of values in `that`, a supertype of values in `this`. - * @param that The map to form a union with. - * @param f The function used to resolve conflicts between two mappings. - * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
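updateWith and modifyOrRemove, both defined above, cover the common "upsert" and "transform-or-drop" patterns. An illustrative sketch (the keys and counts are made up):

{{{
import scala.collection.immutable.LongMap

// Count occurrences: the third argument resolves the conflict when the key exists.
val ids = List(1L, 2L, 1L, 3L, 1L)
val counts = ids.foldLeft(LongMap.empty[Int]) { (m, id) =>
  m.updateWith(id, 1, (old: Int, inc: Int) => old + inc)
}
assert(counts == LongMap(1L -> 3, 2L -> 1, 3L -> 1))

// Transform and filter in one pass: returning None removes the entry.
val frequent = counts.modifyOrRemove((k, n) => if (n > 1) Some(n * 10) else None)
assert(frequent == LongMap(1L -> 30))
}}}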
- */ - def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{ - case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed - else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1) - else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)) - } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed - else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2) - else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)) - } - else { - if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) - else join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed - } - case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) // TODO: remove [S] when scala/bug#5548 is fixed - case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) - case (LongMap.Nil, x) => x - case (x, LongMap.Nil) => x - } - - /** - * Forms the intersection of these two maps with a combining function. The - * resulting map is a map that has only keys present in both maps and has - * values produced from the original mappings by combining them with `f`. - * - * @tparam S The type of values in `that`. - * @tparam R The type of values in the resulting `LongMap`. - * @param that The map to intersect with. - * @param f The combining function. - * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. - */ - def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { - case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) LongMap.Nil - else if (zero(p2, m1)) l1.intersectionWith(that, f) - else r1.intersectionWith(that, f) - } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) - else { - if (!hasMatch(p1, p2, m2)) LongMap.Nil - else if (zero(p1, m2)) this.intersectionWith(l2, f) - else this.intersectionWith(r2, f) - } - case (LongMap.Tip(key, value), that) => that.get(key) match { - case None => LongMap.Nil - case Some(value2) => LongMap.Tip(key, f(key, value, value2)) - } - case (_, LongMap.Tip(key, value)) => this.get(key) match { - case None => LongMap.Nil - case Some(value2) => LongMap.Tip(key, f(key, value2, value)) - } - case (_, _) => LongMap.Nil - } - - /** - * Left biased intersection. Returns the map that has all the same mappings as this but only for keys - * which are present in the other map. - * - * @tparam R The type of values in `that`. - * @param that The map to intersect with. - * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. 
- */ - def intersection[R](that: LongMap[R]): LongMap[T] = - this.intersectionWith(that, (key: Long, value: T, value2: R) => value) - - def ++[S >: T](that: LongMap[S]) = - this.unionWith[S](that, (key, x, y) => y) - - @tailrec - final def firstKey: Long = this match { - case LongMap.Bin(_, _, l, r) => l.firstKey - case LongMap.Tip(k, v) => k - case LongMap.Nil => sys.error("Empty set") - } - - @tailrec - final def lastKey: Long = this match { - case LongMap.Bin(_, _, l, r) => r.lastKey - case LongMap.Tip(k , v) => k - case LongMap.Nil => sys.error("Empty set") - } - -} - diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Map.scala b/tests/scala2-library/src/library/scala/collection/immutable/Map.scala deleted file mode 100644 index 4107b6414d84..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Map.scala +++ /dev/null @@ -1,216 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package immutable - -import generic._ - -/** - * A generic trait for immutable maps. Concrete classes have to provide - * functionality for the abstract methods in `Map`: - * - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def + [V1 >: V](kv: (K, V1)): Map[K, V1] - * def -(key: K): Map[K, V] - * }}} - * - * @since 1 - */ -trait Map[K, +V] extends Iterable[(K, V)] -// with GenMap[K, V] - with scala.collection.Map[K, V] - with MapLike[K, V, Map[K, V]] { self => - - override def empty: Map[K, V] = Map.empty - - /** Returns this $coll as an immutable map. - * - * A new map will not be built; lazy collections will stay lazy. - */ - @deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.", "2.11.0") - override def toMap[T, U](implicit ev: (K, V) <:< (T, U)): immutable.Map[T, U] = - self.asInstanceOf[immutable.Map[T, U]] - - override def seq: Map[K, V] = this - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, d) - - /** The same map with a given default value. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, x => d) - - /** Add a key/value pair to this map. 
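The defaulting wrappers documented above deserve a concrete reminder, in particular the caveat that transformer methods drop the default; the names below are illustrative:

{{{
val scores = Map("alice" -> 10).withDefaultValue(0)
assert(scores("bob") == 0)        // missing keys fall back to the default value
assert(!scores.contains("bob"))   // but contains/get/iterator are unaffected

val doubled = scores.map { case (k, v) => k -> v * 2 }
// doubled("bob") now throws NoSuchElementException: map did not keep the default
}}}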
- * @param key the key - * @param value the value - * @return A new map with the new binding added to this map - */ - override def updated [V1 >: V](key: K, value: V1): Map[K, V1] - def + [V1 >: V](kv: (K, V1)): Map[K, V1] -} - -/** $factoryInfo - * @define Coll `immutable.Map` - * @define coll immutable map - */ -object Map extends ImmutableMapFactory[Map] { - - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V] - - def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] - - class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault[K, V](underlying, d) with Map[K, V] { - override def empty = new WithDefault(underlying.empty, d) - override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d) - override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, d) - override def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, x => d) - } - - private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable { - override def size: Int = 0 - override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) - override def contains(key: Any) = false - def get(key: Any): Option[Nothing] = None - def iterator: Iterator[(Any, Nothing)] = Iterator.empty - override def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) - def + [V1](kv: (Any, V1)): Map[Any, V1] = updated(kv._1, kv._2) - def - (key: Any): Map[Any, Nothing] = this - } - - class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { - override def size = 1 - override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = key == key1 - def get(key: K): Option[V] = - if (key == key1) Some(value1) else None - def iterator = Iterator((key1, value1)) - override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = - if (key == key1) new Map1(key1, value) - else new Map2(key1, value1, key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - def - (key: K): Map[K, V] = - if (key == key1) Map.empty else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)) - } - } - - class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { - override def size = 2 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else None - def iterator = Iterator((key1, value1), (key2, value2)) - override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = - if (key == key1) new Map2(key1, value, key2, value2) - else if (key == key2) new Map2(key1, value1, key2, value) - else new Map3(key1, value1, key2, value2, key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - def - (key: K): Map[K, V] = - if (key == key1) new Map1(key2, value2) - 
else if (key == key2) new Map1(key1, value1) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)) - } - } - - class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { - override def size = 3 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else None - def iterator = Iterator((key1, value1), (key2, value2), (key3, value3)) - override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = - if (key == key1) new Map3(key1, value, key2, value2, key3, value3) - else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) - else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) - else new Map4(key1, value1, key2, value2, key3, value3, key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - def - (key: K): Map[K, V] = - if (key == key1) new Map2(key2, value2, key3, value3) - else if (key == key2) new Map2(key1, value1, key3, value3) - else if (key == key3) new Map2(key1, value1, key2, value2) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)) - } - } - - class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { - override def size = 4 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else if (key == key4) Some(value4) - else None - def iterator = Iterator((key1, value1), (key2, value2), (key3, value3), (key4, value4)) - override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = - if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) - else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) - else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) - else (new HashMap).updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - def - (key: K): Map[K, V] = - if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) - else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) - else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) - } - } -} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
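The Map1 to Map4 classes above keep very small maps flat; once a fifth binding is added, Map4.updated switches over to a HashMap, as its last branch shows. A quick illustration with arbitrary entries:

{{{
val four = Map(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")   // backed by Map.Map4
val five = four + (5 -> "e")
assert(five.isInstanceOf[scala.collection.immutable.HashMap[_, _]])
assert(five(5) == "e" && five(1) == "a")
}}}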
*/ -abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/tests/scala2-library/src/library/scala/collection/immutable/MapLike.scala b/tests/scala2-library/src/library/scala/collection/immutable/MapLike.scala deleted file mode 100644 index 5867383b522e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/MapLike.scala +++ /dev/null @@ -1,134 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import parallel.immutable.ParMap - -/** - * A generic template for immutable maps from keys of type `K` - * to values of type `V`. - * To implement a concrete map, you need to provide implementations of the - * following methods (where `This` is the type of the actual map implementation): - * - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def + [V1 >: V](kv: (K, V)): Map[K, V1] - * def - (key: K): This - * }}} - * - * If you wish that transformer methods like `take`, `drop`, `filter` return the - * same kind of map, you should also override: - * - * {{{ - * def empty: This - * }}} - * - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * - * @tparam K the type of the keys contained in this collection. - * @tparam V the type of the values associated with the keys. - * @tparam This The type of the actual map implementation. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define Coll immutable.Map - * @define coll immutable map - */ -trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]] - extends scala.collection.MapLike[K, V, This] - with Parallelizable[(K, V), ParMap[K, V]] -{ -self => - - protected[this] override def parCombiner = ParMap.newCombiner[K, V] - - /** A new immutable map containing updating this map with a given key/value mapping. - * @param key the key - * @param value the value - * @return A new map with the new key/value mapping - */ - override def updated [V1 >: V](key: K, value: V1): immutable.Map[K, V1] = this + ((key, value)) - - /** Add a key/value pair to this map, returning a new map. - * @param kv the key/value pair. - * @return A new map with the new binding added to this map. - */ - def + [V1 >: V] (kv: (K, V1)): immutable.Map[K, V1] - - /** Adds two or more elements to this collection and returns - * a new collection. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return A new map with the new bindings added to this map. - */ - override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): immutable.Map[K, V1] = - this + elem1 + elem2 ++ elems - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object consisting of key-value pairs. - * @return a new immutable map with the bindings of this map and those from `xs`. - */ - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): immutable.Map[K, V1] = - ((repr: immutable.Map[K, V1]) /: xs.seq) (_ + _) - - /** Filters this map by retaining only keys satisfying a predicate. 
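The four abstract operations listed in the trait documentation above are all that a concrete immutable map has to supply. The sketch below is a deliberately naive association-list map written against that recipe; the class is hypothetical and only meant to illustrate the contract:

{{{
import scala.collection.immutable

class AssocMap[K, +V](entries: List[(K, V)]) extends immutable.Map[K, V] {
  def get(key: K): Option[V]     = entries.find(_._1 == key).map(_._2)
  def iterator: Iterator[(K, V)] = entries.iterator
  def +[V1 >: V](kv: (K, V1)): immutable.Map[K, V1] =
    new AssocMap(kv :: entries.filterNot(_._1 == kv._1))
  def -(key: K): immutable.Map[K, V] =
    new AssocMap(entries.filterNot(_._1 == key))
}

val am = new AssocMap(List("x" -> 1)) + ("y" -> 2)
assert(am.get("y") == Some(2) && am.size == 2)
}}}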
- * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. - */ - override def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p) with DefaultMap[K, V] - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. - * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - override def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f) with DefaultMap[K, W] - - /** Collects all keys of this map in a set. - * @return a set containing all keys of this map. - */ - override def keySet: immutable.Set[K] = new ImmutableDefaultKeySet - - protected class ImmutableDefaultKeySet extends super.DefaultKeySet with immutable.Set[K] { - override def + (elem: K): immutable.Set[K] = - if (this(elem)) this - else immutable.Set[K]() ++ this + elem - override def - (elem: K): immutable.Set[K] = - if (this(elem)) immutable.Set[K]() ++ this - elem - else this - - // ImmutableDefaultKeySet is only protected, so we won't warn on override. - // Someone could override in a way that makes widening not okay - // (e.g. by overriding +, though the version in this class is fine) - override def toSet[B >: K]: Set[B] = this.asInstanceOf[Set[B]] - } - - /** This function transforms all the values of mappings contained - * in this map with function `f`. - * - * @param f A function over keys and values - * @return the updated map - */ - def transform[W, That](f: (K, V) => W)(implicit bf: CanBuildFrom[This, (K, W), That]): That = { - val b = bf(repr) - for ((key, value) <- this) b += ((key, f(key, value))) - b.result() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/MapProxy.scala b/tests/scala2-library/src/library/scala/collection/immutable/MapProxy.scala deleted file mode 100644 index 0d1c17d4b337..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/MapProxy.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -/** - * This is a simple wrapper class for `scala.collection.immutable.Map`. - * - * It is most useful for assembling customized map abstractions - * dynamically using object composition and forwarding. 
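transform and mapValues, both defined above, differ in strictness: transform builds a new map eagerly, while mapValues (like filterKeys) only wraps the original in a view. A small sketch with made-up data:

{{{
val prices = Map("apple" -> 2, "pear" -> 3)

val labelled = prices.transform((name, p) => s"$name: $p")
assert(labelled == Map("apple" -> "apple: 2", "pear" -> "pear: 3"))

val doubled = prices.mapValues(_ * 2)  // a view: the function reruns on each access
assert(doubled("pear") == 6)
}}}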
- * - * @author Matthias Zenger, Martin Odersky - * @version 2.0, 31/12/2006 - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] { - override def repr = this - private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] = - new MapProxy[A, B1] { val self = newSelf } - - override def empty = newProxy(self.empty) - override def updated [B1 >: B](key: A, value: B1) = newProxy(self.updated(key, value)) - - override def -(key: A) = newProxy(self - key) - override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv) - override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*)) - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq) - - override def keySet: immutable.Set[A] = new SetProxy[A] { val self = MapProxy.this.self.keySet } - override def filterKeys(p: A => Boolean) = self.filterKeys(p) - override def mapValues[C](f: B => C) = self.mapValues(f) -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/NumericRange.scala b/tests/scala2-library/src/library/scala/collection/immutable/NumericRange.scala deleted file mode 100644 index f1b831bf7594..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/NumericRange.scala +++ /dev/null @@ -1,399 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -// TODO: Now the specialization exists there is no clear reason to have -// separate classes for Range/NumericRange. Investigate and consolidate. - -/** `NumericRange` is a more generic version of the - * `Range` class which works with arbitrary types. - * It must be supplied with an `Integral` implementation of the - * range type. - * - * Factories for likely types include `Range.BigInt`, `Range.Long`, - * and `Range.BigDecimal`. `Range.Int` exists for completeness, but - * the `Int`-based `scala.Range` should be more performant. - * - * {{{ - * val r1 = new Range(0, 100, 1) - * val veryBig = Int.MaxValue.toLong + 1 - * val r2 = Range.Long(veryBig, veryBig + 100, 1) - * assert(r1 sameElements r2.map(_ - veryBig)) - * }}} - * - * @author Paul Phillips - * @version 2.8 - * @define Coll `NumericRange` - * @define coll numeric range - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -abstract class NumericRange[T] - (val start: T, val end: T, val step: T, val isInclusive: Boolean) - (implicit num: Integral[T]) -extends AbstractSeq[T] with IndexedSeq[T] with Serializable { - /** Note that NumericRange must be invariant so that constructs - * such as "1L to 10 by 5" do not infer the range type as AnyVal. - */ - import num._ - - // See comment in Range for why this must be lazy. - private lazy val numRangeElements: Int = - NumericRange.count(start, end, step, isInclusive) - - override def length = numRangeElements - override def isEmpty = length == 0 - override lazy val last: T = - if (length == 0) Nil.last - else locationAfterN(length - 1) - - /** Create a new range with the start and end values of this range and - * a new `step`. - */ - def by(newStep: T): NumericRange[T] = copy(start, end, newStep) - - /** Create a copy of this range. 
- */ - def copy(start: T, end: T, step: T): NumericRange[T] - - override def foreach[U](f: T => U) { - var count = 0 - var current = start - while (count < length) { - f(current) - current += step - count += 1 - } - } - - // TODO: these private methods are straight copies from Range, duplicated - // to guard against any (most likely illusory) performance drop. They should - // be eliminated one way or another. - - // Tests whether a number is within the endpoints, without testing - // whether it is a member of the sequence (i.e. when step > 1.) - private def isWithinBoundaries(elem: T) = !isEmpty && ( - (step > zero && start <= elem && elem <= last ) || - (step < zero && last <= elem && elem <= start) - ) - // Methods like apply throw exceptions on invalid n, but methods like take/drop - // are forgiving: therefore the checks are with the methods. - private def locationAfterN(n: Int): T = start + (step * fromInt(n)) - - // When one drops everything. Can't ever have unchecked operations - // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } - // will overflow. This creates an exclusive range where start == end - // based on the given value. - private def newEmptyRange(value: T) = NumericRange(value, value, step) - - final override def take(n: Int): NumericRange[T] = ( - if (n <= 0 || length == 0) newEmptyRange(start) - else if (n >= length) this - else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) - ) - - final override def drop(n: Int): NumericRange[T] = ( - if (n <= 0 || length == 0) this - else if (n >= length) newEmptyRange(end) - else copy(locationAfterN(n), end, step) - ) - - def apply(idx: Int): T = { - if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString) - else locationAfterN(idx) - } - - import NumericRange.defaultOrdering - - override def min[T1 >: T](implicit ord: Ordering[T1]): T = - // We can take the fast path: - // - If the Integral of this NumericRange is also the requested Ordering - // (Integral <: Ordering). This can happen for custom Integral types. - // - The Ordering is the default Ordering of a well-known Integral type. - if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.signum(step) > 0) head - else last - } else super.min(ord) - - override def max[T1 >: T](implicit ord: Ordering[T1]): T = - // See comment for fast path in min(). - if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.signum(step) > 0) last - else head - } else super.max(ord) - - // Motivated by the desire for Double ranges with BigDecimal precision, - // we need some way to map a Range and get another Range. This can't be - // done in any fully general way because Ranges are not arbitrary - // sequences but step-valued, so we have a custom method only we can call - // which we promise to use responsibly. - // - // The point of it all is that - // - // 0.0 to 1.0 by 0.1 - // - // should result in - // - // NumericRange[Double](0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0) - // - // and not - // - // NumericRange[Double](0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9) - // - // or perhaps more importantly, - // - // (0.1 to 0.3 by 0.1 contains 0.3) == true - // - private[immutable] def mapRange[A](fm: T => A)(implicit unum: Integral[A]): NumericRange[A] = { - val self = this - - // XXX This may be incomplete. 
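/* Illustrative sketch only (the object name and values are assumptions, not taken
 * from this codebase): the factories mentioned in the class comment build
 * NumericRanges directly, and mapRange is what lets Range.Double walk a
 * BigDecimal-backed range so that 0.3 really is an element. */
object NumericRangeSketch {
  import scala.collection.immutable.Range

  val veryBig = Int.MaxValue.toLong + 1
  val longs = Range.Long(veryBig, veryBig + 100, 1)  // a NumericRange.Exclusive[Long]
  assert(longs.length == 100 && longs.head == veryBig)
  assert(longs.by(10L).length == 10)                 // `by` re-steps via `copy`

  val doubles = Range.Double(0.0, 1.0, 0.1)          // BigDecimal steps mapped back to Double
  assert(doubles.contains(0.3))                      // true; naive Double stepping would miss 0.3
}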
- new NumericRange[A](fm(start), fm(end), fm(step), isInclusive) { - def copy(start: A, end: A, step: A): NumericRange[A] = - if (isInclusive) NumericRange.inclusive(start, end, step) - else NumericRange(start, end, step) - - private lazy val underlyingRange: NumericRange[T] = self - override def foreach[U](f: A => U) { underlyingRange foreach (x => f(fm(x))) } - override def isEmpty = underlyingRange.isEmpty - override def apply(idx: Int): A = fm(underlyingRange(idx)) - override def containsTyped(el: A) = underlyingRange exists (x => fm(x) == el) - - override def toString = { - def simpleOf(x: Any): String = x.getClass.getName.split("\\.").last - val stepped = simpleOf(underlyingRange.step) - s"${super.toString} (using $underlyingRange of $stepped)" - } - } - } - - // a well-typed contains method. - def containsTyped(x: T): Boolean = - isWithinBoundaries(x) && (((x - start) % step) == zero) - - override def contains[A1 >: T](x: A1): Boolean = - try containsTyped(x.asInstanceOf[T]) - catch { case _: ClassCastException => false } - - final override def sum[B >: T](implicit num: Numeric[B]): B = { - if (isEmpty) num.zero - else if (numRangeElements == 1) head - else { - // If there is no overflow, use arithmetic series formula - // a + ... (n terms total) ... + b = n*(a+b)/2 - if ((num eq scala.math.Numeric.IntIsIntegral)|| - (num eq scala.math.Numeric.ShortIsIntegral)|| - (num eq scala.math.Numeric.ByteIsIntegral)|| - (num eq scala.math.Numeric.CharIsIntegral)) { - // We can do math with no overflow in a Long--easy - val exact = (numRangeElements * ((num toLong head) + (num toInt last))) / 2 - num fromInt exact.toInt - } - else if (num eq scala.math.Numeric.LongIsIntegral) { - // Uh-oh, might be overflow, so we have to divide before we overflow. - // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying - val a = head.toLong - val b = last.toLong - val ans = - if ((numRangeElements & 1) == 0) (numRangeElements / 2) * (a + b) - else numRangeElements * { - // Sum is even, but we might overflow it, so divide in pieces and add back remainder - val ha = a/2 - val hb = b/2 - ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2 - } - ans.asInstanceOf[B] - } - else if ((num eq scala.math.Numeric.FloatAsIfIntegral) || - (num eq scala.math.Numeric.DoubleAsIfIntegral)) { - // Try to compute sum with reasonable accuracy, avoiding over/underflow - val numAsIntegral = num.asInstanceOf[Integral[B]] - import numAsIntegral._ - val a = math.abs(head.toDouble) - val b = math.abs(last.toDouble) - val two = num fromInt 2 - val nre = num fromInt numRangeElements - if (a > 1e38 || b > 1e38) nre * ((head / two) + (last / two)) // Compute in parts to avoid Infinity if possible - else (nre / two) * (head + last) // Don't need to worry about infinity; this will be more accurate and avoid underflow - } - else if ((num eq scala.math.Numeric.BigIntIsIntegral) || - (num eq scala.math.Numeric.BigDecimalIsFractional)) { - // No overflow, so we can use arithmetic series formula directly - // (not going to worry about running out of memory) - val numAsIntegral = num.asInstanceOf[Integral[B]] - import numAsIntegral._ - ((num fromInt numRangeElements) * (head + last)) / (num fromInt 2) - } - else { - // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. 
won't work on something like Z_6) - if (isEmpty) num.zero - else { - var acc = num.zero - var i = head - var idx = 0 - while(idx < length) { - acc = num.plus(acc, i) - i = i + step - idx = idx + 1 - } - acc - } - } - } - } - - override lazy val hashCode = super.hashCode() - override def equals(other: Any) = other match { - case x: NumericRange[_] => - (x canEqual this) && (length == x.length) && ( - (length == 0) || // all empty sequences are equal - (start == x.start && last == x.last) // same length and same endpoints implies equality - ) - case _ => - super.equals(other) - } - - override def toString = { - val empty = if (isEmpty) "empty " else "" - val preposition = if (isInclusive) "to" else "until" - val stepped = if (step == 1) "" else s" by $step" - s"${empty}NumericRange $start $preposition $end$stepped" - } -} - -/** A companion object for numeric ranges. - */ -object NumericRange { - - /** Calculates the number of elements in a range given start, end, step, and - * whether or not it is inclusive. Throws an exception if step == 0 or - * the number of elements exceeds the maximum Int. - */ - def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { - val zero = num.zero - val upward = num.lt(start, end) - val posStep = num.gt(step, zero) - - if (step == zero) throw new IllegalArgumentException("step cannot be 0.") - else if (start == end) if (isInclusive) 1 else 0 - else if (upward != posStep) 0 - else { - /* We have to be frightfully paranoid about running out of range. - * We also can't assume that the numbers will fit in a Long. - * We will assume that if a > 0, -a can be represented, and if - * a < 0, -a+1 can be represented. We also assume that if we - * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least). - * And we assume that numbers wrap rather than cap when they overflow. - */ - // Check whether we can short-circuit by deferring to Int range. - val startint = num.toInt(start) - if (start == num.fromInt(startint)) { - val endint = num.toInt(end) - if (end == num.fromInt(endint)) { - val stepint = num.toInt(step) - if (step == num.fromInt(stepint)) { - return { - if (isInclusive) Range.inclusive(startint, endint, stepint).length - else Range (startint, endint, stepint).length - } - } - } - } - // If we reach this point, deferring to Int failed. - // Numbers may be big. - val one = num.one - val limit = num.fromInt(Int.MaxValue) - def check(t: T): T = - if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") - else t - // If the range crosses zero, it might overflow when subtracted - val startside = num.signum(start) - val endside = num.signum(end) - num.toInt{ - if (startside*endside >= 0) { - // We're sure we can subtract these numbers. - // Note that we do not use .rem because of different conventions for Long and BigInt - val diff = num.minus(end, start) - val quotient = check(num.quot(diff, step)) - val remainder = num.minus(diff, num.times(quotient, step)) - if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one)) - } - else { - // We might not even be able to subtract these numbers. 
- // Jump in three pieces: - // * start to -1 or 1, whichever is closer (waypointA) - // * one step, which will take us at least to 0 (ends at waypointB) - // * there to the end - val negone = num.fromInt(-1) - val startlim = if (posStep) negone else one - val startdiff = num.minus(startlim, start) - val startq = check(num.quot(startdiff, step)) - val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) - val waypointB = num.plus(waypointA, step) - check { - if (num.lt(waypointB, end) != upward) { - // No last piece - if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2)) - else num.plus(startq, one) - } - else { - // There is a last piece - val enddiff = num.minus(end,waypointB) - val endq = check(num.quot(enddiff, step)) - val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step)) - // Now we have to tally up all the pieces - // 1 for the initial value - // startq steps to waypointA - // 1 step to waypointB - // endq steps to the end (one less if !isInclusive and last==end) - num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2))) - } - } - } - } - } - } - - class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, step, true) { - def copy(start: T, end: T, step: T): Inclusive[T] = - NumericRange.inclusive(start, end, step) - - def exclusive: Exclusive[T] = NumericRange(start, end, step) - } - - class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, step, false) { - def copy(start: T, end: T, step: T): Exclusive[T] = - NumericRange(start, end, step) - - def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) - } - - def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] = - new Exclusive(start, end, step) - def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = - new Inclusive(start, end, step) - - private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( - Numeric.BigIntIsIntegral -> Ordering.BigInt, - Numeric.IntIsIntegral -> Ordering.Int, - Numeric.ShortIsIntegral -> Ordering.Short, - Numeric.ByteIsIntegral -> Ordering.Byte, - Numeric.CharIsIntegral -> Ordering.Char, - Numeric.LongIsIntegral -> Ordering.Long, - Numeric.FloatAsIfIntegral -> Ordering.Float, - Numeric.DoubleAsIfIntegral -> Ordering.Double, - Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal - ) - -} - diff --git a/tests/scala2-library/src/library/scala/collection/immutable/PagedSeq.scala b/tests/scala2-library/src/library/scala/collection/immutable/PagedSeq.scala deleted file mode 100644 index 01854b17978a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/PagedSeq.scala +++ /dev/null @@ -1,272 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import java.io.{File, FileReader, Reader} -import scala.reflect.ClassTag - -/** The `PagedSeq` object defines a lazy implementations of - * a random access sequence. - * - * Provides utility methods that return instances of `PagedSeq[Char]`. 
- * `fromIterator` and `fromIterable` provide generalised instances of `PagedSeq` - * @since 2.7 - */ -@deprecated("this object will be moved to the scala-parser-combinators module", "2.11.8") -object PagedSeq { - final val UndeterminedEnd = Int.MaxValue - - /** Constructs a paged sequence from an iterator */ - def fromIterator[T: ClassTag](source: Iterator[T]): PagedSeq[T] = - new PagedSeq[T]((data: Array[T], start: Int, len: Int) => { - var i = 0 - while (i < len && source.hasNext) { - data(start + i) = source.next() - i += 1 - } - if (i == 0) -1 else i - }) - - /** Constructs a paged sequence from an iterable */ - def fromIterable[T: ClassTag](source: Iterable[T]): PagedSeq[T] = - fromIterator(source.iterator) - - /** Constructs a paged character sequence from a string iterator */ - def fromStrings(source: Iterator[String]): PagedSeq[Char] = { - var current: String = "" - def more(data: Array[Char], start: Int, len: Int): Int = - if (current.length != 0) { - val cnt = current.length min len - current.getChars(0, cnt, data, start) - current = current.substring(cnt) - if (cnt == len) cnt - else (more(data, start + cnt, len - cnt) max 0) + cnt - } else if (source.hasNext) { - current = source.next() - more(data, start, len) - } else -1 - new PagedSeq(more(_: Array[Char], _: Int, _: Int)) - } - - /** Constructs a paged character sequence from a string iterable */ - def fromStrings(source: Iterable[String]): PagedSeq[Char] = - fromStrings(source.iterator) - - /** Constructs a paged character sequence from a line iterator - * Lines do not contain trailing `\n` characters; The method inserts - * a line separator `\n` between any two lines in the sequence. - */ - def fromLines(source: Iterator[String]): PagedSeq[Char] = { - var isFirst = true - fromStrings(source map { line => - if (isFirst) { - isFirst = false - line - } else "\n"+line - }) - } - - /** Constructs a paged character sequence from a line iterable - * Lines do not contain trailing `\n` characters; The method inserts - * a line separator `\n` between any two lines in the sequence. - */ - def fromLines(source: Iterable[String]): PagedSeq[Char] = - fromLines(source.iterator) - - /** Constructs a paged character sequence from an input reader - */ - def fromReader(source: Reader): PagedSeq[Char] = - new PagedSeq(source.read(_: Array[Char], _: Int, _: Int)) - - /** Constructs a paged character sequence from an input file - */ - def fromFile(source: File): PagedSeq[Char] = - fromReader(new FileReader(source)) - - /** Constructs a paged character sequence from a file with given name - */ - def fromFile(source: String): PagedSeq[Char] = - fromFile(new File(source)) - - /** Constructs a paged character sequence from a scala.io.Source value - */ - def fromSource(source: scala.io.Source) = - fromLines(source.getLines()) -} - - -import PagedSeq._ - -/** An implementation of lazily computed sequences, where elements are stored - * in "pages", i.e. arrays of fixed size. - * - * A paged sequence is constructed from a function that produces more elements when asked. - * The producer function - `more`, is similar to the read method in java.io.Reader. - * The `more` function takes three parameters: an array of elements, a start index, and an end index. - * It should try to fill the array between start and end indices (excluding end index). - * It returns the number of elements produced, or -1 if end of logical input stream was reached - * before reading any element. 
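/* Illustrative sketch only (object name and values are assumptions): the producer
 * contract just described, exercised through the fromStrings factory. Pages are
 * filled on demand; only `length` forces the whole input to be read. */
object PagedSeqSketch {
  import scala.collection.immutable.PagedSeq

  val chars: PagedSeq[Char] = PagedSeq.fromStrings(Iterator("ab", "cde"))
  assert(chars(3) == 'd')                      // fills pages only as far as index 3 requires
  assert(chars.length == 5)                    // forces the remaining input
  assert(chars.slice(1, 4).toString == "bcd")
}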
- * - * @tparam T the type of the elements contained in this paged sequence, with an `ClassTag` context bound. - * - * @author Martin Odersky - * @since 2.7 - * @define Coll `PagedSeq` - * @define coll paged sequence - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecated("this class will be moved to the scala-parser-combinators module", "2.11.8") -class PagedSeq[T: ClassTag] protected( - more: (Array[T], Int, Int) => Int, - first1: Page[T], - start: Int, - end: Int) -extends scala.collection.AbstractSeq[T] - with scala.collection.IndexedSeq[T] -{ - def this(more: (Array[T], Int, Int) => Int) = this(more, new Page[T](0), 0, UndeterminedEnd) - - private var current: Page[T] = first1 - - private def latest = first1.latest - - private def addMore() = latest.addMore(more) - - private def page(absindex: Int) = { - if (absindex < current.start) - current = first1 - while (absindex >= current.end && current.next != null) - current = current.next - while (absindex >= current.end && !current.isLast) { - current = addMore() - } - current - } - - /** The length of the paged sequence - * @note Calling this method will force the entire sequence to be read. - */ - def length: Int = { - while (!latest.isLast && latest.end < end) addMore() - (latest.end min end) - start - } - - /** The element at position `index`. - */ - def apply(index: Int) = - if (isDefinedAt(index)) page(index + start)(index + start) - else throw new IndexOutOfBoundsException(index.toString) - - /** Predicate method to check if an element is defined - * at position `index` of the current sequence. - * Unlike `length` this operation does not force reading - * a lazy sequence to the end. - */ - override def isDefinedAt(index: Int) = - index >= 0 && index < end - start && { - val absidx = index + start - absidx >= 0 && absidx < page(absidx).end - } - - /** The subsequence from index `start` up to `end -1` if `end` - * is lesser than the length of the current sequence and up to - * length of the sequence otherwise. This is limited up to the length - * of the current sequence if `end` is larger than its length. - */ - override def slice(_start: Int, _end: Int): PagedSeq[T] = { - page(start) - val s = start + _start - val e = if (_end == UndeterminedEnd) _end else start + _end - var f = first1 - while (f.end <= s && !f.isLast) { - if (f.next eq null) f = f.addMore(more) - else f = f.next - } - // Warning -- not refining `more` means that slices can freely request and obtain - // data outside of their slice. This is part of the design of PagedSeq - // (to read pages!) but can be surprising. - new PagedSeq(more, f, s, e) - } - - /** The subsequence from index `start` up to - * the length of the current sequence. - */ - def slice(start: Int): PagedSeq[T] = slice(start, UndeterminedEnd) - - /** Convert sequence to string */ - override def toString = { - val buf = new StringBuilder - for (ch <- PagedSeq.this.iterator) buf append ch - buf.toString - } -} - - -/** Page containing up to PageSize characters of the input sequence. - */ -private class Page[T: ClassTag](val num: Int) { - - private final val PageSize = 4096 - - /** The next page in the sequence */ - var next : Page[T] = null - - /** A later page in the sequence, serves a cache for pointing to last page */ - var later : Page[T] = this - - /** The number of elements read into this page */ - var filled: Int = 0 - - /** Set true if the current page is the last in the sequence or if - * the `more` function returned -1 signalling end of input. 
*/ - var isLast: Boolean = false - - /** The element array */ - final val data = new Array[T](PageSize) - - /** The index of the first element in this page relative to the whole sequence */ - final def start = num * PageSize - - /** The index of the element following the last element in this page relative - * to the whole sequence */ - final def end = start + filled - - /** The last page as currently present in the sequence; This can change as more - * elements get appended to the sequence. */ - final def latest: Page[T] = { - if (later.next != null) later = later.next.latest - later - } - - /** The element at the given sequence index. - * That index is relative to the whole sequence, not the page. */ - def apply(index: Int) = { - if (index < start || index - start >= filled) throw new IndexOutOfBoundsException(index.toString) - data(index - start) - } - - /** Produces more elements by calling `more` and adds them on the current page, - * or fills a subsequent page if current page is full. - * @note If current page is full, it is the last one in the sequence. */ - final def addMore(more: (Array[T], Int, Int) => Int): Page[T] = - if (filled == PageSize) { - next = new Page[T](num + 1) - next.addMore(more) - } else { - val count = more(data, filled, PageSize - filled) - if (count < 0) isLast = true - else filled += count - this - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Queue.scala b/tests/scala2-library/src/library/scala/collection/immutable/Queue.scala deleted file mode 100644 index 41c19d032c90..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Queue.scala +++ /dev/null @@ -1,188 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import mutable.{ Builder, ListBuffer } - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. - * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the - * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. - * - * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case - * where a pivot is required, in which case, a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, - * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. - * - * @author Erik Stenman - * @version 1.0, 08/07/2003 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_queues "Scala's Collection Library overview"]] - * section on `Immutable Queues` for more information. 
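/* Illustrative sketch only (object name and values are assumptions): the two-list
 * representation described above, observed through the public API. Enqueueing conses
 * onto `in`; dequeueing pops `out`, pivoting `in.reverse` only when `out` runs dry. */
object QueueSketch {
  import scala.collection.immutable.Queue

  val q = Queue(1, 2, 3).enqueue(4)    // out = List(1, 2, 3), in = List(4)
  val (first, rest) = q.dequeue
  assert(first == 1 && q.front == 1)   // front peeks without removing
  assert(rest.toList == List(2, 3, 4))
}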
- * - * @define Coll `immutable.Queue` - * @define coll immutable queue - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ - -@SerialVersionUID(-7622936493364270175L) -sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, Queue] - with LinearSeqLike[A, Queue[A]] - with Serializable { - - override def companion: GenericCompanion[Queue] = Queue - - /** Returns the `n`-th element of this queue. - * The first element is at position `0`. - * - * @param n index of the element to return - * @return the element at position `n` in this queue. - * @throws java.util.NoSuchElementException if the queue is too short. - */ - override def apply(n: Int): A = { - val olen = out.length - if (n < olen) out.apply(n) - else { - val m = n - olen - val ilen = in.length - if (m < ilen) in.apply(ilen - m - 1) - else throw new NoSuchElementException("index out of range") - } - } - - /** Returns the elements in the list as an iterator - */ - override def iterator: Iterator[A] = (out ::: in.reverse).iterator - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = in.isEmpty && out.isEmpty - - override def head: A = - if (out.nonEmpty) out.head - else if (in.nonEmpty) in.last - else throw new NoSuchElementException("head on empty queue") - - override def tail: Queue[A] = - if (out.nonEmpty) new Queue(in, out.tail) - else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) - else throw new NoSuchElementException("tail on empty queue") - - /* This is made to avoid inefficient implementation of iterator. */ - override def forall(p: A => Boolean): Boolean = - in.forall(p) && out.forall(p) - - /* This is made to avoid inefficient implementation of iterator. */ - override def exists(p: A => Boolean): Boolean = - in.exists(p) || out.exists(p) - - override def stringPrefix = "Queue" - - /** Returns the length of the queue. - */ - override def length = in.length + out.length - - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Queue[A], B, That]): That = bf match { - case _: Queue.GenericCanBuildFrom[_] => new Queue(in, elem :: out).asInstanceOf[That] - case _ => super.+:(elem)(bf) - } - - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Queue[A], B, That]): That = bf match { - case _: Queue.GenericCanBuildFrom[_] => enqueue(elem).asInstanceOf[That] - case _ => super.:+(elem)(bf) - } - - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Queue[A], B, That]): That = { - if (bf eq Queue.ReusableCBF) { - val newIn = - if (that.isInstanceOf[Queue[_]]) { - val thatQueue: Queue[B] = that.asInstanceOf[Queue[B]] - thatQueue.in ++ (thatQueue.out reverse_::: this.in) - } else { - (new ListBuffer[B] ++= that.seq).prependToList(this.in) - } - new Queue[B](newIn, this.out).asInstanceOf[That] - } else { - super.++(that)(bf) - } - } - - /** Creates a new queue with element added at the end - * of the old queue. - * - * @param elem the element to insert - */ - def enqueue[B >: A](elem: B) = new Queue(elem :: in, out) - - /** Returns a new queue with all elements provided by an `Iterable` object - * added at the end of the queue. - * - * The elements are appended in the order they are given out by the - * iterator. 
- * - * @param iter an iterable object - */ - def enqueue[B >: A](iter: Iterable[B]) = - new Queue(iter.toList reverse_::: in, out) - - /** Returns a tuple with the first element in the queue, - * and a new queue with this element removed. - * - * @throws java.util.NoSuchElementException - * @return the first element of the queue. - */ - def dequeue: (A, Queue[A]) = out match { - case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) - case x :: xs => (x, new Queue(in, xs.asInstanceOf[List[A]])) - case _ => throw new NoSuchElementException("dequeue on empty queue") - } - - /** Optionally retrieves the first element and a queue of the remaining elements. - * - * @return A tuple of the first element of the queue, and a new queue with this element removed. - * If the queue is empty, `None` is returned. - */ - def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @throws java.util.NoSuchElementException - * @return the first element. - */ - def front: A = head - - /** Returns a string representation of this queue. - */ - override def toString() = mkString("Queue(", ", ", ")") -} - -/** $factoryInfo - * @define Coll `immutable.Queue` - * @define coll immutable queue - */ -object Queue extends SeqFactory[Queue] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x.toList)) - override def empty[A]: Queue[A] = EmptyQueue.asInstanceOf[Queue[A]] - override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) - - private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Range.scala b/tests/scala2-library/src/library/scala/collection/immutable/Range.scala deleted file mode 100644 index 75f5d53d95ff..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Range.scala +++ /dev/null @@ -1,522 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection.immutable - -import scala.collection.parallel.immutable.ParRange - -/** The `Range` class represents integer values in range - * ''[start;end)'' with non-zero step value `step`. - * It's a special case of an indexed sequence. - * For example: - * - * {{{ - * val r1 = 0 until 10 - * val r2 = r1.start until r1.end by r1.step + 1 - * println(r2.length) // = 5 - * }}} - * - * Ranges that contain more than `Int.MaxValue` elements can be created, but - * these overfull ranges have only limited capabilities. Any method that - * could require a collection of over `Int.MaxValue` length to be created, or - * could be asked to index beyond `Int.MaxValue` elements will throw an - * exception. Overfull ranges can safely be reduced in size by changing - * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, - * `equals`, and access to the ends of the range (`head`, `last`, `tail`, - * `init`) are also permitted on overfull ranges. - * - * @param start the start of this range. - * @param end the end of the range. For exclusive ranges, e.g. 
- * `Range(0,3)` or `(0 until 3)`, this is one - * step past the last one in the range. For inclusive - * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, - * it may be in the range if it is not skipped by the step size. - * To find the last element inside a non-empty range, - use `last` instead. - * @param step the step for the range. - * - * @author Martin Odersky - * @author Paul Phillips - * @version 2.8 - * @since 2.5 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#ranges "Scala's Collection Library overview"]] - * section on `Ranges` for more information. - * - * @define coll range - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define doesNotUseBuilders - * '''Note:''' this method does not use builders to construct a new range, - * and its complexity is O(1). - */ -@SerialVersionUID(7618862778670199309L) -sealed class Range(val start: Int, val end: Int, val step: Int) -extends scala.collection.AbstractSeq[Int] - with IndexedSeq[Int] - with scala.collection.CustomParallelizable[Int, ParRange] - with Serializable -{ - override def par = new ParRange(this) - - private def gap = end.toLong - start.toLong - private def isExact = gap % step == 0 - private def hasStub = isInclusive || !isExact - private def longLength = gap / step + ( if (hasStub) 1 else 0 ) - - // Check cannot be evaluated eagerly because we have a pattern where - // ranges are constructed like: "x to y by z" The "x to y" piece - // should not trigger an exception. So the calculation is delayed, - // which means it will not fail fast for those cases where failing was - // correct. - override final val isEmpty = ( - (start > end && step > 0) - || (start < end && step < 0) - || (start == end && !isInclusive) - ) - - private val numRangeElements: Int = { - if (step == 0) throw new IllegalArgumentException("step cannot be 0.") - else if (isEmpty) 0 - else { - val len = longLength - if (len > scala.Int.MaxValue) -1 - else len.toInt - } - } - - // This field has a sensible value only for non-empty ranges - private val lastElement = step match { - case 1 => if (isInclusive) end else end-1 - case -1 => if (isInclusive) end else end+1 - case _ => - val remainder = (gap % step).toInt - if (remainder != 0) end - remainder - else if (isInclusive) end - else end - step - } - - /** The last element of this range. This method will return the correct value - * even if there are too many elements to iterate over. - */ - override def last = if (isEmpty) Nil.last else lastElement - override def head = if (isEmpty) Nil.head else start - - override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) head - else last - } else super.min(ord) - - override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) last - else head - } else super.max(ord) - - protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step) - - /** Create a new range with the `start` and `end` values of this range and - * a new `step`. 
- * - * @return a new range with a different step - */ - def by(step: Int): Range = copy(start, end, step) - - def isInclusive = false - - override def size = length - override def length = if (numRangeElements < 0) fail() else numRangeElements - - private def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) - private def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") - private def validateMaxLength() { - if (numRangeElements < 0) - fail() - } - - final def apply(idx: Int): Int = { - validateMaxLength() - if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString) - else start + (step * idx) - } - - final override def foreach[@specialized(Unit) U](f: Int => U) { - // Implementation chosen on the basis of favorable microbenchmarks - // Note--initialization catches step == 0 so we don't need to here - if (!isEmpty) { - var i = start - while (true) { - f(i) - if (i == lastElement) return - i += step - } - } - } - - /** Creates a new range containing the first `n` elements of this range. - * - * $doesNotUseBuilders - * - * @param n the number of elements to take. - * @return a new range consisting of `n` first elements. - */ - final override def take(n: Int): Range = ( - if (n <= 0 || isEmpty) newEmptyRange(start) - else if (n >= numRangeElements && numRangeElements >= 0) this - else { - // May have more than Int.MaxValue elements in range (numRangeElements < 0) - // but the logic is the same either way: take the first n - new Range.Inclusive(start, locationAfterN(n - 1), step) - } - ) - - /** Creates a new range containing all the elements of this range except the first `n` elements. - * - * $doesNotUseBuilders - * - * @param n the number of elements to drop. - * @return a new range consisting of all the elements of this range except `n` first elements. - */ - final override def drop(n: Int): Range = ( - if (n <= 0 || isEmpty) this - else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) - else { - // May have more than Int.MaxValue elements (numRangeElements < 0) - // but the logic is the same either way: go forwards n steps, keep the rest - copy(locationAfterN(n), end, step) - } - ) - - /** Creates a new range containing the elements starting at `from` up to but not including `until`. - * - * $doesNotUseBuilders - * - * @param from the element at which to start - * @param until the element at which to end (not included in the range) - * @return a new range consisting of a contiguous interval of values in the old range - */ - override def slice(from: Int, until: Int): Range = - if (from <= 0) take(until) - else if (until >= numRangeElements && numRangeElements >= 0) drop(from) - else { - val fromValue = locationAfterN(from) - if (from >= until) newEmptyRange(fromValue) - else new Range.Inclusive(fromValue, locationAfterN(until-1), step) - } - - /** Creates a new range containing all the elements of this range except the last one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the last one. - */ - final override def init: Range = { - if (isEmpty) - Nil.init - - dropRight(1) - } - - /** Creates a new range containing all the elements of this range except the first one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the first one. 
- */ - final override def tail: Range = { - if (isEmpty) - Nil.tail - - drop(1) - } - - // Advance from the start while we meet the given test - private def argTakeWhile(p: Int => Boolean): Long = { - if (isEmpty) start - else { - var current = start - val stop = last - while (current != stop && p(current)) current += step - if (current != stop || !p(current)) current - else current.toLong + step - } - } - // Methods like apply throw exceptions on invalid n, but methods like take/drop - // are forgiving: therefore the checks are with the methods. - private def locationAfterN(n: Int) = start + (step * n) - - // When one drops everything. Can't ever have unchecked operations - // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } - // will overflow. This creates an exclusive range where start == end - // based on the given value. - private def newEmptyRange(value: Int) = new Range(value, value, step) - - final override def takeWhile(p: Int => Boolean): Range = { - val stop = argTakeWhile(p) - if (stop==start) newEmptyRange(start) - else { - val x = (stop - step).toInt - if (x == last) this - else new Range.Inclusive(start, x, step) - } - } - final override def dropWhile(p: Int => Boolean): Range = { - val stop = argTakeWhile(p) - if (stop == start) this - else { - val x = (stop - step).toInt - if (x == last) newEmptyRange(last) - else new Range.Inclusive(x + step, last, step) - } - } - final override def span(p: Int => Boolean): (Range, Range) = { - val border = argTakeWhile(p) - if (border == start) (newEmptyRange(start), this) - else { - val x = (border - step).toInt - if (x == last) (this, newEmptyRange(last)) - else (new Range.Inclusive(start, x, step), new Range.Inclusive(x+step, last, step)) - } - } - - /** Creates a pair of new ranges, first consisting of elements before `n`, and the second - * of elements after `n`. - * - * $doesNotUseBuilders - */ - final override def splitAt(n: Int) = (take(n), drop(n)) - - /** Creates a new range consisting of the last `n` elements of the range. - * - * $doesNotUseBuilders - */ - final override def takeRight(n: Int): Range = { - if (n <= 0) newEmptyRange(start) - else if (numRangeElements >= 0) drop(numRangeElements - n) - else { - // Need to handle over-full range separately - val y = last - val x = y - step.toLong*(n-1) - if ((step > 0 && x < start) || (step < 0 && x > start)) this - else new Range.Inclusive(x.toInt, y, step) - } - } - - /** Creates a new range consisting of the initial `length - n` elements of the range. - * - * $doesNotUseBuilders - */ - final override def dropRight(n: Int): Range = { - if (n <= 0) this - else if (numRangeElements >= 0) take(numRangeElements - n) - else { - // Need to handle over-full range separately - val y = last - step.toInt*n - if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) - else new Range.Inclusive(start, y.toInt, step) - } - } - - /** Returns the reverse of this range. - * - * $doesNotUseBuilders - */ - final override def reverse: Range = - if (isEmpty) this - else new Range.Inclusive(last, start, -step) - - /** Make range inclusive. 
- */ - def inclusive = - if (isInclusive) this - else new Range.Inclusive(start, end, step) - - final def contains(x: Int) = { - if (x==end && !isInclusive) false - else if (step > 0) { - if (x < start || x > end) false - else (step == 1) || (((x - start) % step) == 0) - } - else { - if (x < end || x > start) false - else (step == -1) || (((x - start) % step) == 0) - } - } - - final override def sum[B >: Int](implicit num: Numeric[B]): Int = { - if (num eq scala.math.Numeric.IntIsIntegral) { - // this is normal integer range with usual addition. arithmetic series formula can be used - if (isEmpty) 0 - else if (numRangeElements == 1) head - else ((numRangeElements * (head.toLong + last)) / 2).toInt - } else { - // user provided custom Numeric, we cannot rely on arithmetic series formula - if (isEmpty) num.toInt(num.zero) - else { - var acc = num.zero - var i = head - while (true) { - acc = num.plus(acc, i) - if (i == lastElement) return num.toInt(acc) - i = i + step - } - 0 // Never hit this--just to satisfy compiler since it doesn't know while(true) has type Nothing - } - } - } - - override def toIterable = this - - override def toSeq = this - - override def equals(other: Any) = other match { - case x: Range => - // Note: this must succeed for overfull ranges (length > Int.MaxValue) - (x canEqual this) && { - if (isEmpty) x.isEmpty // empty sequences are equal - else // this is non-empty... - x.nonEmpty && start == x.start && { // ...so other must contain something and have same start - val l0 = last - (l0 == x.last && ( // And same end - start == l0 || step == x.step // And either the same step, or not take any steps - )) - } - } - case _ => - super.equals(other) - } - - /* Note: hashCode can't be overridden without breaking Seq's equals contract. */ - - override def toString = { - val preposition = if (isInclusive) "to" else "until" - val stepped = if (step == 1) "" else s" by $step" - val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" - s"${prefix}Range $start $preposition $end$stepped" - } -} - -/** A companion object for the `Range` class. - */ -object Range { - /** Counts the number of range elements. - * @pre step != 0 - * If the size of the range exceeds Int.MaxValue, the - * result will be negative. - */ - def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { - if (step == 0) - throw new IllegalArgumentException("step cannot be 0.") - - val isEmpty = ( - if (start == end) !isInclusive - else if (start < end) step < 0 - else step > 0 - ) - if (isEmpty) 0 - else { - // Counts with Longs so we can recognize too-large ranges. - val gap: Long = end.toLong - start.toLong - val jumps: Long = gap / step - // Whether the size of this range is one larger than the - // number of full-sized jumps. - val hasStub = isInclusive || (gap % step != 0) - val result: Long = jumps + ( if (hasStub) 1 else 0 ) - - if (result > scala.Int.MaxValue) -1 - else result.toInt - } - } - def count(start: Int, end: Int, step: Int): Int = - count(start, end, step, isInclusive = false) - - final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { -// override def par = new ParRange(this) - override def isInclusive = true - override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step) - } - - /** Make a range from `start` until `end` (exclusive) with given step value. 
- * @note step != 0 - */ - def apply(start: Int, end: Int, step: Int): Range = new Range(start, end, step) - - /** Make a range from `start` until `end` (exclusive) with step value 1. - */ - def apply(start: Int, end: Int): Range = new Range(start, end, 1) - - /** Make an inclusive range from `start` to `end` with given step value. - * @note step != 0 - */ - def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step) - - /** Make an inclusive range from `start` to `end` with step value 1. - */ - def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1) - - // BigInt and Long are straightforward generic ranges. - object BigInt { - def apply(start: BigInt, end: BigInt, step: BigInt) = NumericRange(start, end, step) - def inclusive(start: BigInt, end: BigInt, step: BigInt) = NumericRange.inclusive(start, end, step) - } - - object Long { - def apply(start: Long, end: Long, step: Long) = NumericRange(start, end, step) - def inclusive(start: Long, end: Long, step: Long) = NumericRange.inclusive(start, end, step) - } - - // BigDecimal uses an alternative implementation of Numeric in which - // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for - // details. The intention is for it to throw an exception anytime - // imprecision or surprises might result from anything, although this may - // not yet be fully implemented. - object BigDecimal { - implicit val bigDecAsIntegral: scala.math.Numeric.BigDecimalAsIfIntegral = scala.math.Numeric.BigDecimalAsIfIntegral - - def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal) = - NumericRange(start, end, step) - def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal) = - NumericRange.inclusive(start, end, step) - } - - // Double works by using a BigDecimal under the hood for precise - // stepping, but mapping the sequence values back to doubles with - // .doubleValue. This constructs the BigDecimals by way of the - // String constructor (valueOf) instead of the Double one, which - // is necessary to keep 0.3d at 0.3 as opposed to - // 0.299999999999999988897769753748434595763683319091796875 or so. - object Double { - implicit val bigDecAsIntegral: scala.math.Numeric.BigDecimalAsIfIntegral = scala.math.Numeric.BigDecimalAsIfIntegral - implicit val doubleAsIntegral: scala.math.Numeric.DoubleAsIfIntegral = scala.math.Numeric.DoubleAsIfIntegral - def toBD(x: Double): BigDecimal = scala.math.BigDecimal valueOf x - - def apply(start: Double, end: Double, step: Double) = - BigDecimal(toBD(start), toBD(end), toBD(step)) mapRange (_.doubleValue) - - def inclusive(start: Double, end: Double, step: Double) = - BigDecimal.inclusive(toBD(start), toBD(end), toBD(step)) mapRange (_.doubleValue) - } - - // As there is no appealing default step size for not-really-integral ranges, - // we offer a partially constructed object. - class Partial[T, U](private val f: T => U) extends AnyVal { - def by(x: T): U = f(x) - override def toString = "Range requires step" - } - - // Illustrating genericity with Int Range, which should have the same behavior - // as the original Range class. However we leave the original Range - // indefinitely, for performance and because the compiler seems to bootstrap - // off it and won't do so with our parameterized version without modifications. 
- object Int { - def apply(start: Int, end: Int, step: Int) = NumericRange(start, end, step) - def inclusive(start: Int, end: Int, step: Int) = NumericRange.inclusive(start, end, step) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/RedBlackTree.scala b/tests/scala2-library/src/library/scala/collection/immutable/RedBlackTree.scala deleted file mode 100644 index 4f2e9115fe67..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/RedBlackTree.scala +++ /dev/null @@ -1,563 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import scala.annotation.meta.getter - -/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. - * - * Implementation note: since efficiency is important for data structures this implementation - * uses `null` to represent empty trees. This also means pattern matching cannot - * easily be used. The API represented by the RedBlackTree object tries to hide these - * optimizations behind a reasonably clean API. - * - * @since 2.10 - */ -private[collection] -object RedBlackTree { - - def isEmpty(tree: Tree[_, _]): Boolean = tree eq null - - def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null - def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { - case null => None - case tree => Some(tree.value) - } - - @tailrec - def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp < 0) lookup(tree.left, x) - else if (cmp > 0) lookup(tree.right, x) - else tree - } - - def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count - /** - * Count all the nodes with keys greater than or equal to the lower bound and less than the upper bound. - * The two bounds are optional. - */ - def countInRange[A](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int = - if (tree eq null) 0 else - (from, to) match { - // with no bounds use this node's count - case (None, None) => tree.count - // if node is less than the lower bound, try the tree on the right, it might be in range - case (Some(lb), _) if ordering.lt(tree.key, lb) => countInRange(tree.right, from, to) - // if node is greater than or equal to the upper bound, try the tree on the left, it might be in range - case (_, Some(ub)) if ordering.gteq(tree.key, ub) => countInRange(tree.left, from, to) - // node is in range so the tree on the left will all be less than the upper bound and the tree on the - // right will all be greater than or equal to the lower bound. 
So 1 for this node plus - // count the subtrees by stripping off the bounds that we don't need any more - case _ => 1 + countInRange(tree.left, from, None) + countInRange(tree.right, None, to) - - } - def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) - def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) - def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { - case (Some(from), Some(until)) => this.range(tree, from, until) - case (Some(from), None) => this.from(tree, from) - case (None, Some(until)) => this.until(tree, until) - case (None, None) => tree - } - def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) - def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) - def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) - def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) - - def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) - def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) - def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) - - def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty map") - var result = tree - while (result.left ne null) result = result.left - result - } - def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty map") - var result = tree - while (result.right ne null) result = result.right - result - } - - - def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) - - private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U) { - if (tree.left ne null) _foreach(tree.left, f) - f((tree.key, tree.value)) - if (tree.right ne null) _foreach(tree.right, f) - } - - def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) - - private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U) { - if (tree.left ne null) _foreachKey(tree.left, f) - f((tree.key)) - if (tree.right ne null) _foreachKey(tree.right, f) - } - - def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) - def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) - def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) - - @tailrec - def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { - val count = this.count(tree.left) - if (n < count) nth(tree.left, n) - else if (n > count) nth(tree.right, n - count - 1) - else tree - } - - def isBlack(tree: Tree[_, _]) = (tree eq null) || isBlackTree(tree) - - private[this] def isRedTree(tree: Tree[_, _]) = tree.isInstanceOf[RedTree[_, _]] - private[this] def isBlackTree(tree: Tree[_, _]) = tree.isInstanceOf[BlackTree[_, _]] - - private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black - - private[this] def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = - if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - 
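/* Illustrative sketch only (object name and values are assumptions): this object is
 * normally exercised through TreeMap and TreeSet, which delegate insertion, lookup,
 * deletion and range queries to the functions defined here. */
object RedBlackTreeSketch {
  import scala.collection.immutable.TreeMap

  val m = TreeMap(3 -> "c", 1 -> "a", 2 -> "b")        // built via `update`
  assert(m.keys.toList == List(1, 2, 3))               // in-order traversal
  assert(m.get(2) == Some("b"))                        // `lookup`
  assert((m - 2).keySet == Set(1, 3))                  // `delete`, rebalanced
  assert(m.range(1, 3) == TreeMap(1 -> "a", 2 -> "b")) // `rangeImpl`: keys >= 1 and < 3
}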
- private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = { - if (isRedTree(l) && isRedTree(l.left)) - RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) - else if (isRedTree(l) && isRedTree(l.right)) - RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d)) - else - mkTree(isBlack, z, zv, l, d) - } - private[this] def balanceRight[A, B, B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1]): Tree[A, B1] = { - if (isRedTree(r) && isRedTree(r.left)) - RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right)) - else if (isRedTree(r) && isRedTree(r.right)) - RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right)) - else - mkTree(isBlack, x, xv, a, r) - } - private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { - RedTree(k, v, null, null) - } else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v, overwrite), tree.right) - else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v, overwrite)) - else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right) - else tree - } - private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1, overwrite: Boolean): Tree[A, B1] = if (tree eq null) { - RedTree(k, v, null, null) - } else { - val rank = count(tree.left) + 1 - if (idx < rank) balanceLeft(isBlackTree(tree), tree.key, tree.value, updNth(tree.left, idx, k, v, overwrite), tree.right) - else if (idx > rank) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, updNth(tree.right, idx - rank, k, v, overwrite)) - else if (overwrite) mkTree(isBlackTree(tree), k, v, tree.left, tree.right) - else tree - } - - /* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - * Constructing Red-Black Trees, Ralf Hinze: http://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz - * Red-Black Trees in a Functional Setting, Chris Okasaki: https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf */ - private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { - if (isRedTree(tr)) { - RedTree(x, xv, tl.black, tr.black) - } else if (isRedTree(tl.left)) { - RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr)) - } else if (isRedTree(tl.right)) { - RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr)) - } else { - BlackTree(x, xv, tl, tr) - } - } else if (isRedTree(tr)) { - if (isRedTree(tr.right)) { - RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black) - } else if (isRedTree(tr.left)) { - RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right)) - } else { - BlackTree(x, xv, tl, tr) - } - } else { - BlackTree(x, xv, tl, tr) - } - def subl(t: Tree[A, B]) = - if (t.isInstanceOf[BlackTree[_, _]]) t.red - else sys.error("Defect: 
invariance violation; expected black, got "+t) - - def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { - RedTree(x, xv, tl.black, tr) - } else if (isBlackTree(tr)) { - balance(x, xv, tl, tr.red) - } else if (isRedTree(tr) && isBlackTree(tr.left)) { - RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) - } else { - sys.error("Defect: invariance violation") - } - def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) { - RedTree(x, xv, tl, tr.black) - } else if (isBlackTree(tl)) { - balance(x, xv, tl.red, tr) - } else if (isRedTree(tl) && isBlackTree(tl.right)) { - RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr)) - } else { - sys.error("Defect: invariance violation") - } - def delLeft = if (isBlackTree(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedTree(tree.key, tree.value, del(tree.left, k), tree.right) - def delRight = if (isBlackTree(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedTree(tree.key, tree.value, tree.left, del(tree.right, k)) - def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq null) { - tr - } else if (tr eq null) { - tl - } else if (isRedTree(tl) && isRedTree(tr)) { - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) { - RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right)) - } else { - RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right)) - } - } else if (isBlackTree(tl) && isBlackTree(tr)) { - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) { - RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, bc.right, tr.right)) - } else { - balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right)) - } - } else if (isRedTree(tr)) { - RedTree(tr.key, tr.value, append(tl, tr.left), tr.right) - } else if (isRedTree(tl)) { - RedTree(tl.key, tl.value, tl.left, append(tl.right, tr)) - } else { - sys.error("unmatched tree on append: " + tl + ", " + tr) - } - - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) delLeft - else if (cmp > 0) delRight - else append(tree.left, tree.right) - } - - private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) - val newLeft = doFrom(tree.left, from) - if (newLeft eq tree.left) tree - else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false) - else rebalance(tree, newLeft, tree.right) - } - private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(to, tree.key)) return doTo(tree.left, to) - val newRight = doTo(tree.right, to) - if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) - else rebalance(tree, tree.left, newRight) - } - private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) - val newRight = doUntil(tree.right, until) - if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, 
tree.value, overwrite = false) - else rebalance(tree, tree.left, newRight) - } - private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) - if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) - val newLeft = doFrom(tree.left, from) - val newRight = doUntil(tree.right, until) - if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) - else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) - else rebalance(tree, newLeft, newRight) - } - - private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { - if (n <= 0) return tree - if (n >= this.count(tree)) return null - val count = this.count(tree.left) - if (n > count) return doDrop(tree.right, n - count - 1) - val newLeft = doDrop(tree.left, n) - if (newLeft eq tree.left) tree - else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, overwrite = false) - else rebalance(tree, newLeft, tree.right) - } - private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { - if (n <= 0) return null - if (n >= this.count(tree)) return tree - val count = this.count(tree.left) - if (n <= count) return doTake(tree.left, n) - val newRight = doTake(tree.right, n - count - 1) - if (newRight eq tree.right) tree - else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, overwrite = false) - else rebalance(tree, tree.left, newRight) - } - private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = { - if (tree eq null) return null - val count = this.count(tree.left) - if (from > count) return doSlice(tree.right, from - count - 1, until - count - 1) - if (until <= count) return doSlice(tree.left, from, until) - val newLeft = doDrop(tree.left, from) - val newRight = doTake(tree.right, until - count - 1) - if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, overwrite = false) - else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, overwrite = false) - else rebalance(tree, newLeft, newRight) - } - - // The zipper returned might have been traversed left-most (always the left child) - // or right-most (always the right child). Left trees are traversed right-most, - // and right trees are traversed leftmost. - - // Returns the zipper for the side with deepest black nodes depth, a flag - // indicating whether the trees were unbalanced at all, and a flag indicating - // whether the zipper was traversed left-most or right-most. 
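A minimal sketch of the counting trick that nth, doDrop, doTake and doSlice above rely on, written against a hypothetical size-augmented Node type (Node, size and nth below are illustrative names, not the deleted implementation): the size of the left subtree decides whether the n-th element lies to the left, to the right, or at the current node.

object OrderStatisticSketch {
  final case class Node(key: Int, left: Option[Node], right: Option[Node]) {
    val size: Int = 1 + left.fold(0)(_.size) + right.fold(0)(_.size)
  }

  // Precondition: 0 <= n < t.size
  @annotation.tailrec
  def nth(t: Node, n: Int): Node = {
    val leftSize = t.left.fold(0)(_.size)
    if (n < leftSize) nth(t.left.get, n)                      // target is in the left subtree
    else if (n > leftSize) nth(t.right.get, n - leftSize - 1) // skip the left subtree and this node
    else t                                                    // this node is the n-th element
  }

  def main(args: Array[String]): Unit = {
    val root = Node(2, Some(Node(1, None, None)), Some(Node(3, None, None)))
    println((0 to 2).map(i => nth(root, i).key))  // Vector(1, 2, 3)
  }
}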
- - // If the trees were balanced, returns an empty zipper - private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (NList[Tree[A, B]], Boolean, Boolean, Int) = { - import NList.cons - // Once a side is found to be deeper, unzip it to the bottom - def unzip(zipper: NList[Tree[A, B]], leftMost: Boolean): NList[Tree[A, B]] = { - val next = if (leftMost) zipper.head.left else zipper.head.right - if (next eq null) zipper - else unzip(cons(next, zipper), leftMost) - } - - // Unzip left tree on the rightmost side and right tree on the leftmost side until one is - // found to be deeper, or the bottom is reached - def unzipBoth(left: Tree[A, B], - right: Tree[A, B], - leftZipper: NList[Tree[A, B]], - rightZipper: NList[Tree[A, B]], - smallerDepth: Int): (NList[Tree[A, B]], Boolean, Boolean, Int) = { - if (isBlackTree(left) && isBlackTree(right)) { - unzipBoth(left.right, right.left, cons(left, leftZipper), cons(right, rightZipper), smallerDepth + 1) - } else if (isRedTree(left) && isRedTree(right)) { - unzipBoth(left.right, right.left, cons(left, leftZipper), cons(right, rightZipper), smallerDepth) - } else if (isRedTree(right)) { - unzipBoth(left, right.left, leftZipper, cons(right, rightZipper), smallerDepth) - } else if (isRedTree(left)) { - unzipBoth(left.right, right, cons(left, leftZipper), rightZipper, smallerDepth) - } else if ((left eq null) && (right eq null)) { - (null, true, false, smallerDepth) - } else if ((left eq null) && isBlackTree(right)) { - val leftMost = true - (unzip(cons(right, rightZipper), leftMost), false, leftMost, smallerDepth) - } else if (isBlackTree(left) && (right eq null)) { - val leftMost = false - (unzip(cons(left, leftZipper), leftMost), false, leftMost, smallerDepth) - } else { - sys.error("unmatched trees in unzip: " + left + ", " + right) - } - } - unzipBoth(left, right, null, null, 0) - } - - private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = { - // This is like drop(n-1), but only counting black nodes - @tailrec - def findDepth(zipper: NList[Tree[A, B]], depth: Int): NList[Tree[A, B]] = - if (zipper eq null) { - sys.error("Defect: unexpected empty zipper while computing range") - } else if (isBlackTree(zipper.head)) { - if (depth == 1) zipper else findDepth(zipper.tail, depth - 1) - } else { - findDepth(zipper.tail, depth) - } - - // Blackening the smaller tree avoids balancing problems on union; - // this can't be done later, though, or it would change the result of compareDepth - val blkNewLeft = blacken(newLeft) - val blkNewRight = blacken(newRight) - val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight) - - if (levelled) { - BlackTree(tree.key, tree.value, blkNewLeft, blkNewRight) - } else { - val zipFrom = findDepth(zipper, smallerDepth) - val union = if (leftMost) { - RedTree(tree.key, tree.value, blkNewLeft, zipFrom.head) - } else { - RedTree(tree.key, tree.value, zipFrom.head, blkNewRight) - } - val zippedTree = NList.foldLeft(zipFrom.tail, union: Tree[A, B]) { (tree, node) => - if (leftMost) - balanceLeft(isBlackTree(node), node.key, node.value, tree, node.right) - else - balanceRight(isBlackTree(node), node.key, node.value, node.left, tree) - } - zippedTree - } - } - - // Null optimized list implementation for tree rebalancing. null presents Nil. 
- private[this] final class NList[A](val head: A, val tail: NList[A]) - - private[this] final object NList { - - def cons[B](x: B, xs: NList[B]): NList[B] = new NList(x, xs) - - def foldLeft[A, B](xs: NList[A], z: B)(op: (B, A) => B): B = { - var acc = z - var these = xs - while (these ne null) { - acc = op(acc, these.head) - these = these.tail - } - acc - } - - } - - /* - * Forcing direct fields access using the @inline annotation helps speed up - * various operations (especially smallest/greatest and update/delete). - * - * Unfortunately the direct field access is not guaranteed to work (but - * works on the current implementation of the Scala compiler). - * - * An alternative is to implement the these classes using plain old Java code... - */ - sealed abstract class Tree[A, +B]( - @(inline @getter) final val key: A, - @(inline @getter) final val value: B, - @(inline @getter) final val left: Tree[A, B], - @(inline @getter) final val right: Tree[A, B]) - extends Serializable { - @(inline @getter) final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right) - def black: Tree[A, B] - def red: Tree[A, B] - } - final class RedTree[A, +B](key: A, - value: B, - left: Tree[A, B], - right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { - override def black: Tree[A, B] = BlackTree(key, value, left, right) - override def red: Tree[A, B] = this - override def toString: String = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")" - } - final class BlackTree[A, +B](key: A, - value: B, - left: Tree[A, B], - right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { - override def black: Tree[A, B] = this - override def red: Tree[A, B] = RedTree(key, value, left, right) - override def toString: String = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")" - } - - object RedTree { - @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right) - def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right)) - } - object BlackTree { - @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right) - def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) - } - - private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(implicit ordering: Ordering[A]) extends Iterator[R] { - protected[this] def nextResult(tree: Tree[A, B]): R - - override def hasNext: Boolean = lookahead ne null - - override def next: R = lookahead match { - case null => - throw new NoSuchElementException("next on empty iterator") - case tree => - lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) - nextResult(tree) - } - - @tailrec - private[this] def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext() - else if (tree.left eq null) tree - else findLeftMostOrPopOnEmpty(goLeft(tree)) - - private[this] def pushNext(tree: Tree[A, B]) { - try { - stackOfNexts(index) = tree - index += 1 - } catch { - case _: ArrayIndexOutOfBoundsException => - /* - * Either the tree became unbalanced or we calculated the maximum height incorrectly. - * To avoid crashing the iterator we expand the path array. Obviously this should never - * happen... - * - * An exception handler is used instead of an if-condition to optimize the normal path. - * This makes a large difference in iteration speed! 
- */ - assert(index >= stackOfNexts.length) - stackOfNexts :+= null - pushNext(tree) - } - } - private[this] def popNext(): Tree[A, B] = if (index == 0) null else { - index -= 1 - stackOfNexts(index) - } - - private[this] var stackOfNexts = if (root eq null) null else { - /* - * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] - * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. - * - * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) - * - * Although we don't store the deepest nodes in the path during iteration, - * we potentially do so in `startFrom`. - */ - val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - new Array[Tree[A, B]](maximumHeight) - } - private[this] var index = 0 - private[this] var lookahead: Tree[A, B] = start map startFrom getOrElse findLeftMostOrPopOnEmpty(root) - - /** - * Find the leftmost subtree whose key is equal to the given key, or if no such thing, - * the leftmost subtree with the key that would be "next" after it according - * to the ordering. Along the way build up the iterator's path stack so that "next" - * functionality works. - */ - private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { - @tailrec def find(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext() - else find( - if (ordering.lteq(key, tree.key)) goLeft(tree) - else goRight(tree) - ) - find(root) - } - - private[this] def goLeft(tree: Tree[A, B]) = { - pushNext(tree) - tree.left - } - - private[this] def goRight(tree: Tree[A, B]) = tree.right - } - - private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { - override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) - } - - private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { - override def nextResult(tree: Tree[A, B]) = tree.key - } - - private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { - override def nextResult(tree: Tree[A, B]) = tree.value - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Seq.scala b/tests/scala2-library/src/library/scala/collection/immutable/Seq.scala deleted file mode 100644 index 38855ca6b060..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Seq.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import mutable.Builder -import parallel.immutable.ParSeq - -/** A subtrait of `collection.Seq` which represents sequences - * that are guaranteed immutable. 
- * - * $seqInfo - * @define Coll `immutable.Seq` - * @define coll immutable sequence - */ -trait Seq[+A] extends Iterable[A] -// with GenSeq[A] - with scala.collection.Seq[A] - with GenericTraversableTemplate[A, Seq] - with SeqLike[A, Seq[A]] - with Parallelizable[A, ParSeq[A]] -{ - override def companion: GenericCompanion[Seq] = Seq - override def toSeq: Seq[A] = this - override def seq: Seq[A] = this - protected[this] override def parCombiner = ParSeq.newCombiner[A] // if `immutable.SeqLike` gets introduced, please move this there! -} - -/** $factoryInfo - * @define Coll `immutable.Seq` - * @define coll immutable sequence - */ -object Seq extends SeqFactory[Seq] { - /** genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Seq[A]] = new mutable.ListBuffer -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Set.scala b/tests/scala2-library/src/library/scala/collection/immutable/Set.scala deleted file mode 100644 index f26e4e864a89..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Set.scala +++ /dev/null @@ -1,228 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import parallel.immutable.ParSet - -/** A generic trait for immutable sets. - * $setNote - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - * @author Martin Odersky - * @define Coll `immutable.Set` - * @define coll immutable set - */ -trait Set[A] extends Iterable[A] -// with GenSet[A] - with scala.collection.Set[A] - with GenericSetTemplate[A, Set] - with SetLike[A, Set[A]] - with Parallelizable[A, ParSet[A]] -{ - override def companion: GenericCompanion[Set] = Set - - - /** Returns this $coll as an immutable set, perhaps accepting a - * wider range of elements. Since it already is an - * immutable set, it will only be rebuilt if the underlying structure - * cannot be expanded to include arbitrary element types. - * For instance, `BitSet` and `SortedSet` will be rebuilt, as - * they require `Int` and sortable elements respectively. - * - * When in doubt, the set will be rebuilt. Rebuilt sets never - * need to be rebuilt again. - */ - override def toSet[B >: A]: Set[B] = { - // This way of building sets typically has the best benchmarks, surprisingly! - val sb = Set.newBuilder[B] - foreach(sb += _) - sb.result() - } - - override def seq: Set[A] = this - protected[this] override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there! 
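The toSet documented above is the user-visible side of widening: immutable.Set is invariant in its element type, so going from Set[Int] to Set[Any] takes an explicit toSet[B >: A] call. A minimal sketch, assuming the behaviour described above; value and object names are illustrative.

object ToSetWideningSketch {
  def main(args: Array[String]): Unit = {
    val ints: Set[Int] = Set(1, 2, 3)

    // val anys: Set[Any] = ints        // does not compile: Set is invariant in A
    val anys: Set[Any] = ints.toSet[Any] // explicit widening via toSet[B >: A]

    println(anys + "four")  // Set(1, 2, 3, four)
    println(anys eq ints)   // often true: the small-set classes implement toSet as a cast
  }
}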
-} - -/** $factoryInfo - * @define Coll `immutable.Set` - * @define coll immutable set - */ -object Set extends ImmutableSetFactory[Set] { - /** $setCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A] - - /** An optimized representation for immutable empty sets */ - @SerialVersionUID(-2443710944435909512L) - private object EmptySet extends AbstractSet[Any] with Set[Any] with Serializable { - override def size: Int = 0 - def contains(elem: Any): Boolean = false - def + (elem: Any): Set[Any] = new Set1(elem) - def - (elem: Any): Set[Any] = this - def iterator: Iterator[Any] = Iterator.empty - override def foreach[U](f: Any => U): Unit = () - override def toSet[B >: Any]: Set[B] = this.asInstanceOf[Set[B]] - } - private[collection] def emptyInstance: Set[Any] = EmptySet - - /** An optimized representation for immutable sets of size 1 */ - @SerialVersionUID(1233385750652442003L) - class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with Set[A] with Serializable { - override def size: Int = 1 - def contains(elem: A): Boolean = - elem == elem1 - def + (elem: A): Set[A] = - if (contains(elem)) this - else new Set2(elem1, elem) - def - (elem: A): Set[A] = - if (elem == elem1) Set.empty - else this - def iterator: Iterator[A] = - Iterator(elem1) - override def foreach[U](f: A => U): Unit = { - f(elem1) - } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) - } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) - } - override def find(@deprecatedName('f) p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else None - } - override def head: A = elem1 - override def tail: Set[A] = Set.empty - // Why is Set1 non-final? Need to fix that! - @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set1[B]] - } - - /** An optimized representation for immutable sets of size 2 */ - @SerialVersionUID(-6443011234944830092L) - class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with Set[A] with Serializable { - override def size: Int = 2 - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 - def + (elem: A): Set[A] = - if (contains(elem)) this - else new Set3(elem1, elem2, elem) - def - (elem: A): Set[A] = - if (elem == elem1) new Set1(elem2) - else if (elem == elem2) new Set1(elem1) - else this - def iterator: Iterator[A] = - Iterator(elem1, elem2) - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2) - } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) || p(elem2) - } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) && p(elem2) - } - override def find(@deprecatedName('f) p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set1(elem2) - // Why is Set2 non-final? Need to fix that! 
- @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set2[B]] - } - - /** An optimized representation for immutable sets of size 3 */ - @SerialVersionUID(-3590273538119220064L) - class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with Set[A] with Serializable { - override def size: Int = 3 - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 || elem == elem3 - def + (elem: A): Set[A] = - if (contains(elem)) this - else new Set4(elem1, elem2, elem3, elem) - def - (elem: A): Set[A] = - if (elem == elem1) new Set2(elem2, elem3) - else if (elem == elem2) new Set2(elem1, elem3) - else if (elem == elem3) new Set2(elem1, elem2) - else this - def iterator: Iterator[A] = - Iterator(elem1, elem2, elem3) - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2); f(elem3) - } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) || p(elem2) || p(elem3) - } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) && p(elem2) && p(elem3) - } - override def find(@deprecatedName('f) p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else if (p(elem3)) Some(elem3) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set2(elem2, elem3) - // Why is Set3 non-final? Need to fix that! - @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set3[B]] - } - - /** An optimized representation for immutable sets of size 4 */ - @SerialVersionUID(-3622399588156184395L) - class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with Set[A] with Serializable { - override def size: Int = 4 - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 - def + (elem: A): Set[A] = - if (contains(elem)) this - else new HashSet[A] + elem1 + elem2 + elem3 + elem4 + elem - def - (elem: A): Set[A] = - if (elem == elem1) new Set3(elem2, elem3, elem4) - else if (elem == elem2) new Set3(elem1, elem3, elem4) - else if (elem == elem3) new Set3(elem1, elem2, elem4) - else if (elem == elem4) new Set3(elem1, elem2, elem3) - else this - def iterator: Iterator[A] = - Iterator(elem1, elem2, elem3, elem4) - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2); f(elem3); f(elem4) - } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) || p(elem2) || p(elem3) || p(elem4) - } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) && p(elem2) && p(elem3) && p(elem4) - } - override def find(@deprecatedName('f) p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else if (p(elem3)) Some(elem3) - else if (p(elem4)) Some(elem4) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set3(elem2, elem3, elem4) - // Why is Set4 non-final? Need to fix that! 
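A small sketch of the size progression implemented by these classes: up to four elements the set stays in the specialised Set1..Set4 representations, and the fifth element moves it into a HashSet, as the Set4.+ above shows. Assumes a Scala 2.12-era library; the printed class names are illustrative and may differ slightly across versions.

object SmallSetSketch {
  def main(args: Array[String]): Unit = {
    // scanLeft keeps every intermediate set: empty, then sizes 1 through 5
    val steps = (1 to 5).scanLeft(Set.empty[Int])(_ + _)
    steps.foreach { s =>
      println(s"size ${s.size}: ${s.getClass.getSimpleName}")
    }
    // Expected shape of the output:
    //   size 0: EmptySet$ ... size 1: Set1 ... size 4: Set4 ... size 5: HashTrieSet
  }
}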
- @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set4[B]] - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/immutable/SetProxy.scala b/tests/scala2-library/src/library/scala/collection/immutable/SetProxy.scala deleted file mode 100644 index b421b48597e0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/SetProxy.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -/** This is a simple wrapper class for [[scala.collection.immutable.Set]]. - * - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. - * - * @tparam A type of the elements contained in this set proxy. - * - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") -trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] { - override def repr = this - private def newProxy[B >: A](newSelf: Set[B]): SetProxy[B] = - new AbstractSet[B] with SetProxy[B] { val self = newSelf } - - override def empty = newProxy(self.empty) - override def + (elem: A) = newProxy(self + elem) - override def - (elem: A) = newProxy(self - elem) -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/SortedMap.scala b/tests/scala2-library/src/library/scala/collection/immutable/SortedMap.scala deleted file mode 100644 index d0a5e9de8e2f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/SortedMap.scala +++ /dev/null @@ -1,130 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import mutable.Builder - -/** A map whose keys are sorted. - * - * @tparam A the type of the keys contained in this sorted map. - * @tparam B the type of the values associated with the keys. 
- * - * @author Sean McDirmid - * @author Martin Odersky - * @version 2.8 - * @since 2.4 - * @define Coll immutable.SortedMap - * @define coll immutable sorted map - */ -trait SortedMap[A, +B] extends Map[A, B] - with scala.collection.SortedMap[A, B] - with MapLike[A, B, SortedMap[A, B]] - with SortedMapLike[A, B, SortedMap[A, B]] -{ -self => - - override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] = - SortedMap.newBuilder[A, B] - - override def empty: SortedMap[A, B] = SortedMap.empty - override def updated [B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value)) - override def keySet: immutable.SortedSet[A] = new DefaultKeyImmutableSortedSet - - protected class DefaultKeyImmutableSortedSet extends DefaultKeySortedSet with immutable.SortedSet[A] { - override def + (elem: A): SortedSet[A] = - if (this(elem)) this - else SortedSet[A]() ++ this + elem - override def - (elem: A): SortedSet[A] = - if (this(elem)) SortedSet[A]() ++ this - elem - else this - override def rangeImpl(from : Option[A], until : Option[A]) : SortedSet[A] = { - val map = self.rangeImpl(from, until) - new map.DefaultKeyImmutableSortedSet - } - override def toSet[C >: A]: Set[C] = { - // This way of building sets typically has the best benchmarks, surprisingly! - val sb = Set.newBuilder[C] - foreach(sb += _) - sb.result() - } - } - - /** Add a key/value pair to this map. - * @param kv the key/value pair - * @return A new map with the new binding added to this map - * @note needs to be overridden in subclasses - */ - def + [B1 >: B](kv: (A, B1)): SortedMap[A, B1] = throw new AbstractMethodError("SortedMap.+") - - /** Adds two or more elements to this collection and returns - * a new collection. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = - this + elem1 + elem2 ++ elems - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object. 
- */ - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = - ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) - - override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) - override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} - override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p - override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v} - } - - override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) - override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))} - override def keysIteratorFrom(start : A) = self keysIteratorFrom start - override def valuesIteratorFrom(start : A) = self valuesIteratorFrom start map f - } - -} - -/** $factoryInfo - * @define Coll immutable.SortedMap - * @define coll immutable sorted map - */ -object SortedMap extends ImmutableSortedMapFactory[SortedMap] { - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B] - def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B] - - private[collection] trait Default[A, +B] extends SortedMap[A, B] with scala.collection.SortedMap.Default[A, B] { - self => - override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = { - val b = SortedMap.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } - - override def - (key: A): SortedMap[A, B] = { - val b = newBuilder - for (kv <- this; if kv._1 != key) b += kv - b.result() - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/SortedSet.scala b/tests/scala2-library/src/library/scala/collection/immutable/SortedSet.scala deleted file mode 100644 index 75b2b1f4dca2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/SortedSet.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ - -/** A subtrait of `collection.SortedSet` which represents sorted sets - * which cannot be mutated. - * - * @author Sean McDirmid - * @author Martin Odersky - * @version 2.8 - * @since 2.4 - * @define Coll `immutable.SortedSet` - * @define coll immutable sorted set - */ -trait SortedSet[A] extends Set[A] with scala.collection.SortedSet[A] with SortedSetLike[A, SortedSet[A]] { - /** Needs to be overridden in subclasses. 
*/ - override def empty: SortedSet[A] = SortedSet.empty[A] -} - -/** $factoryInfo - * @define Coll `immutable.SortedSet` - * @define coll immutable sorted set - */ -object SortedSet extends ImmutableSortedSetFactory[SortedSet] { - /** $sortedSetCanBuildFromInfo */ - def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A] - def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A] - // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific - override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Stack.scala b/tests/scala2-library/src/library/scala/collection/immutable/Stack.scala deleted file mode 100644 index 02bdadb5dd11..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Stack.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import mutable.{ ArrayBuffer, Builder } - -/** $factoryInfo - * @define Coll `immutable.Stack` - * @define coll immutable stack - */ -object Stack extends SeqFactory[Stack] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Stack[A]] = new ArrayBuffer[A] mapResult (buf => new Stack(buf.toList)) -} - -/** This class implements immutable stacks using a list-based data - * structure. - * - * '''Note:''' This class exists only for historical reason and as an - * analogue of mutable stacks. - * Instead of an immutable stack you can just use a list. - * - * @tparam A the type of the elements contained in this stack. - * - * @author Matthias Zenger - * @version 1.0, 10/07/2003 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_stacks "Scala's Collection Library overview"]] - * section on `Immutable stacks` for more information. - * - * @define Coll `immutable.Stack` - * @define coll immutable stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1976480595012942526L) -@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. Use List instead: stack push x becomes x :: list; stack.pop is list.tail.", "2.11.0") -class Stack[+A] protected (protected val elems: List[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, Stack] - with LinearSeqOptimized[A, Stack[A]] - with Serializable { - override def companion: GenericCompanion[Stack] = Stack - - def this() = this(Nil) - - /** Checks if this stack is empty. - * - * @return true, iff there is no element on the stack. - */ - override def isEmpty: Boolean = elems.isEmpty - - override def head = elems.head - override def tail = new Stack(elems.tail) - - /** Push an element on the stack. - * - * @param elem the element to push on the stack. - * @return the stack with the new element on top. 
- */ - def push[B >: A](elem: B): Stack[B] = new Stack(elem :: elems) - - /** Push a sequence of elements onto the stack. The last element - * of the sequence will be on top of the new stack. - * - * @param elems the element sequence. - * @return the stack with the new elements on top. - */ - def push[B >: A](elem1: B, elem2: B, elems: B*): Stack[B] = - this.push(elem1).push(elem2).pushAll(elems) - - /** Push all elements provided by the given traversable object onto - * the stack. The last element returned by the traversable object - * will be on top of the new stack. - * - * @param xs the iterator object. - * @return the stack with the new elements on top. - */ - def pushAll[B >: A](xs: TraversableOnce[B]): Stack[B] = - ((this: Stack[B]) /: xs.toIterator)(_ push _) - - /** Returns the top element of the stack. An error is signaled if - * there is no element on the stack. - * - * @throws java.util.NoSuchElementException - * @return the top element. - */ - def top: A = - if (!isEmpty) elems.head - else throw new NoSuchElementException("top of empty stack") - - /** Removes the top element from the stack. - * Note: should return `(A, Stack[A])` as for queues (mics) - * - * @throws java.util.NoSuchElementException - * @return the new stack without the former top element. - */ - def pop: Stack[A] = - if (!isEmpty) new Stack(elems.tail) - else throw new NoSuchElementException("pop of empty stack") - - def pop2: (A, Stack[A]) = - if (!isEmpty) (elems.head, new Stack(elems.tail)) - else throw new NoSuchElementException("pop of empty stack") - - override def reverse: Stack[A] = new Stack(elems.reverse) - - /** Returns an iterator over all elements on the stack. The iterator - * issues elements in the reversed order they were inserted into the - * stack (LIFO order). - * - * @return an iterator over all stack elements. - */ - override def iterator: Iterator[A] = elems.iterator - - /** Returns a string representation of this stack. - */ - override def toString() = elems.mkString("Stack(", ", ", ")") -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Stream.scala b/tests/scala2-library/src/library/scala/collection/immutable/Stream.scala deleted file mode 100644 index 49e919cd916a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Stream.scala +++ /dev/null @@ -1,1287 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import mutable.{Builder, StringBuilder, LazyBuilder} -import scala.annotation.tailrec -import Stream.cons -import scala.language.implicitConversions - -/** The class `Stream` implements lazy lists where elements - * are only evaluated when they are needed. Here is an example: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 } - * - * fibs take 5 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * }}} - * - * The `Stream` class also employs memoization such that previously computed - * values are converted from `Stream` elements to concrete values of type `A`. 
- * To illustrate, we will alter body of the `fibs` value above and take some - * more values: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip( - * fibs.tail).map(n => { - * println("Adding %d and %d".format(n._1, n._2)) - * n._1 + n._2 - * }) - * - * fibs take 5 foreach println - * fibs take 6 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // Adding 0 and 1 - * // 1 - * // Adding 1 and 1 - * // 2 - * // Adding 1 and 2 - * // 3 - * - * // And then prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * // Adding 2 and 3 - * // 5 - * }}} - * - * There are a number of subtle points to the above example. - * - * - The definition of `fibs` is a `val` not a method. The memoization of the - * `Stream` requires us to have somewhere to store the information and a `val` - * allows us to do that. - * - * - While the `Stream` is actually being modified during access, this does not - * change the notion of its immutability. Once the values are memoized they do - * not change and values that have yet to be memoized still "exist", they - * simply haven't been realized yet. - * - * - One must be cautious of memoization; you can very quickly eat up large - * amounts of memory if you're not careful. The reason for this is that the - * memoization of the `Stream` creates a structure much like - * [[scala.collection.immutable.List]]. So long as something is holding on to - * the head, the head holds on to the tail, and so it continues recursively. - * If, on the other hand, there is nothing holding on to the head (e.g. we used - * `def` to define the `Stream`) then once it is no longer being used directly, - * it disappears. - * - * - Note that some operations, including [[drop]], [[dropWhile]], - * [[flatMap]] or [[collect]] may process a large number of intermediate - * elements before returning. These necessarily hold onto the head, since - * they are methods on `Stream`, and a stream holds its own head. For - * computations of this sort where memoization is not desired, use - * `Iterator` when possible. - * - * {{{ - * // For example, let's build the natural numbers and do some silly iteration - * // over them. - * - * // We'll start with a silly iteration - * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { - * // Stop after 200,000 - * if (i < 200001) { - * if (i % 50000 == 0) println(s + i) - * loop(s, iter.next, iter) - * } - * } - * - * // Our first Stream definition will be a val definition - * val stream1: Stream[Int] = { - * def loop(v: Int): Stream[Int] = v #:: loop(v + 1) - * loop(0) - * } - * - * // Because stream1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the Stream is held in stream1 - * val it1 = stream1.iterator - * loop("Iterator1: ", it1.next, it1) - * - * // We can redefine this Stream such that all we have is the Iterator left - * // and allow the Stream to be garbage collected as required. Using a def - * // to provide the Stream ensures that no val is holding onto the head as - * // is the case with stream1 - * def stream2: Stream[Int] = { - * def loop(v: Int): Stream[Int] = v #:: loop(v + 1) - * loop(0) - * } - * val it2 = stream2.iterator - * loop("Iterator2: ", it2.next, it2) - * - * // And, of course, we don't actually need a Stream at all for such a simple - * // problem. There's no reason to use a Stream if you don't actually need - * // one. 
- * val it3 = new Iterator[Int] { - * var i = -1 - * def hasNext = true - * def next(): Int = { i += 1; i } - * } - * loop("Iterator3: ", it3.next, it3) - * }}} - * - * - The fact that `tail` works at all is of interest. In the definition of - * `fibs` we have an initial `(0, 1, Stream(...))` so `tail` is deterministic. - * If we defined `fibs` such that only `0` were concretely known then the act - * of determining `tail` would require the evaluation of `tail` which would - * cause an infinite recursion and stack overflow. If we define a definition - * where the tail is not initially computable then we're going to have an - * infinite recursion: - * {{{ - * // The first time we try to access the tail we're going to need more - * // information which will require us to recurse, which will require us to - * // recurse, which... - * lazy val sov: Stream[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } - * }}} - * - * The definition of `fibs` above creates a larger number of objects than - * necessary depending on how you might want to implement it. The following - * implementation provides a more "cost effective" implementation due to the - * fact that it has a more direct route to the numbers themselves: - * - * {{{ - * lazy val fib: Stream[Int] = { - * def loop(h: Int, n: Int): Stream[Int] = h #:: loop(n, h + n) - * loop(1, 1) - * } - * }}} - * - * Note that `mkString` forces evaluation of a `Stream`, but `addString` does - * not. In both cases, a `Stream` that is or ends in a cycle - * (e.g. `lazy val s: Stream[Int] = 0 #:: s`) will convert additional trips - * through the cycle to `...`. Additionally, `addString` will display an - * un-memoized tail as `?`. - * - * @tparam A the type of the elements contained in this stream. - * - * @author Martin Odersky, Matthias Zenger - * @version 1.1 08/08/03 - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#streams "Scala's Collection Library overview"]] - * section on `Streams` for more information. - - * @define naturalsEx def naturalsFrom(i: Int): Stream[Int] = i #:: naturalsFrom(i + 1) - * @define Coll `Stream` - * @define coll stream - * @define orderDependent - * @define orderDependentFold - * @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections. - */ -sealed abstract class Stream[+A] extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, Stream] - with LinearSeqOptimized[A, Stream[A]] - with Serializable { self => - - override def companion: GenericCompanion[Stream] = Stream - - /** Indicates whether or not the `Stream` is empty. - * - * @return `true` if the `Stream` is empty and `false` otherwise. - */ - def isEmpty: Boolean - - /** Gives constant time access to the first element of this `Stream`. Using - * the `fibs` example from earlier: - * - * {{{ - * println(fibs head) - * // prints - * // 0 - * }}} - * - * @return The first element of the `Stream`. - * @throws java.util.NoSuchElementException if the stream is empty. - */ - def head: A - - /** A stream consisting of the remaining elements of this stream after the - * first one. - * - * Note that this method does not force evaluation of the `Stream` but merely - * returns the lazy result. - * - * @return The tail of the `Stream`. - * @throws UnsupportedOperationException if the stream is empty. - */ - def tail: Stream[A] - - /** Is the tail of this stream defined? 
*/ - protected def tailDefined: Boolean - - // Implementation of abstract method in Traversable - - // New methods in Stream - - /** The stream resulting from the concatenation of this stream with the argument stream. - * @param rest The stream that gets appended to this stream - * @return The stream containing elements of this stream and the traversable object. - */ - def append[B >: A](rest: => TraversableOnce[B]): Stream[B] = - if (isEmpty) rest.toStream else cons(head, tail append rest) - - /** Forces evaluation of the whole stream and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. - */ - def force: Stream[A] = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those = this - if (!these.isEmpty) these = these.tail - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } - - /** Prints elements of this stream one by one, separated by commas. */ - def print() { print(", ") } - - /** Prints elements of this stream one by one, separated by `sep`. - * @param sep The separator string printed between consecutive elements. - */ - def print(sep: String) { - def loop(these: Stream[A], start: String) { - Console.print(start) - if (these.isEmpty) Console.print("empty") - else { - Console.print(these.head) - loop(these.tail, sep) - } - } - loop(this, "") - } - - /** Returns the length of this `Stream`. - * - * @note In order to compute the length of the `Stream`, it must first be - * fully realized, which could cause the complete evaluation of an infinite - * series, assuming that's what your `Stream` represents. - * - * @return The length of this `Stream`. - */ - override def length: Int = { - var len = 0 - var left = this - while (!left.isEmpty) { - len += 1 - left = left.tail - } - len - } - - // It's an imperfect world, but at least we can bottle up the - // imperfection in a capsule. - @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That] - @inline private def asStream[B](x: AnyRef): Stream[B] = x.asInstanceOf[Stream[B]] - @inline private def isStreamBuilder[B, That](bf: CanBuildFrom[Stream[A], B, That]) = - bf(repr).isInstanceOf[Stream.StreamBuilder[_]] - - // Overridden methods from Traversable - - override def toStream: Stream[A] = this - - override def hasDefiniteSize: Boolean = isEmpty || { - if (!tailDefined) false - else { - // Two-iterator trick (2x & 1x speed) for cycle detection. - var those = this - var these = tail - while (those ne these) { - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (those eq these) return false - those = those.tail - } - false // Cycle detected - } - } - - /** Create a new stream which contains all elements of this stream followed by - * all elements of Traversable `that`. - * - * @note It's subtle why this works. 
We know that if the target type of the - * [[scala.collection.mutable.Builder]] `That` is either a `Stream`, or one of - * its supertypes, or undefined, then `StreamBuilder` will be chosen for the - * implicit. We recognize that fact and optimize to get more laziness. - * - * @note This method doesn't cause the `Stream` to be fully realized but it - * should be noted that using the `++` operator from another collection type - * could cause infinite realization of a `Stream`. For example, referring to - * the definition of `fibs` in the preamble, the following would never return: - * `List(BigInt(12)) ++ fibs`. - * - * @tparam B The element type of the returned collection.'''That''' - * @param that The [[scala.collection.GenTraversableOnce]] to be concatenated - * to this `Stream`. - * @return A new collection containing the result of concatenating `this` with - * `that`. - */ - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - // we assume there is no other builder factory on streams and therefore know that That = Stream[A] - if (isStreamBuilder(bf)) asThat( - if (isEmpty) that.toStream - else cons(head, asStream[A](tail ++ that)) - ) - else super.++(that)(bf) - - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - if (isStreamBuilder(bf)) asThat(cons(elem, this)) - else super.+:(elem)(bf) - - /** - * Create a new stream which contains all intermediate results of applying the - * operator to subsequent elements left to right. `scanLeft` is analogous to - * `foldLeft`. - * - * @note This works because the target type of the - * [[scala.collection.mutable.Builder]] `That` is a `Stream`. - * - * @param z The initial value for the scan. - * @param op A function that will apply operations to successive values in the - * `Stream` against previous accumulated results. - * @return A new collection containing the modifications from the application - * of `op`. - */ - override final def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - if (isStreamBuilder(bf)) asThat( - if (isEmpty) Stream(z) - else cons(z, asStream[B](tail.scanLeft(op(z, head))(op))) - ) - else super.scanLeft(z)(op)(bf) - - /** Returns the stream resulting from applying the given function `f` to each - * element of this stream. This returns a lazy `Stream` such that it does not - * need to be fully realized. - * - * @example {{{ - * $naturalsEx - * naturalsFrom(1).map(_ + 10) take 5 mkString(", ") - * // produces: "11, 12, 13, 14, 15" - * }}} - * - * @tparam B The element type of the returned collection '''That'''. - * @param f function to apply to each element. - * @return `f(a,,0,,), ..., f(a,,n,,)` if this sequence is `a,,0,,, ..., a,,n,,`. - */ - override final def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { - if (isStreamBuilder(bf)) asThat( - if (isEmpty) Stream.Empty - else cons(f(head), asStream[B](tail map f)) - ) - else super.map(f)(bf) - } - - override final def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { - if (!isStreamBuilder(bf)) super.collect(pf)(bf) - else { - // this implementation avoids: - // 1) stackoverflows (could be achieved with tailrec, too) - // 2) out of memory errors for big streams (`this` reference can be eliminated from the stack) - var rest: Stream[A] = this - - // Avoids calling both `pf.isDefined` and `pf.apply`. 
- var newHead: B = null.asInstanceOf[B] - val runWith = pf.runWith((b: B) => newHead = b) - - while (rest.nonEmpty && !runWith(rest.head)) rest = rest.tail - - // without the call to the companion object, a thunk is created for the tail of the new stream, - // and the closure of the thunk will reference `this` - if (rest.isEmpty) Stream.Empty.asInstanceOf[That] - else Stream.collectedTail(newHead, rest, pf, bf).asInstanceOf[That] - } - } - - /** Applies the given function `f` to each element of this stream, then - * concatenates the results. As with `map` this function does not need to - * realize the entire `Stream` but continues to keep it as a lazy `Stream`. - * - * @example {{{ - * // Let's create a Stream of Vectors, each of which contains the - * // collection of Fibonacci numbers up to the current value. We - * // can then 'flatMap' that Stream. - * - * val fibVec: Stream[Vector[Int]] = Vector(0) #:: Vector(0, 1) #:: fibVec.zip(fibVec.tail).map(n => { - * n._2 ++ Vector(n._1.last + n._2.last) - * }) - * - * fibVec take 5 foreach println - * // prints - * // Vector(0) - * // Vector(0, 1) - * // Vector(0, 1, 1) - * // Vector(0, 1, 1, 2) - * // Vector(0, 1, 1, 2, 3) - * - * // If we now want to `flatMap` across that stream by adding 10 - * // we can see what the series turns into: - * - * fibVec.flatMap(_.map(_ + 10)) take 15 mkString(", ") - * // produces: 10, 10, 11, 10, 11, 11, 10, 11, 11, 12, 10, 11, 11, 12, 13 - * }}} - * - * ''Note:'' Currently `flatMap` will evaluate as much of the Stream as needed - * until it finds a non-empty element for the head, which is non-lazy. - * - * @tparam B The element type of the returned collection '''That'''. - * @param f the function to apply on each element. - * @return `f(a,,0,,) ::: ... ::: f(a,,n,,)` if - * this stream is `[a,,0,,, ..., a,,n,,]`. 
- */ - override final def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - // we assume there is no other builder factory on streams and therefore know that That = Stream[B] - // optimisations are not for speed, but for functionality - // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - if (isStreamBuilder(bf)) asThat( - if (isEmpty) Stream.Empty - else { - // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty - var nonEmptyPrefix = this - var prefix = f(nonEmptyPrefix.head).toStream - while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) { - nonEmptyPrefix = nonEmptyPrefix.tail - if(!nonEmptyPrefix.isEmpty) - prefix = f(nonEmptyPrefix.head).toStream - } - - if (nonEmptyPrefix.isEmpty) Stream.empty - else prefix append asStream[B](nonEmptyPrefix.tail flatMap f) - } - ) - else super.flatMap(f)(bf) - - override private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { - // optimization: drop leading prefix of elems for which f returns false - // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise - var rest = this - while (!rest.isEmpty && p(rest.head) == isFlipped) rest = rest.tail - // private utility func to avoid `this` on stack (would be needed for the lazy arg) - if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) - else Stream.Empty - } - - /** A FilterMonadic which allows GC of the head of stream during processing */ - @noinline // Workaround scala/bug#9137, see https://github.com/scala/scala/pull/4284#issuecomment-73180791 - override final def withFilter(p: A => Boolean): FilterMonadic[A, Stream[A]] = new Stream.StreamWithFilter(this, p) - - /** A lazier Iterator than LinearSeqLike's. */ - override def iterator: Iterator[A] = new StreamIterator(self) - - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). - * - * @param f The treatment to apply to each element. - * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying stream as elements - * are consumed. - * @note This function will force the realization of the entire stream - * unless the `f` throws an exception. - */ - @tailrec - override final def foreach[U](f: A => U) { - if (!this.isEmpty) { - f(head) - tail.foreach(f) - } - } - - /** Stream specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `op`. - */ - @tailrec - override final def foldLeft[B](z: B)(op: (B, A) => B): B = { - if (this.isEmpty) z - else tail.foldLeft(op(z, head))(op) - } - - /** Stream specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `f`. 
- */ - override final def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else { - var reducedRes: B = this.head - var left = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail - } - reducedRes - } - } - - /** Returns all the elements of this stream that satisfy the predicate `p` - * returning of [[scala.Tuple2]] of `Stream`s obeying the partition predicate - * `p`. The order of the elements is preserved. - * - * @param p the predicate used to filter the stream. - * @return the elements of this stream satisfying `p`. - * - * @example {{{ - * $naturalsEx - * val parts = naturalsFrom(1) partition { _ % 2 == 0 } - * parts._1 take 10 mkString ", " - * // produces: "2, 4, 6, 8, 10, 12, 14, 16, 18, 20" - * parts._2 take 10 mkString ", " - * // produces: "1, 3, 5, 7, 9, 11, 13, 15, 17, 19" - * }}} - * - */ - override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_))) - - /** Returns a stream formed from this stream and the specified stream `that` - * by associating each element of the former with the element at the same - * position in the latter. - * - * If one of the two streams is longer than the other, its remaining elements - * are ignored. - * - * The return type of this function may not be obvious. The lazy aspect of - * the returned value is different than that of `partition`. In `partition` - * we get back a [[scala.Tuple2]] of two lazy `Stream`s whereas here we get - * back a single lazy `Stream` of [[scala.Tuple2]]s where the - * [[scala.Tuple2]]'s type signature is `(A1, B)`. - * - * @tparam A1 The type of the first parameter of the zipped tuple - * @tparam B The type of the second parameter of the zipped tuple - * @tparam That The type of the returned `Stream`. - * @return `Stream({a,,0,,,b,,0,,}, ..., - * {a,,min(m,n),,,b,,min(m,n),,)}` when - * `Stream(a,,0,,, ..., a,,m,,) - * zip Stream(b,,0,,, ..., b,,n,,)` is invoked. - * - * @example {{{ - * $naturalsEx - * naturalsFrom(1) zip naturalsFrom(2) take 5 foreach println - * // prints - * // (1,2) - * // (2,3) - * // (3,4) - * // (4,5) - * // (5,6) - * }}} - */ - override final def zip[A1 >: A, B, That](that: scala.collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That = - // we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)] - if (isStreamBuilder(bf)) asThat( - if (this.isEmpty || that.isEmpty) Stream.Empty - else cons((this.head, that.head), asStream[(A1, B)](this.tail zip that.tail)) - ) - else super.zip(that)(bf) - - /** Zips this iterable with its indices. `s.zipWithIndex` is equivalent to `s - * zip s.indices`. - * - * This method is much like `zip` in that it returns a single lazy `Stream` of - * [[scala.Tuple2]]. - * - * @tparam A1 The type of the first element of the [[scala.Tuple2]] in the - * resulting stream. - * @tparam That The type of the resulting `Stream`. - * @return `Stream({a,,0,,,0}, ..., {a,,n,,,n)}` - * - * @example {{{ - * $naturalsEx - * (naturalsFrom(1) zipWithIndex) take 5 foreach println - * // prints - * // (1,0) - * // (2,1) - * // (3,2) - * // (4,3) - * // (5,4) - * }}} - */ - override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Stream[A], (A1, Int), That]): That = - this.zip[A1, Int, That](Stream.from(0)) - - /** Write all defined elements of this iterable into given string builder. 
- * The written text begins with the string `start` and is finished by the string - * `end`. Inside, the string representations of defined elements (w.r.t. - * the method `toString()`) are separated by the string `sep`. The method will - * not force evaluation of undefined elements. A tail of such elements will be - * represented by a `"?"` instead. A cyclic stream is represented by a `"..."` - * at the point where the cycle repeats. - * - * @param b The [[collection.mutable.StringBuilder]] factory to which we need - * to add the string elements. - * @param start The prefix of the resulting string (e.g. "Stream(") - * @param sep The separator between elements of the resulting string (e.g. ",") - * @param end The end of the resulting string (e.g. ")") - * @return The original [[collection.mutable.StringBuilder]] containing the - * resulting string. - */ - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { - b append start - if (!isEmpty) { - b append head - var cursor = this - var n = 1 - if (cursor.tailDefined) { // If tailDefined, also !isEmpty - var scout = tail - if (scout.isEmpty) { - // Single element. Bail out early. - b append end - return b - } - if (cursor ne scout) { - cursor = scout - if (scout.tailDefined) { - scout = scout.tail - // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings - while ((cursor ne scout) && scout.tailDefined) { - b append sep append cursor.head - n += 1 - cursor = cursor.tail - scout = scout.tail - if (scout.tailDefined) scout = scout.tail - } - } - } - if (!scout.tailDefined) { // Not a cycle, scout hit an end - while (cursor ne scout) { - b append sep append cursor.head - n += 1 - cursor = cursor.tail - } - if (cursor.nonEmpty) { - b append sep append cursor.head - } - } - else { - // Cycle. - // If we have a prefix of length P followed by a cycle of length C, - // the scout will be at position (P%C) in the cycle when the cursor - // enters it at P. They'll then collide when the scout advances another - // C - (P%C) ahead of the cursor. - // If we run the scout P farther, then it will be at the start of - // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner - // starts at the beginning of the prefix, they'll collide exactly at - // the start of the loop. - var runner = this - var k = 0 - while (runner ne scout) { - runner = runner.tail - scout = scout.tail - k += 1 - } - // Now runner and scout are at the beginning of the cycle. Advance - // cursor, adding to string, until it hits; then we'll have covered - // everything once. If cursor is already at beginning, we'd better - // advance one first unless runner didn't go anywhere (in which case - // we've already looped once). - if ((cursor eq scout) && (k > 0)) { - b append sep append cursor.head - n += 1 - cursor = cursor.tail - } - while (cursor ne scout) { - b append sep append cursor.head - n += 1 - cursor = cursor.tail - } - // Subtract prefix length from total length for cycle reporting. - // (Not currently used, but probably a good idea for the future.) - n -= k - } - } - if (!cursor.isEmpty) { - // Either undefined or cyclic; we can check with tailDefined - if (!cursor.tailDefined) b append sep append "?" - else b append sep append "..." 
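// Illustrative usage sketch, assuming the Stream implementation above: the
// 2x/1x walk lets addString render a cyclic stream with "..." instead of
// looping forever.
lazy val cyc: Stream[Int] = 1 #:: 2 #:: cyc
cyc.tail.tail                              // force the cells so the cycle is materialized
val sb = new StringBuilder
cyc.addString(sb, "Stream(", ", ", ")")
sb.toString                                // "Stream(1, 2, ...)"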
- } - } - b append end - b - } - - override def mkString(sep: String): String = mkString("", sep, "") - override def mkString: String = mkString("") - override def mkString(start: String, sep: String, end: String): String = { - this.force - super.mkString(start, sep, end) - } - override def toString = super.mkString(stringPrefix + "(", ", ", ")") - - override def splitAt(n: Int): (Stream[A], Stream[A]) = (take(n), drop(n)) - - /** Returns the `n` first elements of this `Stream` as another `Stream`, or - * else the whole `Stream`, if it has less than `n` elements. - * - * The result of `take` is, again, a `Stream` meaning that it also does not - * make any needless evaluations of the `Stream` itself, delaying that until - * the usage of the resulting `Stream`. - * - * @param n the number of elements to take. - * @return the `n` first elements of this stream. - * - * @example {{{ - * $naturalsEx - * scala> naturalsFrom(5) take 5 - * res1: scala.collection.immutable.Stream[Int] = Stream(5, ?) - * - * scala> naturalsFrom(5) take 5 mkString ", " - * // produces: "5, 6, 7, 8, 9" - * }}} - */ - override def take(n: Int): Stream[A] = ( - // Note that the n == 1 condition appears redundant but is not. - // It prevents "tail" from being referenced (and its head being evaluated) - // when obtaining the last element of the result. Such are the challenges - // of working with a lazy-but-not-really sequence. - if (n <= 0 || isEmpty) Stream.empty - else if (n == 1) cons(head, Stream.empty) - else cons(head, tail take n-1) - ) - - @tailrec final override def drop(n: Int): Stream[A] = - if (n <= 0 || isEmpty) this - else tail drop n-1 - - /** A substream starting at index `from` and extending up to (but not including) - * index `until`. This returns a `Stream` that is lazily evaluated. - * - * @param from The index of the first element of the returned subsequence - * @param until The index of the element following the returned subsequence - * @return A new string containing the elements requested from `start` until - * `end`. - * - * @example {{{ - * naturalsFrom(0) slice(50, 60) mkString ", " - * // produces: "50, 51, 52, 53, 54, 55, 56, 57, 58, 59" - * }}} - */ - override def slice(from: Int, until: Int): Stream[A] = { - val lo = from max 0 - if (until <= lo || isEmpty) Stream.empty - else this drop lo take (until - lo) - } - - /** The stream without its last element. - * - * @return A new `Stream` containing everything but the last element. If your - * `Stream` represents an infinite series, this method will not return. - * - * @throws UnsupportedOperationException if the stream is empty. - */ - override def init: Stream[A] = - if (isEmpty) super.init - else if (tail.isEmpty) Stream.Empty - else cons(head, tail.init) - - /** Returns the rightmost `n` elements from this iterable. - * - * @note Take serious caution here. If the `Stream` represents an infinite - * series then this function ''will not return''. The right most elements of - * an infinite series takes an infinite amount of time to produce. - * - * @param n the number of elements to take - * @return The last `n` elements from this `Stream`. - */ - override def takeRight(n: Int): Stream[A] = { - var these: Stream[A] = this - var lead = this drop n - while (!lead.isEmpty) { - these = these.tail - lead = lead.tail - } - these - } - - /** - * @inheritdoc - * $willTerminateInf - */ - override def dropRight(n: Int): Stream[A] = { - // We make dropRight work for possibly infinite streams by carrying - // a buffer of the dropped size. 
As long as the buffer is full and the - // rest is non-empty, we can feed elements off the buffer head. When - // the rest becomes empty, the full buffer is the dropped elements. - def advance(stub0: List[A], stub1: List[A], rest: Stream[A]): Stream[A] = { - if (rest.isEmpty) Stream.empty - else if (stub0.isEmpty) advance(stub1.reverse, Nil, rest) - else cons(stub0.head, advance(stub0.tail, rest.head :: stub1, rest.tail)) - } - if (n <= 0) this - else advance((this take n).toList, Nil, this drop n) - } - - /** Returns the longest prefix of this `Stream` whose elements satisfy the - * predicate `p`. - * - * @param p the test predicate. - * @return A new `Stream` representing the values that satisfy the predicate - * `p`. - * - * @example {{{ - + naturalsFrom(0) takeWhile { _ < 5 } mkString ", " - * produces: "0, 1, 2, 3, 4" - * }}} - */ - override def takeWhile(p: A => Boolean): Stream[A] = - if (!isEmpty && p(head)) cons(head, tail takeWhile p) - else Stream.Empty - - /** Returns the a `Stream` representing the longest suffix of this iterable - * whose first element does not satisfy the predicate `p`. - * - * @note This method realizes the entire `Stream` beyond the truth value of - * the predicate `p`. - * - * @param p the test predicate. - * @return A new `Stream` representing the results of applying `p` to the - * original `Stream`. - * - * @example {{{ - * // Assume we have a Stream that takes the first 20 natural numbers - * def naturalsLt50(i: Int): Stream[Int] = i #:: { if (i < 20) naturalsLt50(i * + 1) else Stream.Empty } - * naturalsLt50(0) dropWhile { _ < 10 } - * // produces: "10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20" - * }}} - */ - override def dropWhile(p: A => Boolean): Stream[A] = { - var these: Stream[A] = this - while (!these.isEmpty && p(these.head)) these = these.tail - these - } - - /** Builds a new stream from this stream in which any duplicates (as - * determined by `==`) have been removed. Among duplicate elements, only the - * first one is retained in the resulting `Stream`. - * - * @return A new `Stream` representing the result of applying distinctness to - * the original `Stream`. - * @example {{{ - * // Creates a Stream where every element is duplicated - * def naturalsFrom(i: Int): Stream[Int] = i #:: { i #:: naturalsFrom(i + 1) } - * naturalsFrom(1) take 6 mkString ", " - * // produces: "1, 1, 2, 2, 3, 3" - * (naturalsFrom(1) distinct) take 6 mkString ", " - * // produces: "1, 2, 3, 4, 5, 6" - * }}} - */ - override def distinct: Stream[A] = { - // This should use max memory proportional to N, whereas - // recursively calling distinct on the tail is N^2. - def loop(seen: Set[A], rest: Stream[A]): Stream[A] = { - if (rest.isEmpty) rest - else if (seen(rest.head)) loop(seen, rest.tail) - else cons(rest.head, loop(seen + rest.head, rest.tail)) - } - loop(Set(), this) - } - - /** Returns a new sequence of given length containing the elements of this - * sequence followed by zero or more occurrences of given elements. - * - * @tparam B The type of the value to pad with. - * @tparam That The type contained within the resulting `Stream`. - * @param len The number of elements to pad into the `Stream`. - * @param elem The value of the type `B` to use for padding. - * @return A new `Stream` representing the collection with values padding off - * to the end. If your `Stream` represents an infinite series, this method will - * not return. 
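// Illustrative usage sketch, assuming the Stream implementation above:
// dropRight only looks n elements ahead, so it composes with other lazy
// operations on a stream whose end has not been forced.
val noLastTwo = Stream.from(1) dropRight 2
noLastTwo.take(3).toList   // List(1, 2, 3); only a bounded prefix is evaluated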
- * @example {{{ - * def naturalsFrom(i: Int): Stream[Int] = i #:: { if (i < 5) naturalsFrom(i + 1) else Stream.Empty } - * naturalsFrom(1) padTo(10, 0) foreach println - * // prints - * // 1 - * // 2 - * // 3 - * // 4 - * // 5 - * // 0 - * // 0 - * // 0 - * // 0 - * // 0 - * }}} - */ - override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { - def loop(len: Int, these: Stream[A]): Stream[B] = - if (these.isEmpty) Stream.fill(len)(elem) - else cons(these.head, loop(len - 1, these.tail)) - - if (isStreamBuilder(bf)) asThat(loop(len, this)) - else super.padTo(len, elem)(bf) - } - - /** A list consisting of all elements of this list in reverse order. - * - * @note This function must realize the entire `Stream` in order to perform - * this operation so if your `Stream` represents an infinite sequence then - * this function will never return. - * - * @return A new `Stream` containing the representing of the original `Stream` - * in reverse order. - * - * @example {{{ - * def naturalsFrom(i: Int): Stream[Int] = i #:: { if (i < 5) naturalsFrom(i + 1) else Stream.Empty } - * (naturalsFrom(1) reverse) foreach println - * // prints - * // 5 - * // 4 - * // 3 - * // 2 - * // 1 - * }}} - */ - override def reverse: Stream[A] = { - var result: Stream[A] = Stream.Empty - var these = this - while (!these.isEmpty) { - val r = Stream.consWrapper(result).#::(these.head) - r.tail // force it! - result = r - these = these.tail - } - result - } - - /** Evaluates and concatenates all elements within the `Stream` into a new - * flattened `Stream`. - * - * @tparam B The type of the elements of the resulting `Stream`. - * @return A new `Stream` of type `B` of the flattened elements of `this` - * `Stream`. - * @example {{{ - * val sov: Stream[Vector[Int]] = Vector(0) #:: Vector(0, 0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } - * sov.flatten take 10 mkString ", " - * // produces: "0, 0, 0, 0, 0, 0, 0, 0, 0, 0" - * }}} - */ - override def flatten[B](implicit asTraversable: A => /*<: Stream[A]) { - lazy val v = st - } - - private var these: LazyCell = _ - - def hasNext: Boolean = these.v.nonEmpty - def next(): A = - if (isEmpty) Iterator.empty.next() - else { - val cur = these.v - val result = cur.head - these = new LazyCell(cur.tail) - result - } - override def toStream = { - val result = these.v - these = new LazyCell(Stream.empty) - result - } - override def toList = toStream.toList -} - -/** - * The object `Stream` provides helper functions to manipulate streams. - * - * @author Martin Odersky, Matthias Zenger - * @version 1.1 08/08/03 - * @since 2.8 - */ -object Stream extends SeqFactory[Stream] { - - /** The factory for streams. - * @note Methods such as map/flatMap will not invoke the `Builder` factory, - * but will return a new stream directly, to preserve laziness. - * The new stream is then cast to the factory's result type. - * This means that every CanBuildFrom that takes a - * Stream as its From type parameter must yield a stream as its result parameter. - * If that assumption is broken, cast errors might result. - */ - class StreamCanBuildFrom[A] extends GenericCanBuildFrom[A] - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stream[A]] = new StreamCanBuildFrom[A] - - /** Creates a new builder for a stream */ - def newBuilder[A]: Builder[A, Stream[A]] = new StreamBuilder[A] - - /** A builder for streams - * @note This builder is lazy only in the sense that it does not go downs the spine - * of traversables that are added as a whole. 
If more laziness can be achieved, - * this builder should be bypassed. - */ - class StreamBuilder[A] extends LazyBuilder[A, Stream[A]] { - def result: Stream[A] = parts.toStream flatMap (_.toStream) - } - - object Empty extends Stream[Nothing] { - override def isEmpty = true - override def head = throw new NoSuchElementException("head of empty stream") - override def tail = throw new UnsupportedOperationException("tail of empty stream") - def tailDefined = false - } - - /** The empty stream */ - override def empty[A]: Stream[A] = Empty - - /** A stream consisting of given elements */ - override def apply[A](xs: A*): Stream[A] = xs.toStream - - /** A wrapper class that adds `#::` for cons and `#:::` for concat as operations - * to streams. - */ - class ConsWrapper[A](tl: => Stream[A]) { - /** Construct a stream consisting of a given first element followed by elements - * from a lazily evaluated Stream. - */ - def #::[B >: A](hd: B): Stream[B] = cons(hd, tl) - /** Construct a stream consisting of the concatenation of the given stream and - * a lazily evaluated Stream. - */ - def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix append tl - } - - /** A wrapper method that adds `#::` for cons and `#:::` for concat as operations - * to streams. - */ - implicit def consWrapper[A](stream: => Stream[A]): ConsWrapper[A] = - new ConsWrapper[A](stream) - - /** An extractor that allows to pattern match streams with `#::`. - */ - object #:: { - def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = - if (xs.isEmpty) None - else Some((xs.head, xs.tail)) - } - - /** An alternative way of building and matching Streams using Stream.cons(hd, tl). - */ - object cons { - - /** A stream consisting of a given first element and remaining elements - * @param hd The first element of the result stream - * @param tl The remaining elements of the result stream - */ - def apply[A](hd: A, tl: => Stream[A]) = new Cons(hd, tl) - - /** Maps a stream to its head and tail */ - def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = #::.unapply(xs) - } - - /** A lazy cons cell, from which streams are built. */ - @SerialVersionUID(-602202424901551803L) - final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] { - override def isEmpty = false - override def head = hd - @volatile private[this] var tlVal: Stream[A] = _ - @volatile private[this] var tlGen = tl _ - def tailDefined: Boolean = tlGen eq null - override def tail: Stream[A] = { - if (!tailDefined) - synchronized { - if (!tailDefined) { - tlVal = tlGen() - tlGen = null - } - } - - tlVal - } - - override /*LinearSeqOptimized*/ - def sameElements[B >: A](that: GenIterable[B]): Boolean = { - @tailrec def consEq(a: Cons[_], b: Cons[_]): Boolean = { - if (a.head != b.head) false - else { - a.tail match { - case at: Cons[_] => - b.tail match { - case bt: Cons[_] => (at eq bt) || consEq(at, bt) - case _ => false - } - case _ => b.tail.isEmpty - } - } - } - that match { - case that: Cons[_] => consEq(this, that) - case _ => super.sameElements(that) - } - } - } - - /** An infinite stream that repeatedly applies a given function to a start value. 
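// Illustrative usage sketch, assuming the definitions above: the tail passed
// to #:: is memoized by Cons (evaluated at most once), and #:: also works as
// an extractor in pattern matches.
val s = 1 #:: { println("tail forced"); Stream.empty[Int] }
s.tail; s.tail                                            // "tail forced" is printed only once
val first = s match { case h #:: _ => h; case _ => -1 }   // first == 1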
- * - * @param start the start value of the stream - * @param f the function that's repeatedly applied - * @return the stream returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A)(f: A => A): Stream[A] = cons(start, iterate(f(start))(f)) - - override def iterate[A](start: A, len: Int)(f: A => A): Stream[A] = - iterate(start)(f) take len - - /** - * Create an infinite stream starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the stream - * @param step the increment value of the stream - * @return the stream starting at value `start`. - */ - def from(start: Int, step: Int): Stream[Int] = - cons(start, from(start+step, step)) - - /** - * Create an infinite stream starting at `start` and incrementing by `1`. - * - * @param start the start value of the stream - * @return the stream starting at value `start`. - */ - def from(start: Int): Stream[Int] = from(start, 1) - - /** - * Create an infinite stream containing the given element expression (which - * is computed for each occurrence). - * - * @param elem the element composing the resulting stream - * @return the stream containing an infinite number of elem - */ - def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem)) - - override def fill[A](n: Int)(elem: => A): Stream[A] = - if (n <= 0) Empty else cons(elem, fill(n-1)(elem)) - - override def tabulate[A](n: Int)(f: Int => A): Stream[A] = { - def loop(i: Int): Stream[A] = - if (i >= n) Empty else cons(f(i), loop(i+1)) - loop(0) - } - - override def range[T: Integral](start: T, end: T, step: T): Stream[T] = { - val num = implicitly[Integral[T]] - import num._ - - if (if (step < zero) start <= end else end <= start) Empty - else cons(start, range(start + step, end, step)) - } - - private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean, isFlipped: Boolean) = { - cons(stream.head, stream.tail.filterImpl(p, isFlipped)) - } - - private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = { - cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]]) - } - - /** An implementation of `FilterMonadic` allowing GC of the filtered-out elements of - * the `Stream` as it is processed. - * - * Because this is not an inner class of `Stream` with a reference to the original - * head, it is now possible for GC to collect any leading and filtered-out elements - * which do not satisfy the filter, while the tail is still processing (see scala/bug#8990). 
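// Illustrative usage sketch for the generators defined above; only the forced
// prefix of each (possibly infinite) stream is evaluated.
Stream.iterate(1)(_ * 2).take(5).toList    // List(1, 2, 4, 8, 16)
Stream.from(10, 5).take(3).toList          // List(10, 15, 20)
Stream.continually("x").take(2).toList     // List("x", "x")
Stream.tabulate(4)(i => i * i).toList      // List(0, 1, 4, 9)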
- */ - private[immutable] final class StreamWithFilter[A](sl: => Stream[A], p: A => Boolean) extends FilterMonadic[A, Stream[A]] { - private var s = sl // set to null to allow GC after filtered - private lazy val filtered = { val f = s filter p; s = null; f } // don't set to null if throw during filter - - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - filtered map f - - def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - filtered flatMap f - - def foreach[U](f: A => U): Unit = - filtered foreach f - - def withFilter(q: A => Boolean): FilterMonadic[A, Stream[A]] = - new StreamWithFilter[A](filtered, q) - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/StreamView.scala b/tests/scala2-library/src/library/scala/collection/immutable/StreamView.scala deleted file mode 100644 index 127ed76eb59d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/StreamView.scala +++ /dev/null @@ -1,5 +0,0 @@ -package scala -package collection -package immutable - -trait StreamView[+A, +Coll] extends StreamViewLike[A, Coll, StreamView[A, Coll]] { } diff --git a/tests/scala2-library/src/library/scala/collection/immutable/StreamViewLike.scala b/tests/scala2-library/src/library/scala/collection/immutable/StreamViewLike.scala deleted file mode 100644 index 4bf8a2be6dc5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/StreamViewLike.scala +++ /dev/null @@ -1,73 +0,0 @@ -package scala -package collection -package immutable - -import generic._ - -trait StreamViewLike[+A, - +Coll, - +This <: StreamView[A, Coll] with StreamViewLike[A, Coll, This]] -extends SeqView[A, Coll] - with SeqViewLike[A, Coll, This] -{ self => - - override def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = { - self.iterator.toStream.asInstanceOf[That] - } - - trait TransformedM[+B] extends StreamView[B, Coll] with super.TransformedS[B] { - override def toString = viewToString - } - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. 
*/ - private[collection] abstract class AbstractTransformedM[+B] extends super.AbstractTransformedS[B] with TransformedM[B] - - trait EmptyViewM extends TransformedM[Nothing] with super.EmptyViewS - - trait ForcedM[B] extends super.ForcedS[B] with TransformedM[B] - - trait SlicedM extends super.SlicedS with TransformedM[A] - - trait MappedM[B] extends super.MappedS[B] with TransformedM[B] - - trait FlatMappedM[B] extends super.FlatMappedS[B] with TransformedM[B] - - trait AppendedM[B >: A] extends super.AppendedS[B] with TransformedM[B] - - trait FilteredM extends super.FilteredS with TransformedM[A] - - trait TakenWhileM extends super.TakenWhileS with TransformedM[A] - - trait DroppedWhileM extends super.DroppedWhileS with TransformedM[A] - - trait ZippedM[B] extends super.ZippedS[B] with TransformedM[(A, B)] - - trait ZippedAllM[A1 >: A, B] extends super.ZippedAllS[A1, B] with TransformedM[(A1, B)] - - trait ReversedM extends super.ReversedS with TransformedM[A] - - trait PatchedM[B >: A] extends super.PatchedS[B] with TransformedM[B] - - trait PrependedM[B >: A] extends super.PrependedS[B] with TransformedM[B] - - /** boilerplate */ - protected override def newForced[B](xs: => scala.collection.GenSeq[B]): TransformedM[B] = new AbstractTransformedM[B] with ForcedM[B] { lazy val forced = xs } - protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): TransformedM[B] = new AbstractTransformedM[B] with AppendedM[B] { lazy val rest = that } - protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): TransformedM[B] = new AbstractTransformedM[B] with PrependedM[B] { lazy protected[this] val fst = that } - protected override def newMapped[B](f: A => B): TransformedM[B] = new AbstractTransformedM[B] with MappedM[B] { lazy val mapping = f } - protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): TransformedM[B] = new AbstractTransformedM[B] with FlatMappedM[B] { lazy val mapping = f } - protected override def newFiltered(p: A => Boolean): TransformedM[A] = new AbstractTransformedM[A] with FilteredM { lazy val pred = p } - protected override def newSliced(_endpoints: SliceInterval): TransformedM[A] = new AbstractTransformedM[A] with SlicedM { lazy val endpoints = _endpoints } - protected override def newDroppedWhile(p: A => Boolean): TransformedM[A] = new AbstractTransformedM[A] with DroppedWhileM { lazy val pred = p } - protected override def newTakenWhile(p: A => Boolean): TransformedM[A] = new AbstractTransformedM[A] with TakenWhileM { lazy val pred = p } - protected override def newZipped[B](that: scala.collection.GenIterable[B]): TransformedM[(A, B)] = new AbstractTransformedM[(A, B)] with ZippedM[B] { lazy val other = that } - protected override def newZippedAll[A1 >: A, B](that: scala.collection.GenIterable[B], _thisElem: A1, _thatElem: B): TransformedM[(A1, B)] = { - new AbstractTransformedM[(A1, B)] with ZippedAllM[A1, B] { lazy val other = that; lazy val thisElem = _thisElem; lazy val thatElem = _thatElem } - } - protected override def newReversed: TransformedM[A] = new ReversedM { } - protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): TransformedM[B] = { - new AbstractTransformedM[B] with PatchedM[B] { lazy val from = _from; lazy val patch = _patch; lazy val replaced = _replaced } - } - - override def stringPrefix = "StreamView" -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/StringLike.scala 
b/tests/scala2-library/src/library/scala/collection/immutable/StringLike.scala deleted file mode 100644 index fce0f073aaff..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/StringLike.scala +++ /dev/null @@ -1,368 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import mutable.Builder -import scala.util.matching.Regex -import scala.math.ScalaNumber -import scala.reflect.ClassTag - -/** A companion object for the `StringLike` containing some constants. - * @since 2.8 - */ -object StringLike { - // just statics for companion class. - private final val LF = 0x0A - private final val FF = 0x0C - private final val CR = 0x0D - private final val SU = 0x1A -} - -import StringLike._ - -/** A trait describing stringlike collections. - * - * @tparam Repr The type of the actual collection inheriting `StringLike`. - * - * @since 2.8 - * @define Coll `String` - * @define coll string - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -trait StringLike[+Repr] extends Any with scala.collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] { -self => - - /** Creates a string builder buffer as builder for this class */ - protected[this] def newBuilder: Builder[Char, Repr] - - /** Return element at index `n` - * @throws IndexOutOfBoundsException if the index is not valid - */ - def apply(n: Int): Char = toString charAt n - - def length: Int = toString.length - - override def mkString = toString - - override def slice(from: Int, until: Int): Repr = { - val start = from max 0 - val end = until min length - - if (start >= end) newBuilder.result() - else (newBuilder ++= toString.substring(start, end)).result() - } - - /** Return the current string concatenated `n` times. - */ - def * (n: Int): String = { - val buf = new StringBuilder - for (i <- 0 until n) buf append toString - buf.toString - } - - override def compare(other: String) = toString compareTo other - - private def isLineBreak(c: Char) = c == LF || c == FF - - /** - * Strip trailing line end character from this string if it has one. - * - * A line end character is one of - * - `LF` - line feed (`0x0A` hex) - * - `FF` - form feed (`0x0C` hex) - * - * If a line feed character `LF` is preceded by a carriage return `CR` - * (`0x0D` hex), the `CR` character is also stripped (Windows convention). - */ - def stripLineEnd: String = { - val len = toString.length - if (len == 0) toString - else { - val last = apply(len - 1) - if (isLineBreak(last)) - toString.substring(0, if (last == LF && len >= 2 && apply(len - 2) == CR) len - 2 else len - 1) - else - toString - } - } - - /** Return all lines in this string in an iterator, including trailing - * line end characters. - * - * This method is analogous to `s.split(EOL).toIterator`, - * except that any existing line endings are preserved in the result strings, - * and the empty string yields an empty iterator. 
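// Illustrative usage sketch of the StringLike operations defined above, as
// they surface on plain Strings through the usual implicit wrapping:
"hello\r\n".stripLineEnd   // "hello" (the CR+LF pair is stripped)
"ab" * 3                   // "ababab"
"abc" compare "abd"        // negative: "abc" sorts before "abd"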
- * - * A line end character is one of - * - `LF` - line feed (`0x0A`) - * - `FF` - form feed (`0x0C`) - */ - def linesWithSeparators: Iterator[String] = new AbstractIterator[String] { - val str = self.toString - private val len = str.length - private var index = 0 - def hasNext: Boolean = index < len - def next(): String = { - if (index >= len) throw new NoSuchElementException("next on empty iterator") - val start = index - while (index < len && !isLineBreak(apply(index))) index += 1 - index += 1 - str.substring(start, index min len) - } - } - - /** Return all lines in this string in an iterator, excluding trailing line - * end characters; i.e., apply `.stripLineEnd` to all lines - * returned by `linesWithSeparators`. - */ - def lines: Iterator[String] = - linesWithSeparators map (line => new WrappedString(line).stripLineEnd) - - /** Return all lines in this string in an iterator, excluding trailing line - * end characters; i.e., apply `.stripLineEnd` to all lines - * returned by `linesWithSeparators`. - */ - @deprecated("use `lines` instead","2.11.0") - def linesIterator: Iterator[String] = - linesWithSeparators map (line => new WrappedString(line).stripLineEnd) - - /** Returns this string with first character converted to upper case. - * If the first character of the string is capitalized, it is returned unchanged. - * This method does not convert characters outside the Basic Multilingual Plane (BMP). - */ - def capitalize: String = - if (toString == null) null - else if (toString.length == 0) "" - else if (toString.charAt(0).isUpper) toString - else { - val chars = toString.toCharArray - chars(0) = chars(0).toUpper - new String(chars) - } - - /** Returns this string with the given `prefix` stripped. If this string does not - * start with `prefix`, it is returned unchanged. - */ - def stripPrefix(prefix: String) = - if (toString.startsWith(prefix)) toString.substring(prefix.length) - else toString - - /** Returns this string with the given `suffix` stripped. If this string does not - * end with `suffix`, it is returned unchanged. - */ - def stripSuffix(suffix: String) = - if (toString.endsWith(suffix)) toString.substring(0, toString.length() - suffix.length) - else toString - - /** Replace all literal occurrences of `literal` with the literal string `replacement`. - * This method is equivalent to [[java.lang.String#replace]]. - * - * @param literal the string which should be replaced everywhere it occurs - * @param replacement the replacement string - * @return the resulting string - */ - def replaceAllLiterally(literal: String, replacement: String): String = toString.replace(literal, replacement) - - /** For every line in this string: - * - * Strip a leading prefix consisting of blanks or control characters - * followed by `marginChar` from the line. - */ - def stripMargin(marginChar: Char): String = { - val buf = new StringBuilder - for (line <- linesWithSeparators) { - val len = line.length - var index = 0 - while (index < len && line.charAt(index) <= ' ') index += 1 - buf append - (if (index < len && line.charAt(index) == marginChar) line.substring(index + 1) else line) - } - buf.toString - } - - /** For every line in this string: - * - * Strip a leading prefix consisting of blanks or control characters - * followed by `|` from the line. 
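// Illustrative usage sketch of the prefix/suffix/margin helpers above:
"scala.collection".stripPrefix("scala.")   // "collection"
"hello".capitalize                         // "Hello"
val text = """|first
              |second""".stripMargin       // "first\nsecond"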
- */ - def stripMargin: String = stripMargin('|') - - private def escape(ch: Char): String = if ( - (ch >= 'a') && (ch <= 'z') || - (ch >= 'A') && (ch <= 'Z') || - (ch >= '0' && ch <= '9')) ch.toString - else "\\" + ch - - /** Split this string around the separator character - * - * If this string is the empty string, returns an array of strings - * that contains a single empty string. - * - * If this string is not the empty string, returns an array containing - * the substrings terminated by the start of the string, the end of the - * string or the separator character, excluding empty trailing substrings - * - * If the separator character is a surrogate character, only split on - * matching surrogate characters if they are not part of a surrogate pair - * - * The behaviour follows, and is implemented in terms of String.split(re: String) - * - * - * @example {{{ - * "a.b".split('.') //returns Array("a", "b") - * - * //splitting the empty string always returns the array with a single - * //empty string - * "".split('.') //returns Array("") - * - * //only trailing empty substrings are removed - * "a.".split('.') //returns Array("a") - * ".a.".split('.') //returns Array("", "a") - * "..a..".split('.') //returns Array("", "", "a") - * - * //all parts are empty and trailing - * ".".split('.') //returns Array() - * "..".split('.') //returns Array() - * - * //surrogate pairs - * val high = 0xD852.toChar - * val low = 0xDF62.toChar - * val highstring = high.toString - * val lowstring = low.toString - * - * //well-formed surrogate pairs are not split - * val highlow = highstring + lowstring - * highlow.split(high) //returns Array(highlow) - * - * //bare surrogate characters are split - * val bare = "_" + highstring + "_" - * bare.split(high) //returns Array("_", "_") - * - * }}} - * - * @param separator the character used as a delimiter - */ - def split(separator: Char): Array[String] = - toString.split(escape(separator)) - - - @throws(classOf[java.util.regex.PatternSyntaxException]) - def split(separators: Array[Char]): Array[String] = { - val re = separators.foldLeft("[")(_+escape(_)) + "]" - toString.split(re) - } - - /** You can follow a string with `.r`, turning it into a `Regex`. E.g. - * - * `"""A\w*""".r` is the regular expression for identifiers starting with `A`. - */ - def r: Regex = r() - - /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`, - * with group names g1 through gn. - * - * `"""(\d\d)-(\d\d)-(\d\d\d\d)""".r("month", "day", "year")` matches dates - * and provides its subcomponents through groups named "month", "day" and - * "year". - * - * @param groupNames The names of the groups in the pattern, in the order they appear. - */ - def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*) - - /** - * @throws java.lang.IllegalArgumentException If the string does not contain a parsable `Boolean`. - */ - def toBoolean: Boolean = parseBoolean(toString) - /** - * Parse as a `Byte` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Byte`. - */ - def toByte: Byte = java.lang.Byte.parseByte(toString) - /** - * Parse as a `Short` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Short`. 
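// Illustrative usage sketch of the regex and parsing helpers above:
val date = """(\d{4})-(\d{2})-(\d{2})""".r
"2019-08-22" match {
  case date(y, m, d) => (y.toInt, m.toInt, d.toInt)   // (2019, 8, 22)
  case _             => sys.error("not a date")
}
"3.5".toDouble + "2".toLong   // 5.5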
- */ - def toShort: Short = java.lang.Short.parseShort(toString) - /** - * Parse as an `Int` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Int`. - */ - def toInt: Int = java.lang.Integer.parseInt(toString) - /** - * Parse as a `Long` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Long`. - */ - def toLong: Long = java.lang.Long.parseLong(toString) - /** - * Parse as a `Float` (surrounding whitespace is removed with a `trim`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Float`. - * @throws java.lang.NullPointerException If the string is null. - */ - def toFloat: Float = java.lang.Float.parseFloat(toString) - /** - * Parse as a `Double` (surrounding whitespace is removed with a `trim`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Double`. - * @throws java.lang.NullPointerException If the string is null. - */ - def toDouble: Double = java.lang.Double.parseDouble(toString) - - private def parseBoolean(s: String): Boolean = - if (s != null) s.toLowerCase match { - case "true" => true - case "false" => false - case _ => throw new IllegalArgumentException("For input string: \""+s+"\"") - } - else - throw new IllegalArgumentException("For input string: \"null\"") - - override def toArray[B >: Char : ClassTag]: Array[B] = - toString.toCharArray.asInstanceOf[Array[B]] - - private def unwrapArg(arg: Any): AnyRef = arg match { - case x: ScalaNumber => x.underlying - case x => x.asInstanceOf[AnyRef] - } - - /** Uses the underlying string as a pattern (in a fashion similar to - * printf in C), and uses the supplied arguments to fill in the - * holes. - * - * The interpretation of the formatting patterns is described in - * [[java.util.Formatter]], with the addition that - * classes deriving from `ScalaNumber` (such as [[scala.BigInt]] and - * [[scala.BigDecimal]]) are unwrapped to pass a type which `Formatter` - * understands. - * - * @param args the arguments used to instantiating the pattern. - * @throws java.lang.IllegalArgumentException - */ - def format(args : Any*): String = - java.lang.String.format(toString, args map unwrapArg: _*) - - /** Like `format(args*)` but takes an initial `Locale` parameter - * which influences formatting as in `java.lang.String`'s format. - * - * The interpretation of the formatting patterns is described in - * [[java.util.Formatter]], with the addition that - * classes deriving from `ScalaNumber` (such as `scala.BigInt` and - * `scala.BigDecimal`) are unwrapped to pass a type which `Formatter` - * understands. - * - * @param l an instance of `java.util.Locale` - * @param args the arguments used to instantiating the pattern. 
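// Illustrative usage sketch of format: ScalaNumber arguments such as BigInt
// are unwrapped to their underlying Java values before being handed to
// java.util.Formatter.
"%s owes %d credits".format("Ada", BigInt(42))   // "Ada owes 42 credits"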
- * @throws java.lang.IllegalArgumentException - */ - def formatLocal(l: java.util.Locale, args: Any*): String = - java.lang.String.format(l, toString, args map unwrapArg: _*) -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/StringOps.scala b/tests/scala2-library/src/library/scala/collection/immutable/StringOps.scala deleted file mode 100644 index 77333badf97b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/StringOps.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import mutable.StringBuilder - -/** This class serves as a wrapper providing [[scala.Predef.String]]s with all - * the operations found in indexed sequences. Where needed, `String`s are - * implicitly converted into instances of this class. - * - * The difference between this class and `WrappedString` is that calling transformer - * methods such as `filter` and `map` will yield a `String` object, whereas a - * `WrappedString` will remain a `WrappedString`. - * - * @param repr the actual representation of this string operations object. - * - * @since 2.8 - * @define Coll `String` - * @define coll string - */ -final class StringOps(override val repr: String) extends AnyVal with StringLike[String] { - - override protected[this] def thisCollection: WrappedString = new WrappedString(repr) - override protected[this] def toCollection(repr: String): WrappedString = new WrappedString(repr) - - /** Creates a string builder buffer as builder for this class */ - override protected[this] def newBuilder = StringBuilder.newBuilder - - override def apply(index: Int): Char = repr charAt index - override def slice(from: Int, until: Int): String = { - val start = if (from < 0) 0 else from - if (until <= start || start >= repr.length) - return "" - - val end = if (until > length) length else until - repr.substring(start, end) - } - override def toString = repr - override def length = repr.length - - def seq = new WrappedString(repr) -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Traversable.scala b/tests/scala2-library/src/library/scala/collection/immutable/Traversable.scala deleted file mode 100644 index 114e5c063255..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Traversable.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import mutable.Builder - -/** A trait for traversable collections that are guaranteed immutable. - * $traversableInfo - * @define mutability immutable - * - * @define usesMutableState - * - * Note: Despite being an immutable collection, the implementation uses mutable state internally during - * construction. These state changes are invisible in single-threaded code but can lead to race conditions - * in some multi-threaded scenarios. 
The state of a new collection instance may not have been "published" - * (in the sense of the Java Memory Model specification), so that an unsynchronized non-volatile read from - * another thread may observe the object in an invalid state (see - * [[https://github.com/scala/bug/issues/7838 scala/bug#7838]] for details). Note that such a read is not - * guaranteed to ''ever'' see the written object at all, and should therefore not be used, regardless - * of this issue. The easiest workaround is to exchange values between threads through a volatile var. - */ -trait Traversable[+A] extends scala.collection.Traversable[A] -// with GenTraversable[A] - with GenericTraversableTemplate[A, Traversable] - with TraversableLike[A, Traversable[A]] - with Immutable { - override def companion: GenericCompanion[Traversable] = Traversable - override def seq: Traversable[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - * @define coll immutable traversable collection - * @define Coll `immutable.Traversable` - */ -object Traversable extends TraversableFactory[Traversable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Traversable[A]] = new mutable.ListBuffer -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/TreeMap.scala b/tests/scala2-library/src/library/scala/collection/immutable/TreeMap.scala deleted file mode 100644 index 2d1bf0f6b1d0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/TreeMap.scala +++ /dev/null @@ -1,203 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import generic._ -import immutable.{RedBlackTree => RB} -import mutable.Builder - -/** $factoryInfo - * @define Coll immutable.TreeMap - * @define coll immutable tree map - */ -object TreeMap extends ImmutableSortedMapFactory[TreeMap] { - def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord) - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B] -} - -/** This class implements immutable maps using a tree. - * - * @tparam A the type of the keys contained in this tree map. - * @tparam B the type of the values associated with the keys. - * @param ordering the implicit ordering used to compare objects of type `A`. - * - * @author Erik Stenman - * @author Matthias Zenger - * @version 1.1, 03/05/2004 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#redblack_trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. 
- * - * @define Coll immutable.TreeMap - * @define coll immutable tree map - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A]) - extends SortedMap[A, B] - with SortedMapLike[A, B, TreeMap[A, B]] - with MapLike[A, B, TreeMap[A, B]] - with Serializable { - - override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] = - TreeMap.newBuilder[A, B] - - override def size = RB.count(tree) - - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - - override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMap[A, B](RB.rangeImpl(tree, from, until)) - override def range(from: A, until: A): TreeMap[A, B] = new TreeMap[A, B](RB.range(tree, from, until)) - override def from(from: A): TreeMap[A, B] = new TreeMap[A, B](RB.from(tree, from)) - override def to(to: A): TreeMap[A, B] = new TreeMap[A, B](RB.to(tree, to)) - override def until(until: A): TreeMap[A, B] = new TreeMap[A, B](RB.until(tree, until)) - - override def firstKey = RB.smallest(tree).key - override def lastKey = RB.greatest(tree).key - override def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) - - override def head = { - val smallest = RB.smallest(tree) - (smallest.key, smallest.value) - } - override def headOption = if (RB.isEmpty(tree)) None else Some(head) - override def last = { - val greatest = RB.greatest(tree) - (greatest.key, greatest.value) - } - override def lastOption = if (RB.isEmpty(tree)) None else Some(last) - - override def tail = new TreeMap(RB.delete(tree, firstKey)) - override def init = new TreeMap(RB.delete(tree, lastKey)) - - override def drop(n: Int) = { - if (n <= 0) this - else if (n >= size) empty - else new TreeMap(RB.drop(tree, n)) - } - - override def take(n: Int) = { - if (n <= 0) empty - else if (n >= size) this - else new TreeMap(RB.take(tree, n)) - } - - override def slice(from: Int, until: Int) = { - if (until <= from) empty - else if (from <= 0) take(until) - else if (until >= size) drop(from) - else new TreeMap(RB.slice(tree, from, until)) - } - - override def dropRight(n: Int) = take(size - math.max(n, 0)) - override def takeRight(n: Int) = drop(size - math.max(n, 0)) - override def splitAt(n: Int) = (take(n), drop(n)) - - private[this] def countWhile(p: ((A, B)) => Boolean): Int = { - var result = 0 - val it = iterator - while (it.hasNext && p(it.next())) result += 1 - result - } - override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p)) - override def takeWhile(p: ((A, B)) => Boolean) = take(countWhile(p)) - override def span(p: ((A, B)) => Boolean) = splitAt(countWhile(p)) - - /** A factory to create empty maps of the same type of keys. - */ - override def empty: TreeMap[A, B] = TreeMap.empty[A, B](ordering) - - /** A new TreeMap with the entry added is returned, - * if key is not in the TreeMap, otherwise - * the key is updated with the new entry. - * - * @tparam B1 type of the value of the new binding which is a supertype of `B` - * @param key the key that should be updated - * @param value the value to be associated with `key` - * @return a new $coll with the updated binding - */ - override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, overwrite = true)) - - /** Add a key/value pair to this map. 
- * @tparam B1 type of the value of the new binding, a supertype of `B` - * @param kv the key/value pair - * @return A new $coll with the new binding added to this map - */ - override def + [B1 >: B] (kv: (A, B1)): TreeMap[A, B1] = updated(kv._1, kv._2) - - /** Adds two or more elements to this collection and returns - * either the collection itself (if it is mutable), or a new collection - * with the added elements. - * - * @tparam B1 type of the values of the new bindings, a supertype of `B` - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new $coll with the updated bindings - */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): TreeMap[A, B1] = - this + elem1 + elem2 ++ elems - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object. - */ - override def ++[B1 >: B] (xs: GenTraversableOnce[(A, B1)]): TreeMap[A, B1] = - ((repr: TreeMap[A, B1]) /: xs.seq) (_ + _) - - /** A new TreeMap with the entry added is returned, - * assuming that key is not in the TreeMap. - * - * @tparam B1 type of the values of the new bindings, a supertype of `B` - * @param key the key to be inserted - * @param value the value to be associated with `key` - * @return a new $coll with the inserted binding, if it wasn't present in the map - */ - def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = { - assert(!RB.contains(tree, key)) - new TreeMap(RB.update(tree, key, value, overwrite = true)) - } - - def - (key:A): TreeMap[A, B] = - if (!RB.contains(tree, key)) this - else new TreeMap(RB.delete(tree, key)) - - /** Check if this map maps `key` to a value and return the - * value if it exists. - * - * @param key the key of the mapping of interest - * @return the value of the mapping, if it exists - */ - override def get(key: A): Option[B] = RB.get(tree, key) - - /** Creates a new iterator over all elements contained in this - * object. 
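// Illustrative usage sketch for the TreeMap defined above: keys stay sorted
// by the implicit Ordering, so iteration and range queries follow key order.
val m = TreeMap(3 -> "c", 1 -> "a", 2 -> "b")
m.keysIterator.toList   // List(1, 2, 3)
m.to(2).keys.toList     // List(1, 2)
(m - 2).get(2)          // None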
- * - * @return the new iterator - */ - override def iterator: Iterator[(A, B)] = RB.iterator(tree) - override def iteratorFrom(start: A): Iterator[(A, B)] = RB.iterator(tree, Some(start)) - - override def keysIterator: Iterator[A] = RB.keysIterator(tree) - override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def valuesIterator: Iterator[B] = RB.valuesIterator(tree) - override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start)) - - override def contains(key: A): Boolean = RB.contains(tree, key) - override def isDefinedAt(key: A): Boolean = RB.contains(tree, key) - - override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f) -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/TreeSet.scala b/tests/scala2-library/src/library/scala/collection/immutable/TreeSet.scala deleted file mode 100644 index 2cdf3b352113..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/TreeSet.scala +++ /dev/null @@ -1,163 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import immutable.{RedBlackTree => RB} -import mutable.{ Builder, SetBuilder } - -/** $factoryInfo - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - */ -object TreeSet extends ImmutableSortedSetFactory[TreeSet] { - implicit def implicitBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = newBuilder[A](ordering) - override def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = - new SetBuilder(empty[A](ordering)) - - /** The empty set of this type - */ - def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A] -} - -/** This class implements immutable sets using a tree. - * - * @tparam A the type of the elements contained in this tree set - * @param ordering the implicit ordering used to compare objects of type `A` - * - * @author Martin Odersky - * @version 2.0, 02/01/2007 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#redblack_trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. 
- * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-5685982407650748405L) -final class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Ordering[A]) - extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { - - if (ordering eq null) - throw new NullPointerException("ordering must not be null") - - override def stringPrefix = "TreeSet" - - override def size = RB.count(tree) - - override def head = RB.smallest(tree).key - override def headOption = if (RB.isEmpty(tree)) None else Some(head) - override def last = RB.greatest(tree).key - override def lastOption = if (RB.isEmpty(tree)) None else Some(last) - - override def tail = new TreeSet(RB.delete(tree, firstKey)) - override def init = new TreeSet(RB.delete(tree, lastKey)) - - override def drop(n: Int) = { - if (n <= 0) this - else if (n >= size) empty - else newSet(RB.drop(tree, n)) - } - - override def take(n: Int) = { - if (n <= 0) empty - else if (n >= size) this - else newSet(RB.take(tree, n)) - } - - override def slice(from: Int, until: Int) = { - if (until <= from) empty - else if (from <= 0) take(until) - else if (until >= size) drop(from) - else newSet(RB.slice(tree, from, until)) - } - - override def dropRight(n: Int) = take(size - math.max(n, 0)) - override def takeRight(n: Int) = drop(size - math.max(n, 0)) - override def splitAt(n: Int) = (take(n), drop(n)) - - private[this] def countWhile(p: A => Boolean): Int = { - var result = 0 - val it = iterator - while (it.hasNext && p(it.next())) result += 1 - result - } - override def dropWhile(p: A => Boolean) = drop(countWhile(p)) - override def takeWhile(p: A => Boolean) = take(countWhile(p)) - override def span(p: A => Boolean) = splitAt(countWhile(p)) - - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - - private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t) - - /** A factory to create empty sets of the same type of keys. - */ - override def empty = TreeSet.empty - - /** Creates a new `TreeSet` with the entry added. - * - * @param elem a new element to add. - * @return a new $coll containing `elem` and all the elements of this $coll. - */ - def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), overwrite = false)) - - /** A new `TreeSet` with the entry added is returned, - * assuming that elem is not in the TreeSet. - * - * @param elem a new element to add. - * @return a new $coll containing `elem` and all the elements of this $coll. - */ - def insert(elem: A): TreeSet[A] = { - assert(!RB.contains(tree, elem)) - newSet(RB.update(tree, elem, (), overwrite = false)) - } - - /** Creates a new `TreeSet` with the entry removed. - * - * @param elem a new element to add. - * @return a new $coll containing all the elements of this $coll except `elem`. - */ - def - (elem:A): TreeSet[A] = - if (!RB.contains(tree, elem)) this - else newSet(RB.delete(tree, elem)) - - /** Checks if this set contains element `elem`. - * - * @param elem the element to check for membership. - * @return true, iff `elem` is contained in this set. - */ - def contains(elem: A): Boolean = RB.contains(tree, elem) - - /** Creates a new iterator over all elements contained in this - * object. 
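// Illustrative usage sketch for the TreeSet defined above: elements are kept
// in Ordering order by the underlying red-black tree.
val ts = TreeSet(5, 1, 3)
ts.toList              // List(1, 3, 5)
(ts + 2).head          // 1
ts.range(1, 4).toList  // List(1, 3)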
- * - * @return the new iterator - */ - def iterator: Iterator[A] = RB.keysIterator(tree) - override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def foreach[U](f: A => U) = RB.foreachKey(tree, f) - - override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSet(RB.rangeImpl(tree, from, until)) - override def range(from: A, until: A): TreeSet[A] = newSet(RB.range(tree, from, until)) - override def from(from: A): TreeSet[A] = newSet(RB.from(tree, from)) - override def to(to: A): TreeSet[A] = newSet(RB.to(tree, to)) - override def until(until: A): TreeSet[A] = newSet(RB.until(tree, until)) - - override def firstKey = head - override def lastKey = last -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/TrieIterator.scala b/tests/scala2-library/src/library/scala/collection/immutable/TrieIterator.scala deleted file mode 100644 index e68a409501f1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/TrieIterator.scala +++ /dev/null @@ -1,226 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 } -import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 } -import scala.annotation.unchecked.{ uncheckedVariance => uV } -import scala.annotation.tailrec - -/** Abandons any pretense of type safety for speed. You can't say I - * didn't try: see r23934. - */ -private[collection] abstract class TrieIterator[+T]( - elems: Array[Iterable[T]], - val initDepth: Int, - val initArrayStack: Array[Array[Iterable[T @uV]]], - val initPosStack: Array[Int], - val initArrayD: Array[Iterable[T @uV]], - val initPosD: Int, - val initSubIter: Iterator[T]) extends AbstractIterator[T] { outer => - - def this(elems: Array[Iterable[T]]) = - this(elems, - initDepth = 0, - initArrayStack = new Array[Array[Iterable[T]]](6), - initPosStack = new Array[Int](6), - initArrayD = elems, - initPosD = 0, - initSubIter = null) - - private[immutable] def getElem(x: AnyRef): T - - private[this] var depth = initDepth - private[this] var arrayStack: Array[Array[Iterable[T @uV]]] = initArrayStack - private[this] var posStack = initPosStack - private[this] var arrayD: Array[Iterable[T @uV]] = initArrayD - private[this] var posD = initPosD - private[this] var subIter = initSubIter - - private[this] def getElems(x: Iterable[T]): Array[Iterable[T]] = (x match { - case x: HashTrieMap[_, _] => x.elems - case x: HashTrieSet[_] => x.elems - }).asInstanceOf[Array[Iterable[T]]] - - private[this] def collisionToArray(x: Iterable[T]): Array[Iterable[T]] = (x match { - case x: HashMapCollision1[_, _] => x.kvs.map(x => HashMap(x)).toArray - case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray - }).asInstanceOf[Array[Iterable[T]]] - - private[this] type SplitIterators = ((Iterator[T], Int), Iterator[T]) - - private def isTrie(x: AnyRef) = x match { - case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true - case _ => false - } - private def isContainer(x: AnyRef) = x match { - case _: HashMap1[_, _] | _: HashSet1[_] => true - case _ => false - } - - final class DupIterator(xs: Array[Iterable[T]]) extends TrieIterator[T](xs, - initDepth = outer.depth, - initArrayStack = outer.arrayStack, - initPosStack = outer.posStack, - initArrayD = outer.arrayD, 
- initPosD = outer.posD, - initSubIter = outer.subIter) { - final override def getElem(x: AnyRef): T = outer.getElem(x) - } - - def dupIterator: TrieIterator[T] = new DupIterator(elems) - - private[this] def newIterator(xs: Array[Iterable[T]]) = new TrieIterator(xs) { - final override def getElem(x: AnyRef): T = outer.getElem(x) - } - - private[this] def iteratorWithSize(arr: Array[Iterable[T]]): (Iterator[T], Int) = - (newIterator(arr), arr.map(_.size).sum) - - private[this] def arrayToIterators(arr: Array[Iterable[T]]): SplitIterators = { - val (fst, snd) = arr.splitAt(arr.length / 2) - - (iteratorWithSize(snd), newIterator(fst)) - } - private[this] def splitArray(ad: Array[Iterable[T]]): SplitIterators = - if (ad.length > 1) arrayToIterators(ad) - else ad(0) match { - case _: HashMapCollision1[_, _] | _: HashSetCollision1[_] => - arrayToIterators(collisionToArray(ad(0))) - case _ => - splitArray(getElems(ad(0))) - } - - def hasNext = (subIter ne null) || depth >= 0 - def next(): T = { - if (subIter ne null) { - val el = subIter.next() - if (!subIter.hasNext) - subIter = null - el - } else - next0(arrayD, posD) - } - - @tailrec private[this] def next0(elems: Array[Iterable[T]], i: Int): T = { - if (i == elems.length-1) { // reached end of level, pop stack - depth -= 1 - if (depth >= 0) { - arrayD = arrayStack(depth) - posD = posStack(depth) - arrayStack(depth) = null - } else { - arrayD = null - posD = 0 - } - } else - posD += 1 - - val m = elems(i) - - // Note: this block is over twice as fast written this way as it is - // as a pattern match. Haven't started looking into why that is, but - // it's pretty sad the pattern matcher is that much slower. - if (isContainer(m)) - getElem(m) // push current pos onto stack and descend - else if (isTrie(m)) { - if (depth >= 0) { - arrayStack(depth) = arrayD - posStack(depth) = posD - } - depth += 1 - arrayD = getElems(m) - posD = 0 - next0(getElems(m), 0) - } - else { - subIter = m.iterator - next() - } - // The much slower version: - // - // m match { - // case _: HashMap1[_, _] | _: HashSet1[_] => - // getElem(m) // push current pos onto stack and descend - // case _: HashTrieMap[_,_] | _: HashTrieSet[_] => - // if (depth >= 0) { - // arrayStack(depth) = arrayD - // posStack(depth) = posD - // } - // depth += 1 - // arrayD = getElems(m) - // posD = 0 - // next0(getElems(m), 0) - // case _ => - // subIter = m.iterator - // next - // } - } - - // assumption: contains 2 or more elements - // splits this iterator into 2 iterators - // returns the 1st iterator, its number of elements, and the second iterator - def split: SplitIterators = { - // 0) simple case: no elements have been iterated - simply divide arrayD - if (arrayD != null && depth == 0 && posD == 0) - return splitArray(arrayD) - - // otherwise, some elements have been iterated over - // 1) collision case: if we have a subIter, we return subIter and elements after it - if (subIter ne null) { - val buff = subIter.toBuffer - subIter = null - ((buff.iterator, buff.length), this) - } - else { - // otherwise find the topmost array stack element - if (depth > 0) { - // 2) topmost comes before (is not) arrayD - // steal a portion of top to create a new iterator - if (posStack(0) == arrayStack(0).length - 1) { - // 2a) only a single entry left on top - // this means we have to modify this iterator - pop topmost - val snd = Array[Iterable[T]](arrayStack(0).last) - val szsnd = snd(0).size - // modify this - pop - depth -= 1 - 1 until arrayStack.length foreach (i => arrayStack(i - 1) = 
arrayStack(i)) - arrayStack(arrayStack.length - 1) = Array[Iterable[T]](null) - posStack = posStack.tail ++ Array[Int](0) - // we know that `this` is not empty, since it had something on the arrayStack and arrayStack elements are always non-empty - ((newIterator(snd), szsnd), this) - } else { - // 2b) more than a single entry left on top - val (fst, snd) = arrayStack(0).splitAt(arrayStack(0).length - (arrayStack(0).length - posStack(0) + 1) / 2) - arrayStack(0) = fst - (iteratorWithSize(snd), this) - } - } else { - // 3) no topmost element (arrayD is at the top) - // steal a portion of it and update this iterator - if (posD == arrayD.length - 1) { - // 3a) positioned at the last element of arrayD - val m = arrayD(posD) - arrayToIterators( - if (isTrie(m)) getElems(m) - else collisionToArray(m) - ) - } - else { - // 3b) arrayD has more free elements - val (fst, snd) = arrayD.splitAt(arrayD.length - (arrayD.length - posD + 1) / 2) - arrayD = fst - (iteratorWithSize(snd), this) - } - } - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/Vector.scala b/tests/scala2-library/src/library/scala/collection/immutable/Vector.scala deleted file mode 100644 index 1093084b9d49..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/Vector.scala +++ /dev/null @@ -1,1112 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package immutable - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.generic._ -import scala.collection.mutable.{Builder, ReusableBuilder} -import scala.collection.parallel.immutable.ParVector - -/** Companion object to the Vector class - */ -object Vector extends IndexedSeqFactory[Vector] { - def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - private[immutable] val NIL = new Vector[Nothing](0, 0, 0) - override def empty[A]: Vector[A] = NIL - - // Constants governing concat strategy for performance - private final val Log2ConcatFaster = 5 - private final val TinyAppendFaster = 2 -} - -// in principle, most members should be private. however, access privileges must -// be carefully chosen to not prevent method inlining - -/** Vector is a general-purpose, immutable data structure. It provides random access and updates - * in effectively constant time, as well as very fast append and prepend. Because vectors strike - * a good balance between fast random selections and fast random functional updates, they are - * currently the default implementation of immutable indexed sequences. It is backed by a little - * endian bit-mapped vector trie with a branching factor of 32. Locality is very good, but not - * contiguous, which is good for very large sequences. - * - * $usesMutableState - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#vectors "Scala's Collection Library overview"]] - * section on `Vectors` for more information. - * - * @tparam A the element type - * - * @define Coll `Vector` - * @define coll vector - * @define thatinfo the class of the returned collection. 
In the standard library configuration, - * `That` is always `Vector[B]` because an implicit of type `CanBuildFrom[Vector, B, That]` - * is defined in object `Vector`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `Vector`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-1334388273712300479L) -final class Vector[+A] private[immutable] (private[collection] val startIndex: Int, private[collection] val endIndex: Int, focus: Int) -extends AbstractSeq[A] - with IndexedSeq[A] - with GenericTraversableTemplate[A, Vector] - with IndexedSeqLike[A, Vector[A]] - with VectorPointer[A @uncheckedVariance] - with Serializable - with CustomParallelizable[A, ParVector[A]] -{ self => - - override def companion: GenericCompanion[Vector] = Vector - - private[immutable] var dirty = false - - def length = endIndex - startIndex - - override def par = new ParVector(this) - - override def toVector: Vector[A] = this - - override def lengthCompare(len: Int): Int = length - len - - private[collection] final def initIterator[B >: A](s: VectorIterator[B]) { - s.initFrom(this) - if (dirty) s.stabilize(focus) - if (s.depth > 1) s.gotoPos(startIndex, startIndex ^ focus) - } - - override def iterator: VectorIterator[A] = { - val s = new VectorIterator[A](startIndex, endIndex) - initIterator(s) - s - } - - override /*SeqLike*/ - def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private var i = self.length - def hasNext: Boolean = 0 < i - def next(): A = - if (0 < i) { - i -= 1 - self(i) - } else Iterator.empty.next() - } - - // Ideally, clients will inline calls to map all the way down, including the iterator/builder methods. - // In principle, escape analysis could even remove the iterator/builder allocations and do it - // with local variables exclusively. But we're not quite there yet ... 
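The Vector being removed here is a radix-32 trie: each `display` level consumes five bits of the element index, which is the `(index >>> (5 * level)) & 31` arithmetic used by `getElem` and `gotoPos` further down in this file. A minimal standalone sketch of that index decomposition (illustrative only, not part of the deleted sources; the sample index and depth are made-up values):

    // Decomposes an element index into 5-bit digits, one per trie level,
    // mirroring the (index >>> (5 * level)) & 31 arithmetic in getElem/gotoPos.
    object RadixSketch {
      def digits(index: Int, depth: Int): Seq[Int] =
        (depth - 1 to 0 by -1).map(level => (index >>> (5 * level)) & 31)

      def main(args: Array[String]): Unit = {
        // Hypothetical example: in a depth-3 vector, index 2405 lives at
        // display2 slot 2, display1 slot 11, display0 slot 5.
        println(digits(2405, 3)) // prints Vector(2, 11, 5)
      }
    }
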
- - def apply(index: Int): A = { - val idx = checkRangeConvert(index) - getElem(idx, idx ^ focus) - } - - private def checkRangeConvert(index: Int) = { - val idx = index + startIndex - if (index >= 0 && idx < endIndex) - idx - else - throw new IndexOutOfBoundsException(index.toString) - } - - // If we have a default builder, there are faster ways to perform some operations - @inline private[this] def isDefaultCBF[A, B, That](bf: CanBuildFrom[Vector[A], B, That]): Boolean = - (bf eq IndexedSeq.ReusableCBF) || (bf eq collection.immutable.Seq.ReusableCBF) || (bf eq collection.Seq.ReusableCBF) - - // SeqLike api - - override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (isDefaultCBF[A, B, That](bf)) - updateAt(index, elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly - else super.updated(index, elem)(bf) - - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (isDefaultCBF[A, B, That](bf)) - appendFront(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly - else super.+:(elem)(bf) - - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (isDefaultCBF(bf)) - appendBack(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly - else super.:+(elem)(bf) - - override def take(n: Int): Vector[A] = { - if (n <= 0) - Vector.empty - else if (startIndex < endIndex - n) - dropBack0(startIndex + n) - else - this - } - - override def drop(n: Int): Vector[A] = { - if (n <= 0) - this - else if (startIndex < endIndex - n) - dropFront0(startIndex + n) - else - Vector.empty - } - - override def takeRight(n: Int): Vector[A] = { - if (n <= 0) - Vector.empty - else if (endIndex - n > startIndex) - dropFront0(endIndex - n) - else - this - } - - override def dropRight(n: Int): Vector[A] = { - if (n <= 0) - this - else if (endIndex - n > startIndex) - dropBack0(endIndex - n) - else - Vector.empty - } - - override /*IterableLike*/ - def head: A = { - if (isEmpty) throw new UnsupportedOperationException("empty.head") - apply(0) - } - - override /*TraversableLike*/ - def tail: Vector[A] = { - if (isEmpty) throw new UnsupportedOperationException("empty.tail") - drop(1) - } - - override /*TraversableLike*/ - def last: A = { - if (isEmpty) throw new UnsupportedOperationException("empty.last") - apply(length - 1) - } - - override /*TraversableLike*/ - def init: Vector[A] = { - if (isEmpty) throw new UnsupportedOperationException("empty.init") - dropRight(1) - } - - override /*IterableLike*/ - def slice(from: Int, until: Int): Vector[A] = - take(until).drop(from) - - override /*IterableLike*/ - def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n)) - - // concat (suboptimal but avoids worst performance gotchas) - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = { - if (isDefaultCBF(bf)) { - // We are sure we will create a Vector, so let's do it efficiently - import Vector.{Log2ConcatFaster, TinyAppendFaster} - if (that.isEmpty) this.asInstanceOf[That] - else { - val again = if (!that.isTraversableAgain) that.toVector else that.seq - again.size match { - // Often it's better to append small numbers of elements (or prepend if RHS is a vector) - case n if n <= TinyAppendFaster || n < (this.size >>> Log2ConcatFaster) => - var v: Vector[B] = this - for (x <- again) v = v :+ x - v.asInstanceOf[That] - case n if this.size < (n >>> 
Log2ConcatFaster) && again.isInstanceOf[Vector[_]] => - var v = again.asInstanceOf[Vector[B]] - val ri = this.reverseIterator - while (ri.hasNext) v = ri.next +: v - v.asInstanceOf[That] - case _ => super.++(again) - } - } - } - else super.++(that.seq) - } - - // semi-private api - - private[immutable] def updateAt[B >: A](index: Int, elem: B): Vector[B] = { - val idx = checkRangeConvert(index) - val s = new Vector[B](startIndex, endIndex, idx) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, idx, focus ^ idx) // if dirty commit changes; go to new pos and prepare for writing - s.display0(idx & 31) = elem.asInstanceOf[AnyRef] - s - } - - private def gotoPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) { - gotoPosWritable1(oldIndex, newIndex, xor) - } else { - gotoPosWritable0(newIndex, xor) - dirty = true - } - - private def gotoFreshPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) { - gotoFreshPosWritable1(oldIndex, newIndex, xor) - } else { - gotoFreshPosWritable0(oldIndex, newIndex, xor) - dirty = true - } - - private[immutable] def appendFront[B >: A](value: B): Vector[B] = { - if (endIndex != startIndex) { - val blockIndex = (startIndex - 1) & ~31 - val lo = (startIndex - 1) & 31 - - if (startIndex != blockIndex + 32) { - val s = new Vector(startIndex - 1, endIndex, blockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - - val freeSpace = (1 << (5 * depth)) - endIndex // free space at the right given the current tree-structure depth - val shift = freeSpace & ~((1 << (5 * (depth - 1))) - 1) // number of elements by which we'll shift right (only move at top level) - val shiftBlocks = freeSpace >>> (5 * (depth - 1)) // number of top-level blocks - - if (shift != 0) { - // case A: we can shift right on the top level - if (depth > 1) { - val newBlockIndex = blockIndex + shift - val newFocus = focus + shift - - val s = new Vector(startIndex - 1 + shift, endIndex + shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - val newBlockIndex = blockIndex + 32 - val newFocus = focus - - val s = new Vector(startIndex - 1 + shift, endIndex + shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(0, shiftBlocks) // shift right by n elements - s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing - s.display0(shift - 1) = value.asInstanceOf[AnyRef] - s - } - } else if (blockIndex < 0) { - // case B: we need to move the whole structure - val move = (1 << (5 * (depth + 1))) - (1 << (5 * depth)) - val newBlockIndex = blockIndex + move - val newFocus = focus + move - - val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - val newBlockIndex = blockIndex - val newFocus = focus - - val s = new Vector(startIndex - 1, endIndex, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s - 
} - } - } else { - // empty vector, just insert single element at the back - val elems = new Array[AnyRef](32) - elems(31) = value.asInstanceOf[AnyRef] - val s = new Vector(31, 32, 0) - s.depth = 1 - s.display0 = elems - s - } - } - - private[immutable] def appendBack[B >: A](value: B): Vector[B] = { - if (endIndex != startIndex) { - val blockIndex = endIndex & ~31 - val lo = endIndex & 31 - - if (endIndex != blockIndex) { - val s = new Vector(startIndex, endIndex + 1, blockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - val shift = startIndex & ~((1 << (5 * (depth - 1))) - 1) - val shiftBlocks = startIndex >>> (5 * (depth - 1)) - - if (shift != 0) { - if (depth > 1) { - val newBlockIndex = blockIndex - shift - val newFocus = focus - shift - - val s = new Vector(startIndex - shift, endIndex + 1 - shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - val newBlockIndex = blockIndex - 32 - val newFocus = focus - - val s = new Vector(startIndex - shift, endIndex + 1 - shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements - s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(32 - shift) = value.asInstanceOf[AnyRef] - s - } - } else { - val newBlockIndex = blockIndex - val newFocus = focus - - val s = new Vector(startIndex, endIndex + 1, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } - } - } else { - val elems = new Array[AnyRef](32) - elems(0) = value.asInstanceOf[AnyRef] - val s = new Vector(0, 1, 0) - s.depth = 1 - s.display0 = elems - s - } - } - - - // low-level implementation (needs cleanup, maybe move to util class) - - private def shiftTopLevel(oldLeft: Int, newLeft: Int) = (depth - 1) match { - case 0 => display0 = copyRange(display0, oldLeft, newLeft) - case 1 => display1 = copyRange(display1, oldLeft, newLeft) - case 2 => display2 = copyRange(display2, oldLeft, newLeft) - case 3 => display3 = copyRange(display3, oldLeft, newLeft) - case 4 => display4 = copyRange(display4, oldLeft, newLeft) - case 5 => display5 = copyRange(display5, oldLeft, newLeft) - } - - private def zeroLeft(array: Array[AnyRef], index: Int): Unit = { - var i = 0 - while (i < index) { - array(i) = null - i += 1 - } - } - - private def zeroRight(array: Array[AnyRef], index: Int): Unit = { - var i = index - while (i < array.length) { - array(i) = null - i += 1 - } - } - - private def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = { - val copy = new Array[AnyRef](array.length) - java.lang.System.arraycopy(array, 0, copy, 0, right) - copy - } - private def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = { - val copy = new Array[AnyRef](array.length) - java.lang.System.arraycopy(array, left, copy, left, copy.length - left) - copy - } - - private def preClean(depth: Int) = { - this.depth = depth - (depth - 1) match { - case 0 => - display1 = null - display2 = null - display3 = null - display4 = null - display5 = null - case 1 => - display2 = null - display3 = null - display4 = null - display5 = null - case 2 => - display3 = null - display4 = null - 
display5 = null - case 3 => - display4 = null - display5 = null - case 4 => - display5 = null - case 5 => - } - } - - // requires structure is at index cutIndex and writable at level 0 - private def cleanLeftEdge(cutIndex: Int) = { - if (cutIndex < (1 << 5)) { - zeroLeft(display0, cutIndex) - } else if (cutIndex < (1 << 10)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, cutIndex >>> 5) - } else if (cutIndex < (1 << 15)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, cutIndex >>> 10) - } else if (cutIndex < (1 << 20)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, (cutIndex >>> 10) & 31) - display3 = copyRight(display3, cutIndex >>> 15) - } else if (cutIndex < (1 << 25)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, (cutIndex >>> 10) & 31) - display3 = copyRight(display3, (cutIndex >>> 15) & 31) - display4 = copyRight(display4, cutIndex >>> 20) - } else if (cutIndex < (1 << 30)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, (cutIndex >>> 10) & 31) - display3 = copyRight(display3, (cutIndex >>> 15) & 31) - display4 = copyRight(display4, (cutIndex >>> 20) & 31) - display5 = copyRight(display5, cutIndex >>> 25) - } else { - throw new IllegalArgumentException() - } - } - - // requires structure is writable and at index cutIndex - private def cleanRightEdge(cutIndex: Int) = { - // we're actually sitting one block left if cutIndex lies on a block boundary - // this means that we'll end up erasing the whole block!! - - if (cutIndex <= (1 << 5)) { - zeroRight(display0, cutIndex) - } else if (cutIndex <= (1 << 10)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, cutIndex >>> 5) - } else if (cutIndex <= (1 << 15)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, cutIndex >>> 10) - } else if (cutIndex <= (1 << 20)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) - display3 = copyLeft(display3, cutIndex >>> 15) - } else if (cutIndex <= (1 << 25)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) - display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1) - display4 = copyLeft(display4, cutIndex >>> 20) - } else if (cutIndex <= (1 << 30)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) - display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1) - display4 = copyLeft(display4, (((cutIndex - 1) >>> 20) & 31) + 1) - display5 = copyLeft(display5, cutIndex >>> 25) - } else { - throw new IllegalArgumentException() - } - } - - private def requiredDepth(xor: Int) = { - if (xor < (1 << 5)) 1 - else if (xor < (1 << 10)) 2 - else if (xor < (1 << 15)) 3 - else if (xor < (1 << 20)) 4 - else if (xor < (1 << 25)) 5 - else if (xor < (1 << 30)) 6 - else throw new IllegalArgumentException() - } - - private def 
dropFront0(cutIndex: Int): Vector[A] = { - val blockIndex = cutIndex & ~31 - val xor = cutIndex ^ (endIndex - 1) - val d = requiredDepth(xor) - val shift = cutIndex & ~((1 << (5 * d)) - 1) - - // need to init with full display iff going to cutIndex requires swapping block at level >= d - - val s = new Vector(cutIndex - shift, endIndex - shift, blockIndex - shift) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.preClean(d) - s.cleanLeftEdge(cutIndex - shift) - s - } - - private def dropBack0(cutIndex: Int): Vector[A] = { - val blockIndex = (cutIndex - 1) & ~31 - val xor = startIndex ^ (cutIndex - 1) - val d = requiredDepth(xor) - val shift = startIndex & ~((1 << (5 * d)) - 1) - - val s = new Vector(startIndex - shift, cutIndex - shift, blockIndex - shift) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.preClean(d) - s.cleanRightEdge(cutIndex - shift) - s - } -} - -class VectorIterator[+A](_startIndex: Int, endIndex: Int) -extends AbstractIterator[A] - with Iterator[A] - with VectorPointer[A @uncheckedVariance] { - - private var blockIndex: Int = _startIndex & ~31 - private var lo: Int = _startIndex & 31 - - private var endLo = math.min(endIndex - blockIndex, 32) - - def hasNext = _hasNext - - private var _hasNext = blockIndex + lo < endIndex - - def next(): A = { - if (!_hasNext) throw new NoSuchElementException("reached iterator end") - - val res = display0(lo).asInstanceOf[A] - lo += 1 - - if (lo == endLo) { - if (blockIndex + lo < endIndex) { - val newBlockIndex = blockIndex + 32 - gotoNextBlockStart(newBlockIndex, blockIndex ^ newBlockIndex) - - blockIndex = newBlockIndex - endLo = math.min(endIndex - blockIndex, 32) - lo = 0 - } else { - _hasNext = false - } - } - - res - } - - private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0 - - /** Creates a new vector which consists of elements remaining in this iterator. - * Such a vector can then be split into several vectors using methods like `take` and `drop`. - */ - private[collection] def remainingVector: Vector[A] = { - val v = new Vector(blockIndex + lo, endIndex, blockIndex + lo) - v.initFrom(this) - v - } -} - -/** A class to build instances of `Vector`. This builder is reusable. */ -final class VectorBuilder[A]() extends ReusableBuilder[A, Vector[A]] with VectorPointer[A @uncheckedVariance] { - - // possible alternative: start with display0 = null, blockIndex = -32, lo = 32 - // to avoid allocating initial array if the result will be empty anyways - - display0 = new Array[AnyRef](32) - depth = 1 - - private var blockIndex = 0 - private var lo = 0 - - def +=(elem: A): this.type = { - if (lo >= display0.length) { - val newBlockIndex = blockIndex + 32 - gotoNextBlockStartWritable(newBlockIndex, blockIndex ^ newBlockIndex) - blockIndex = newBlockIndex - lo = 0 - } - display0(lo) = elem.asInstanceOf[AnyRef] - lo += 1 - this - } - - override def ++=(xs: TraversableOnce[A]): this.type = super.++=(xs) - - def result: Vector[A] = { - val size = blockIndex + lo - if (size == 0) - return Vector.empty - val s = new Vector[A](0, size, 0) // should focus front or back? - s.initFrom(this) - if (depth > 1) s.gotoPos(0, size - 1) // we're currently focused to size - 1, not size! 
- s - } - - def clear(): Unit = { - display0 = new Array[AnyRef](32) - depth = 1 - blockIndex = 0 - lo = 0 - } -} - -private[immutable] trait VectorPointer[T] { - private[immutable] var depth: Int = _ - private[immutable] var display0: Array[AnyRef] = _ - private[immutable] var display1: Array[AnyRef] = _ - private[immutable] var display2: Array[AnyRef] = _ - private[immutable] var display3: Array[AnyRef] = _ - private[immutable] var display4: Array[AnyRef] = _ - private[immutable] var display5: Array[AnyRef] = _ - - // used - private[immutable] final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth) - - private[immutable] final def initFrom[U](that: VectorPointer[U], depth: Int) = { - this.depth = depth - (depth - 1) match { - case -1 => - case 0 => - display0 = that.display0 - case 1 => - display1 = that.display1 - display0 = that.display0 - case 2 => - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - case 3 => - display3 = that.display3 - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - case 4 => - display4 = that.display4 - display3 = that.display3 - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - case 5 => - display5 = that.display5 - display4 = that.display4 - display3 = that.display3 - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - } - } - - // requires structure is at pos oldIndex = xor ^ index - private[immutable] final def getElem(index: Int, xor: Int): T = { - if (xor < (1 << 5)) { // level = 0 - (display0 - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 10)) { // level = 1 - (display1 - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 15)) { // level = 2 - (display2 - ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 20)) { // level = 3 - (display3 - ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 25)) { // level = 4 - (display4 - ((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 30)) { // level = 5 - (display5 - ((index >>> 25) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else { // level = 6 - throw new IllegalArgumentException() - } - } - - // go to specific position - // requires structure is at pos oldIndex = xor ^ index, - // ensures structure is at pos index - private[immutable] final def gotoPos(index: Int, xor: Int): Unit = { - if (xor < (1 << 5)) { // level = 0 - // we're already at the block start pos - } else if (xor < (1 << 10)) { // level = 1 - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 15)) { // level = 2 - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 20)) { // level = 3 - display2 
= display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 25)) { // level = 4 - display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 30)) { // level = 5 - display4 = display5((index >>> 25) & 31).asInstanceOf[Array[AnyRef]] - display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else { // level = 6 - throw new IllegalArgumentException() - } - } - - // USED BY ITERATOR - - // xor: oldIndex ^ index - private[immutable] final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos - if (xor < (1 << 10)) { // level = 1 - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 15)) { // level = 2 - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 20)) { // level = 3 - display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2(0).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 25)) { // level = 4 - display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - display2 = display3(0).asInstanceOf[Array[AnyRef]] - display1 = display2(0).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 30)) { // level = 5 - display4 = display5((index >>> 25) & 31).asInstanceOf[Array[AnyRef]] - display3 = display4(0).asInstanceOf[Array[AnyRef]] - display2 = display3(0).asInstanceOf[Array[AnyRef]] - display1 = display2(0).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else { // level = 6 - throw new IllegalArgumentException() - } - } - - // USED BY BUILDER - - // xor: oldIndex ^ index - private[immutable] final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos - if (xor < (1 << 10)) { // level = 1 - if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth += 1 } - display0 = new Array(32) - display1((index >>> 5) & 31) = display0 - } else if (xor < (1 << 15)) { // level = 2 - if (depth == 2) { display2 = new Array(32); display2(0) = display1; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display1((index >>> 5) & 31) = display0 - display2((index >>> 10) & 31) = display1 - } else if (xor < (1 << 20)) { // level = 3 - if (depth == 3) { display3 = new Array(32); display3(0) = display2; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display2 = new Array(32) - display1((index >>> 5) & 31) = display0 - display2((index >>> 10) & 31) = display1 - display3((index >>> 15) & 31) = display2 - } else if (xor < (1 << 25)) { // level = 4 - if (depth == 4) { display4 = new Array(32); display4(0) = display3; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display2 = new Array(32) - display3 = new Array(32) - display1((index >>> 5) & 
31) = display0 - display2((index >>> 10) & 31) = display1 - display3((index >>> 15) & 31) = display2 - display4((index >>> 20) & 31) = display3 - } else if (xor < (1 << 30)) { // level = 5 - if (depth == 5) { display5 = new Array(32); display5(0) = display4; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display2 = new Array(32) - display3 = new Array(32) - display4 = new Array(32) - display1((index >>> 5) & 31) = display0 - display2((index >>> 10) & 31) = display1 - display3((index >>> 15) & 31) = display2 - display4((index >>> 20) & 31) = display3 - display5((index >>> 25) & 31) = display4 - } else { // level = 6 - throw new IllegalArgumentException() - } - } - - // STUFF BELOW USED BY APPEND / UPDATE - - private[immutable] final def copyOf(a: Array[AnyRef]): Array[AnyRef] = { - val copy = new Array[AnyRef](a.length) - java.lang.System.arraycopy(a, 0, copy, 0, a.length) - copy - } - - private[immutable] final def nullSlotAndCopy(array: Array[AnyRef], index: Int): Array[AnyRef] = { - val x = array(index) - array(index) = null - copyOf(x.asInstanceOf[Array[AnyRef]]) - } - - // make sure there is no aliasing - // requires structure is at pos index - // ensures structure is clean and at pos index and writable at all levels except 0 - - private[immutable] final def stabilize(index: Int) = (depth - 1) match { - case 5 => - display5 = copyOf(display5) - display4 = copyOf(display4) - display3 = copyOf(display3) - display2 = copyOf(display2) - display1 = copyOf(display1) - display5((index >>> 25) & 31) = display4 - display4((index >>> 20) & 31) = display3 - display3((index >>> 15) & 31) = display2 - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 4 => - display4 = copyOf(display4) - display3 = copyOf(display3) - display2 = copyOf(display2) - display1 = copyOf(display1) - display4((index >>> 20) & 31) = display3 - display3((index >>> 15) & 31) = display2 - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 3 => - display3 = copyOf(display3) - display2 = copyOf(display2) - display1 = copyOf(display1) - display3((index >>> 15) & 31) = display2 - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 2 => - display2 = copyOf(display2) - display1 = copyOf(display1) - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 1 => - display1 = copyOf(display1) - display1((index >>> 5) & 31) = display0 - case 0 => - } - - - /// USED IN UPDATE AND APPEND BACK - - // prepare for writing at an existing position - - // requires structure is clean and at pos oldIndex = xor ^ newIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match { - case 5 => - display5 = copyOf(display5) - display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31) - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 4 => - display4 = copyOf(display4) - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 3 => - display3 = copyOf(display3) - 
display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 2 => - display2 = copyOf(display2) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 1 => - display1 = copyOf(display1) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 0 => - display0 = copyOf(display0) - } - - - // requires structure is dirty and at pos oldIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { - if (xor < (1 << 5)) { // level = 0 - display0 = copyOf(display0) - } else if (xor < (1 << 10)) { // level = 1 - display1 = copyOf(display1) - display1((oldIndex >>> 5) & 31) = display0 - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 15)) { // level = 2 - display1 = copyOf(display1) - display2 = copyOf(display2) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 20)) { // level = 3 - display1 = copyOf(display1) - display2 = copyOf(display2) - display3 = copyOf(display3) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display3((oldIndex >>> 15) & 31) = display2 - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 25)) { // level = 4 - display1 = copyOf(display1) - display2 = copyOf(display2) - display3 = copyOf(display3) - display4 = copyOf(display4) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display3((oldIndex >>> 15) & 31) = display2 - display4((oldIndex >>> 20) & 31) = display3 - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 30)) { // level = 5 - display1 = copyOf(display1) - display2 = copyOf(display2) - display3 = copyOf(display3) - display4 = copyOf(display4) - display5 = copyOf(display5) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display3((oldIndex >>> 15) & 31) = display2 - display4((oldIndex >>> 20) & 31) = display3 - display5((oldIndex >>> 25) & 31) = display4 - display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31) - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else { // level = 6 - throw new IllegalArgumentException() - } - } - - - // USED IN DROP - - private[immutable] final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = { - val elems = new Array[AnyRef](32) - java.lang.System.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft, oldLeft)) - elems - } - - - // USED IN APPEND - // create a new block at the bottom level (and possibly nodes on its path) and prepares for 
writing - - // requires structure is clean and at pos oldIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos - if (xor < (1 << 5)) { // level = 0 - // we're already at the block start - } else if (xor < (1 << 10)) { // level = 1 - if (depth == 1) { - display1 = new Array(32) - display1((oldIndex >>> 5) & 31) = display0 - depth += 1 - } - display0 = new Array(32) - } else if (xor < (1 << 15)) { // level = 2 - if (depth == 2) { - display2 = new Array(32) - display2((oldIndex >>> 10) & 31) = display1 - depth += 1 - } - display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else if (xor < (1 << 20)) { // level = 3 - if (depth == 3) { - display3 = new Array(32) - display3((oldIndex >>> 15) & 31) = display2 - depth += 1 - } - display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]] - if (display2 == null) display2 = new Array(32) - display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else if (xor < (1 << 25)) { // level = 4 - if (depth == 4) { - display4 = new Array(32) - display4((oldIndex >>> 20) & 31) = display3 - depth += 1 - } - display3 = display4((newIndex >>> 20) & 31).asInstanceOf[Array[AnyRef]] - if (display3 == null) display3 = new Array(32) - display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]] - if (display2 == null) display2 = new Array(32) - display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else if (xor < (1 << 30)) { // level = 5 - if (depth == 5) { - display5 = new Array(32) - display5((oldIndex >>> 25) & 31) = display4 - depth += 1 - } - display4 = display5((newIndex >>> 25) & 31).asInstanceOf[Array[AnyRef]] - if (display4 == null) display4 = new Array(32) - display3 = display4((newIndex >>> 20) & 31).asInstanceOf[Array[AnyRef]] - if (display3 == null) display3 = new Array(32) - display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]] - if (display2 == null) display2 = new Array(32) - display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else { // level = 6 - throw new IllegalArgumentException() - } - } - - // requires structure is dirty and at pos oldIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { - stabilize(oldIndex) - gotoFreshPosWritable0(oldIndex, newIndex, xor) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/immutable/WrappedString.scala b/tests/scala2-library/src/library/scala/collection/immutable/WrappedString.scala deleted file mode 100644 index 8726bd2ed903..000000000000 --- a/tests/scala2-library/src/library/scala/collection/immutable/WrappedString.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -import generic._ -import mutable.{Builder, StringBuilder} - 
-/** - * This class serves as a wrapper augmenting `String`s with all the operations - * found in indexed sequences. - * - * The difference between this class and `StringOps` is that calling transformer - * methods such as `filter` and `map` will yield an object of type `WrappedString` - * rather than a `String`. - * - * @param self a string contained within this wrapped string - * - * @since 2.8 - * @define Coll `WrappedString` - * @define coll wrapped string - */ -final class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with StringLike[WrappedString] { - - override protected[this] def thisCollection: WrappedString = this - override protected[this] def toCollection(repr: WrappedString): WrappedString = repr - - /** Creates a string builder buffer as builder for this class */ - override protected[this] def newBuilder = WrappedString.newBuilder - - override def slice(from: Int, until: Int): WrappedString = { - val start = if (from < 0) 0 else from - if (until <= start || start >= repr.length) - return new WrappedString("") - - val end = if (until > length) length else until - new WrappedString(repr.substring(start, end)) - } - override def length = self.length - override def toString = self -} - -/** A companion object for wrapped strings. - * - * @since 2.8 - */ -object WrappedString { - implicit def canBuildFrom: CanBuildFrom[WrappedString, Char, WrappedString] = new CanBuildFrom[WrappedString, Char, WrappedString] { - def apply(from: WrappedString) = newBuilder - def apply() = newBuilder - } - - def newBuilder: Builder[Char, WrappedString] = StringBuilder.newBuilder mapResult (x => new WrappedString(x)) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/AnyRefMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/AnyRefMap.scala deleted file mode 100644 index 6ff79dd1b87f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/AnyRefMap.scala +++ /dev/null @@ -1,487 +0,0 @@ -package scala -package collection -package mutable - -import generic.CanBuildFrom - -/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, - * are typically significantly faster with `AnyRefMap` than [[HashMap]]. - * Note that numbers and characters are not handled specially in AnyRefMap; - * only plain `equals` and `hashCode` are used in comparisons. - * - * Methods that traverse or regenerate the map, including `foreach` and `map`, - * are not in general faster than with `HashMap`. The methods `foreachKey`, - * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster - * than alternative ways to achieve the same functionality. - * - * Maps with open addressing may become less efficient at lookup after - * repeated addition/removal of elements. Although `AnyRefMap` makes a - * decent attempt to remain efficient regardless, calling `repack` - * on a map that will no longer have elements removed but will be - * used heavily may save both time and storage space. - * - * This map is not intended to contain more than 2^29^ entries (approximately - * 500 million). The maximum capacity is 2^30^, but performance will degrade - * rapidly as 2^30^ is approached. 
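The class comment above describes the open-addressing scheme; the probe step it relies on appears in `seekEntry` just below as `e = (e + 2*(x+1)*x - 3) & mask`. A minimal standalone sketch of the slot sequence that step generates (illustrative only, not part of the deleted sources; the hash and mask are arbitrary example values):

    // Reproduces the open-addressing probe step used by seekEntry/seekEntryOrOpen:
    // after checking slot e, the next slot is (e + 2*(x+1)*x - 3) & mask.
    object ProbeSketch {
      def probes(hash: Int, mask: Int, count: Int): Seq[Int] = {
        var e = hash & mask
        var x = 0
        Seq.fill(count) {
          val slot = e
          x += 1
          e = (e + 2 * (x + 1) * x - 3) & mask
          slot
        }
      }

      def main(args: Array[String]): Unit = {
        // mask 0x3F models a 64-slot table; the hash is an arbitrary example value.
        println(probes(hash = 12345, mask = 0x3F, count = 6)) // first six slots visited for one key
      }
    }
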
- * - */ -@SerialVersionUID(1L) -final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) -extends AbstractMap[K, V] - with Map[K, V] - with MapLike[K, V, AnyRefMap[K, V]] - with Serializable -{ - import AnyRefMap._ - def this() = this(AnyRefMap.exceptionDefault, 16, true) - - /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: K => V) = this(defaultEntry, 16, true) - - /** Creates a new `AnyRefMap` with an initial buffer of specified size. - * - * An `AnyRefMap` can typically contain half as many elements as its buffer size - * before it requires resizing. - */ - def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) - - /** Creates a new `AnyRefMap` with specified default values and initial buffer size. */ - def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - - private[this] var mask = 0 - private[this] var _size = 0 - private[this] var _vacant = 0 - private[this] var _hashes: Array[Int] = null - private[this] var _keys: Array[AnyRef] = null - private[this] var _values: Array[AnyRef] = null - - if (initBlank) defaultInitialize(initialBufferSize) - - private[this] def defaultInitialize(n: Int) { - mask = - if (n<0) 0x7 - else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 - _hashes = new Array[Int](mask+1) - _keys = new Array[AnyRef](mask+1) - _values = new Array[AnyRef](mask+1) - } - - private[collection] def initializeTo( - m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] - ) { - mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz - } - - override def size: Int = _size - override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) - - private def imbalanced: Boolean = - (_size + _vacant) > 0.5*mask || _vacant > _size - - private def hashOf(key: K): Int = { - if (key eq null) 0x41081989 - else { - val h = key.hashCode - // Part of the MurmurHash3 32 bit finalizer - val i = (h ^ (h >>> 16)) * 0x85EBCA6B - val j = (i ^ (i >>> 13)) - if (j==0) 0x41081989 else j & 0x7FFFFFFF - } - } - - private def seekEntry(h: Int, k: AnyRef): Int = { - var e = h & mask - var x = 0 - var g = 0 - while ({ g = _hashes(e); g != 0}) { - if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - e | MissingBit - } - - private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { - var e = h & mask - var x = 0 - var g = 0 - var o = -1 - while ({ g = _hashes(e); g != 0}) { - if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e - else if (o == -1 && g+g == 0) o = e - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - if (o >= 0) o | MissVacant else e | MissingBit - } - - override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 - - override def get(key: K): Option[V] = { - val i = seekEntry(hashOf(key), key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val i = seekEntry(hashOf(key), key) - if (i < 0) default else _values(i).asInstanceOf[V] - } - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - val h = hashOf(key) - var i = seekEntryOrOpen(h, key) - if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or 
even contain what we want now - // (but if it does, we'll replace it) - val value = { - val oh = _hashes - val ans = defaultValue - if (oh ne _hashes) { - i = seekEntryOrOpen(h, key) - if (i >= 0) _size -= 1 - } - ans - } - _size += 1 - val j = i & IndexMask - _hashes(j) = h - _keys(j) = key.asInstanceOf[AnyRef] - _values(j) = value.asInstanceOf[AnyRef] - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - value - } - else _values(i).asInstanceOf[V] - } - - /** Retrieves the value associated with a key, or the default for that type if none exists - * (null for AnyRef, 0 for floats and integers). - * - * Note: this is the fastest way to retrieve a value that may or - * may not exist, if the default null/zero is acceptable. For key/value - * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. - */ - def getOrNull(key: K): V = { - val i = seekEntry(hashOf(key), key) - (if (i < 0) null else _values(i)).asInstanceOf[V] - } - - /** Retrieves the value associated with a key. - * If the key does not exist in the map, the `defaultEntry` for that key - * will be returned instead; an exception will be thrown if no - * `defaultEntry` was supplied. - */ - override def apply(key: K): V = { - val i = seekEntry(hashOf(key), key) - if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] - } - - /** Defers to defaultEntry to find a default value for the key. Throws an - * exception if no other default behavior was specified. - */ - override def default(key: K) = defaultEntry(key) - - private def repack(newMask: Int) { - val oh = _hashes - val ok = _keys - val ov = _values - mask = newMask - _hashes = new Array[Int](mask+1) - _keys = new Array[AnyRef](mask+1) - _values = new Array[AnyRef](mask+1) - _vacant = 0 - var i = 0 - while (i < oh.length) { - val h = oh(i) - if (h+h != 0) { - var e = h & mask - var x = 0 - while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - _hashes(e) = h - _keys(e) = ok(i) - _values(e) = ov(i) - } - i += 1 - } - } - - /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. - * - * For maps that undergo a complex creation process with both addition and - * removal of keys, and then are used heavily with no further removal of - * elements, calling `repack` after the end of the creation can result in - * improved performance. Repacking takes time proportional to the number - * of entries in the map. - */ - def repack() { - var m = mask - if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask - while (m > 8 && 8*_size < m) m = m >>> 1 - repack(m) - } - - override def put(key: K, value: V): Option[V] = { - val h = hashOf(key) - val k = key - val i = seekEntryOrOpen(h, k) - if (i < 0) { - val j = i & IndexMask - _hashes(j) = h - _keys(j) = k - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - None - } - else { - val ans = Some(_values(i).asInstanceOf[V]) - _hashes(i) = h - _keys(i) = k - _values(i) = value.asInstanceOf[AnyRef] - ans - } - } - - /** Updates the map to include a new key-value pair. - * - * This is the fastest way to add an entry to an `AnyRefMap`. 
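The scaladoc above stresses that `update` is the fastest way to add an entry and that `repack` should follow heavy removal. A short usage sketch of the operations defined in this deleted file (`update`, `+=`, `-=`, `repack`, `getOrNull`); the keys and counts are made-up example data, and it assumes a Scala 2 standard library that still provides `AnyRefMap`:

    import scala.collection.mutable.AnyRefMap

    object AnyRefMapSketch {
      def main(args: Array[String]): Unit = {
        val counts = AnyRefMap.empty[String, Int]
        counts("spam") = 3                 // update: the fastest way to add an entry
        counts += ("eggs" -> 7)            // += delegates to update
        counts -= "spam"                   // removal leaves a vacant slot behind
        counts.repack()                    // re-tightens the table after removals
        println(counts.getOrNull("spam"))  // prints 0, the zero default for Int, since the key is gone
        println(counts("eggs"))            // prints 7
      }
    }
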
- */ - override def update(key: K, value: V): Unit = { - val h = hashOf(key) - val k = key - val i = seekEntryOrOpen(h, k) - if (i < 0) { - val j = i & IndexMask - _hashes(j) = h - _keys(j) = k - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - } - else { - _hashes(i) = h - _keys(i) = k - _values(i) = value.asInstanceOf[AnyRef] - } - } - - /** Adds a new key/value pair to this map and returns the map. */ - def +=(key: K, value: V): this.type = { update(key, value); this } - - def +=(kv: (K, V)): this.type = { update(kv._1, kv._2); this } - - def -=(key: K): this.type = { - val i = seekEntry(hashOf(key), key) - if (i >= 0) { - _size -= 1 - _vacant += 1 - _hashes(i) = Int.MinValue - _keys(i) = null - _values(i) = null - } - this - } - - def iterator: Iterator[(K, V)] = new Iterator[(K, V)] { - private[this] val hz = _hashes - private[this] val kz = _keys - private[this] val vz = _values - - private[this] var index = 0 - - def hasNext: Boolean = index < hz.length && { - var h = hz(index) - while (h+h == 0) { - index += 1 - if (index >= hz.length) return false - h = hz(index) - } - true - } - - def next: (K, V) = { - if (hasNext) { - val ans = (kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) - index += 1 - ans - } - else throw new NoSuchElementException("next") - } - } - - override def foreach[U](f: ((K,V)) => U) { - var i = 0 - var e = _size - while (e > 0) { - while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 - if (i < _hashes.length) { - f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])) - i += 1 - e -= 1 - } - else return - } - } - - override def clone(): AnyRefMap[K, V] = { - val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = java.util.Arrays.copyOf(_values, _values.length) - val arm = new AnyRefMap[K, V](defaultEntry, 1, false) - arm.initializeTo(mask, _size, _vacant, hz, kz, vz) - arm - } - - override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V1]] - arm += kv - arm - } - - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V1]] - xs.foreach(kv => arm += kv) - arm - } - - override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V1]] - arm += (key, value) - arm - } - - private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) { - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - f(elems(i).asInstanceOf[A]) - } - i += 1 - } - } - - /** Applies a function to all keys of this map. */ - def foreachKey[A](f: K => A) { foreachElement[K,A](_keys, f) } - - /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A) { foreachElement[V,A](_values, f) } - - /** Creates a new `AnyRefMap` with different values. - * Unlike `mapValues`, this method generates a new - * collection immediately.
- */ - def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { - val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) - val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = new Array[AnyRef](_values.length) - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - arm.initializeTo(mask, _size, _vacant, hz, kz, vz) - arm - } - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - def transformValues(f: V => V): this.type = { - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - this - } - -} - -object AnyRefMap { - private final val IndexMask = 0x3FFFFFFF - private final val MissingBit = 0x80000000 - private final val VacantBit = 0x40000000 - private final val MissVacant = 0xC0000000 - - @SerialVersionUID(1L) - private class ExceptionDefault extends (Any => Nothing) with Serializable { - def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) - } - private val exceptionDefault = new ExceptionDefault - - implicit def canBuildFrom[K <: AnyRef, V, J <: AnyRef, U]: CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] = - new CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] { - def apply(from: AnyRefMap[K,V]): AnyRefMapBuilder[J, U] = apply() - def apply(): AnyRefMapBuilder[J, U] = new AnyRefMapBuilder[J, U] - } - - /** A builder for instances of `AnyRefMap`. - * - * This builder can be reused to create multiple instances. - */ - final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { - private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] - def +=(entry: (K, V)): this.type = { - elems += entry - this - } - def clear() { elems = new AnyRefMap[K, V] } - def result(): AnyRefMap[K, V] = elems - } - - /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ - def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = { - val sz = if (elems.hasDefiniteSize) elems.size else 4 - val arm = new AnyRefMap[K, V](sz * 2) - elems.foreach{ case (k,v) => arm(k) = v } - if (arm.size < (sz>>3)) arm.repack() - arm - } - - /** Creates a new empty `AnyRefMap`. */ - def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V] - - /** Creates a new empty `AnyRefMap` with the supplied default */ - def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) - - /** Creates a new `AnyRefMap` from arrays of keys and values. - * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. - */ - def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { - val sz = math.min(keys.length, values.length) - val arm = new AnyRefMap[K, V](sz * 2) - var i = 0 - while (i < sz) { arm(keys(i)) = values(i); i += 1 } - if (arm.size < (sz>>3)) arm.repack() - arm - } - - /** Creates a new `AnyRefMap` from keys and values. - * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. 
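Editorial aside, not part of the patch hunks: a minimal sketch of how the AnyRefMap API in this removed file is typically used, assuming the pre-2.13 scala.collection.mutable.AnyRefMap shown above. The object and value names below are made up for illustration.

import scala.collection.mutable.AnyRefMap

object AnyRefMapSketch {
  def main(args: Array[String]): Unit = {
    // The default is computed per missing key and backs `apply`; it is never stored.
    val counts = AnyRefMap.withDefault[String, Int](_ => 0)

    counts("scala") = 10               // `update` is the fastest way to add an entry
    counts += ("dotty" -> 5)           // `+=` delegates to `update`
    counts.getOrElseUpdate("sbt", 1)   // inserts 1 only if "sbt" is absent

    println(counts("missing"))         // 0 via the default; no entry is created
    println(counts.getOrNull("nope"))  // the zero value (0 here, null for reference values)

    // Companion helpers from the same file.
    val zipped = AnyRefMap.fromZip(Array("a", "b"), Array(1, 2))
    zipped.foreachKey(k => println(k))

    // After heavy insert/remove churn, repack() tightens the table for faster lookups.
    counts -= "dotty"
    counts.repack()
  }
}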
- */ - def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = { - val sz = math.min(keys.size, values.size) - val arm = new AnyRefMap[K, V](sz * 2) - val ki = keys.iterator - val vi = values.iterator - while (ki.hasNext && vi.hasNext) arm(ki.next) = vi.next - if (arm.size < (sz >> 3)) arm.repack() - arm - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ArrayBuffer.scala b/tests/scala2-library/src/library/scala/collection/mutable/ArrayBuffer.scala deleted file mode 100644 index 23d386f729d0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ArrayBuffer.scala +++ /dev/null @@ -1,196 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ -import parallel.mutable.ParArray - -/** An implementation of the `Buffer` class using an array to - * represent the assembled sequence internally. Append, update and random - * access take constant time (amortized time). Prepends and removes are - * linear in the buffer size. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array_buffers "Scala's Collection Library overview"]] - * section on `Array Buffers` for more information. - - * - * @tparam A the type of this arraybuffer's elements. - * - * @define Coll `mutable.ArrayBuffer` - * @define coll array buffer - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]` - * is defined in object `ArrayBuffer`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ArrayBuffer`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1529165946227428979L) -class ArrayBuffer[A](override protected val initialSize: Int) - extends AbstractBuffer[A] - with Buffer[A] - with GenericTraversableTemplate[A, ArrayBuffer] - with BufferLike[A, ArrayBuffer[A]] - with IndexedSeqOptimized[A, ArrayBuffer[A]] - with Builder[A, ArrayBuffer[A]] - with ResizableArray[A] - with CustomParallelizable[A, ParArray[A]] - with Serializable { - - override def companion: GenericCompanion[ArrayBuffer] = ArrayBuffer - - import scala.collection.Traversable - - def this() = this(16) - - def clear() { reduceToSize(0) } - - override def sizeHint(len: Int) { - if (len > size && len >= 1) { - val newarray = new Array[AnyRef](len) - java.lang.System.arraycopy(array, 0, newarray, 0, size0) - array = newarray - } - } - - override def par = ParArray.handoff[A](array.asInstanceOf[Array[A]], size) - - /** Appends a single element to this buffer and returns - * the identity of the buffer. It takes constant amortized time. - * - * @param elem the element to append. - * @return the updated buffer. 
- */ - def +=(elem: A): this.type = { - ensureSize(size0 + 1) - array(size0) = elem.asInstanceOf[AnyRef] - size0 += 1 - this - } - - /** Appends a number of elements provided by a traversable object. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - * @return the updated buffer. - */ - override def ++=(xs: TraversableOnce[A]): this.type = xs match { - case v: scala.collection.IndexedSeqLike[_, _] => - val n = v.length - ensureSize(size0 + n) - v.copyToArray(array.asInstanceOf[scala.Array[Any]], size0, n) - size0 += n - this - case _ => - super.++=(xs) - } - - /** Prepends a single element to this buffer and returns - * the identity of the buffer. It takes time linear in - * the buffer size. - * - * @param elem the element to prepend. - * @return the updated buffer. - */ - def +=:(elem: A): this.type = { - ensureSize(size0 + 1) - copy(0, 1, size0) - array(0) = elem.asInstanceOf[AnyRef] - size0 += 1 - this - } - - /** Prepends a number of elements provided by a traversable object. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - * @return the updated buffer. - */ - override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this } - - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a new - * one. Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. - * @param seq the traversable object providing all elements to insert. - * @throws IndexOutOfBoundsException if `n` is out of bounds. - */ - def insertAll(n: Int, seq: Traversable[A]) { - if (n < 0 || n > size0) throw new IndexOutOfBoundsException(n.toString) - val len = seq.size - val newSize = size0 + len - ensureSize(newSize) - - copy(n, n + len, size0 - n) - seq.copyToArray(array.asInstanceOf[Array[Any]], n) - size0 = newSize - } - - /** Removes the element on a given index position. It takes time linear in - * the buffer size. - * - * @param n the index which refers to the first element to remove. - * @param count the number of elements to remove. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length - count` (with `count > 0`). - * @throws IllegalArgumentException if `count < 0`. - */ - override def remove(n: Int, count: Int) { - if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString) - else if (count == 0) return // Did nothing - if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString) - copy(n + count, n, size0 - (n + count)) - reduceToSize(size0 - count) - } - - /** Removes the element at a given index position. - * - * @param n the index which refers to the element to delete. - * @return the element that was formerly at position `n`. - */ - def remove(n: Int): A = { - val result = apply(n) - remove(n, 1) - result - } - - def result: ArrayBuffer[A] = this - - /** Defines the prefix of the string representation. - */ - override def stringPrefix: String = "ArrayBuffer" - -} - -/** Factory object for the `ArrayBuffer` class. 
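Editorial aside, not part of the patch: a small sketch of the ArrayBuffer operations documented in this removed file (append, prepend, insert, remove), assuming the pre-2.13 mutable collections; names are illustrative.

import scala.collection.mutable.ArrayBuffer

object ArrayBufferSketch {
  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(2, 3, 4)

    buf += 5                        // amortized O(1) append
    1 +=: buf                       // O(n) prepend
    buf.insertAll(2, Seq(10, 11))   // insert at index 2 without overwriting
    val first = buf.remove(0)       // removes and returns the element at index 0
    buf.remove(1, 2)                // removes two elements starting at index 1

    println(first)                  // 1
    println(buf.mkString(", "))     // 2, 3, 4, 5
  }
}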
- * - * $factoryInfo - * @define coll array buffer - * @define Coll `ArrayBuffer` - */ -object ArrayBuffer extends SeqFactory[ArrayBuffer] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayBuffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ArrayBuffer[A]] = new ArrayBuffer[A] -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ArrayBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/ArrayBuilder.scala deleted file mode 100644 index d023110c1b42..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ArrayBuilder.scala +++ /dev/null @@ -1,671 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import scala.reflect.ClassTag - -/** A builder class for arrays. - * - * @since 2.8 - * - * @tparam T the type of the elements for the builder. - */ -abstract class ArrayBuilder[T] extends ReusableBuilder[T, Array[T]] with Serializable - -/** A companion object for array builders. - * - * @since 2.8 - */ -object ArrayBuilder { - - /** Creates a new arraybuilder of type `T`. - * - * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. - * @return a new empty array builder. - */ - def make[T: ClassTag](): ArrayBuilder[T] = { - val tag = implicitly[ClassTag[T]] - tag.runtimeClass match { - case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] - case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] - case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] - case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] - case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] - case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] - case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] - case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] - case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] - case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] - } - } - - /** A class for array builders for arrays of reference types. - * - * This builder can be reused. - * - * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. 
- */ - final class ofRef[T <: AnyRef : ClassTag] extends ArrayBuilder[T] { - - private var elems: Array[T] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[T] = { - val newelems = new Array[T](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: T): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[T]): this.type = (xs.asInstanceOf[AnyRef]) match { - case xs: WrappedArray.ofRef[_] => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofRef[_] => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofRef" - } - - /** A class for array builders for arrays of `byte`s. It can be reused. */ - final class ofByte extends ArrayBuilder[Byte] { - - private var elems: Array[Byte] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Byte] = { - val newelems = new Array[Byte](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Byte): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Byte]): this.type = xs match { - case xs: WrappedArray.ofByte => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofByte => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofByte" - } - - /** A class for array builders for arrays of `short`s. It can be reused. 
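Editorial aside, not part of the patch: a sketch of how ArrayBuilder.make and the specialized builders above are typically used, assuming the pre-2.13 API from this file; names are illustrative.

import scala.collection.mutable.ArrayBuilder

object ArrayBuilderSketch {
  def main(args: Array[String]): Unit = {
    // make[T] selects a primitive-specialized builder (ofInt here) from the ClassTag.
    val b = ArrayBuilder.make[Int]()
    b.sizeHint(4)              // pre-allocate so += never needs to grow the buffer
    b += 1
    b += 2
    b ++= Array(3, 4)          // fast path: Array.copy from the wrapped array
    val xs: Array[Int] = b.result()

    b.clear()                  // builders are reusable after clear()
    println(xs.mkString(", ")) // 1, 2, 3, 4
  }
}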
*/ - final class ofShort extends ArrayBuilder[Short] { - - private var elems: Array[Short] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Short] = { - val newelems = new Array[Short](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Short): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Short]): this.type = xs match { - case xs: WrappedArray.ofShort => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofShort => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofShort" - } - - /** A class for array builders for arrays of `char`s. It can be reused. */ - final class ofChar extends ArrayBuilder[Char] { - - private var elems: Array[Char] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Char] = { - val newelems = new Array[Char](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Char): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Char]): this.type = xs match { - case xs: WrappedArray.ofChar => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofChar => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofChar" - } - - /** A class for array builders for arrays of `int`s. It can be reused. 
*/ - final class ofInt extends ArrayBuilder[Int] { - - private var elems: Array[Int] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Int] = { - val newelems = new Array[Int](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Int): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Int]): this.type = xs match { - case xs: WrappedArray.ofInt => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofInt => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofInt" - } - - /** A class for array builders for arrays of `long`s. It can be reused. */ - final class ofLong extends ArrayBuilder[Long] { - - private var elems: Array[Long] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Long] = { - val newelems = new Array[Long](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Long): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Long]): this.type = xs match { - case xs: WrappedArray.ofLong => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofLong => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofLong" - } - - /** A class for array builders for arrays of `float`s. It can be reused. 
*/ - final class ofFloat extends ArrayBuilder[Float] { - - private var elems: Array[Float] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Float] = { - val newelems = new Array[Float](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Float): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Float]): this.type = xs match { - case xs: WrappedArray.ofFloat => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofFloat => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofFloat" - } - - /** A class for array builders for arrays of `double`s. It can be reused. */ - final class ofDouble extends ArrayBuilder[Double] { - - private var elems: Array[Double] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Double] = { - val newelems = new Array[Double](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Double): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Double]): this.type = xs match { - case xs: WrappedArray.ofDouble => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofDouble => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofDouble" - } - - /** A class for array builders for arrays of `boolean`s. It can be reused. 
*/ - class ofBoolean extends ArrayBuilder[Boolean] { - - private var elems: Array[Boolean] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): Array[Boolean] = { - val newelems = new Array[Boolean](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Boolean): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Boolean]): this.type = xs match { - case xs: WrappedArray.ofBoolean => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofBoolean => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofBoolean" - } - - /** A class for array builders for arrays of `Unit` type. It can be reused. */ - final class ofUnit extends ArrayBuilder[Unit] { - - private var size: Int = 0 - - def +=(elem: Unit): this.type = { - size += 1 - this - } - - override def ++=(xs: TraversableOnce[Unit]): this.type = { - size += xs.size - this - } - - def clear() { size = 0 } - - def result() = { - val ans = new Array[Unit](size) - var i = 0 - while (i < size) { ans(i) = (); i += 1 } - ans - } - - override def equals(other: Any): Boolean = other match { - case x: ofUnit => (size == x.size) - case _ => false - } - - override def toString = "ArrayBuilder.ofUnit" - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ArrayLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/ArrayLike.scala deleted file mode 100644 index 80b38a847a55..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ArrayLike.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out the - * `deep` method for arrays and wrapped arrays and serves as a marker trait - * for array wrappers. - * - * @tparam A type of the elements contained in the array like object. - * @tparam Repr the type of the actual collection containing the elements. - * - * @define Coll `ArrayLike` - * @version 2.8 - * @since 2.8 - */ -trait ArrayLike[A, +Repr] extends Any with IndexedSeqOptimized[A, Repr] { self => - - /** Creates a possible nested `IndexedSeq` which consists of all the elements - * of this array. If the elements are arrays themselves, the `deep` transformation - * is applied recursively to them. The `stringPrefix` of the `IndexedSeq` is - * "Array", hence the `IndexedSeq` prints like an array with all its - * elements shown, and the same recursively for any subarrays. 
- * - * Example: - * {{{ - * Array(Array(1, 2), Array(3, 4)).deep.toString - * }}} - * prints: `Array(Array(1, 2), Array(3, 4))` - * - * @return An possibly nested indexed sequence of consisting of all the elements of the array. - */ - def deep: scala.collection.IndexedSeq[Any] = new scala.collection.AbstractSeq[Any] with scala.collection.IndexedSeq[Any] { - def length = self.length - def apply(idx: Int): Any = self.apply(idx) match { - case x: AnyRef if x.getClass.isArray => WrappedArray.make(x).deep - case x => x - } - override def stringPrefix = "Array" - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ArrayOps.scala b/tests/scala2-library/src/library/scala/collection/mutable/ArrayOps.scala deleted file mode 100644 index fac3986cdeab..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ArrayOps.scala +++ /dev/null @@ -1,309 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import scala.reflect.ClassTag -import parallel.mutable.ParArray - -/** This class serves as a wrapper for `Array`s with all the operations found in - * indexed sequences. Where needed, instances of arrays are implicitly converted - * into this class. - * - * The difference between this class and `WrappedArray` is that calling transformer - * methods such as `filter` and `map` will yield an array, whereas a `WrappedArray` - * will remain a `WrappedArray`. - * - * @since 2.8 - * - * @tparam T type of the elements contained in this array. - * - * @define Coll `Array` - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] { - - private def elementClass: Class[_] = - repr.getClass.getComponentType - - override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) { - val l = len min repr.length min (xs.length - start) - if (l > 0) Array.copy(repr, 0, xs, start, l) - } - - override def slice(from: Int, until: Int): Array[T] = { - val lo = math.max(from, 0) - val hi = math.min(math.max(until, 0), repr.length) - val size = math.max(hi - lo, 0) - val result = java.lang.reflect.Array.newInstance(elementClass, size) - if (size > 0) { - Array.copy(repr, lo, result, 0, size) - } - result.asInstanceOf[Array[T]] - } - - override def toArray[U >: T : ClassTag]: Array[U] = { - val thatElementClass = implicitly[ClassTag[U]].runtimeClass - if (elementClass eq thatElementClass) - repr.asInstanceOf[Array[U]] - else - super.toArray[U] - } - - def :+[B >: T: ClassTag](elem: B): Array[B] = { - val result = Array.ofDim[B](repr.length + 1) - Array.copy(repr, 0, result, 0, repr.length) - result(repr.length) = elem - result - } - - def +:[B >: T: ClassTag](elem: B): Array[B] = { - val result = Array.ofDim[B](repr.length + 1) - result(0) = elem - Array.copy(repr, 0, result, 1, repr.length) - result - } - - override def par = ParArray.handoff(repr) - - /** Flattens a two-dimensional array by concatenating all its rows - * into a single array. - * - * @tparam U Type of row elements. - * @param asTrav A function that converts elements of this array to rows - arrays of type `U`. - * @return An array obtained by concatenating rows of this array. 
- */ - def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = { - val b = Array.newBuilder[U] - b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum) - for (xs <- this) - b ++= asTrav(xs) - b.result() - } - - /** Transposes a two dimensional array. - * - * @tparam U Type of row elements. - * @param asArray A function that converts elements of this array to rows - arrays of type `U`. - * @return An array obtained by replacing elements of this arrays with rows the represent. - */ - def transpose[U](implicit asArray: T => Array[U]): Array[Array[U]] = { - val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass)) - if (isEmpty) bb.result() - else { - def mkRowBuilder() = Array.newBuilder(ClassTag[U](elementClass.getComponentType)) - val bs = asArray(head) map (_ => mkRowBuilder()) - for (xs <- this) { - var i = 0 - for (x <- asArray(xs)) { - bs(i) += x - i += 1 - } - } - for (b <- bs) bb += b.result() - bb.result() - } - } - - /** Converts an array of pairs into an array of first elements and an array of second elements. - * - * @tparam T1 the type of the first half of the element pairs - * @tparam T2 the type of the second half of the element pairs - * @param asPair an implicit conversion which asserts that the element type - * of this Array is a pair. - * @param ct1 a class tag for T1 type parameter that is required to create an instance - * of Array[T1] - * @param ct2 a class tag for T2 type parameter that is required to create an instance - * of Array[T2] - * @return a pair of Arrays, containing, respectively, the first and second half - * of each element pair of this Array. - */ - // implementation NOTE: ct1 and ct2 can't be written as context bounds because desugared - // implicits are put in front of asPair parameter that is supposed to guide type inference - def unzip[T1, T2](implicit asPair: T => (T1, T2), ct1: ClassTag[T1], ct2: ClassTag[T2]): (Array[T1], Array[T2]) = { - val a1 = new Array[T1](length) - val a2 = new Array[T2](length) - var i = 0 - while (i < length) { - val e = apply(i) - a1(i) = asPair(e)._1 - a2(i) = asPair(e)._2 - i += 1 - } - (a1, a2) - } - - /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. - * - * @tparam T1 the type of the first of three elements in the triple - * @tparam T2 the type of the second of three elements in the triple - * @tparam T3 the type of the third of three elements in the triple - * @param asTriple an implicit conversion which asserts that the element type - * of this Array is a triple. - * @param ct1 a class tag for T1 type parameter that is required to create an instance - * of Array[T1] - * @param ct2 a class tag for T2 type parameter that is required to create an instance - * of Array[T2] - * @param ct3 a class tag for T3 type parameter that is required to create an instance - * of Array[T3] - * @return a triple of Arrays, containing, respectively, the first, second, and third - * elements from each element triple of this Array. 
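Editorial aside, not part of the patch: a sketch of the flatten, transpose and unzip extension methods described above, which become available on plain arrays through the implicit wrapping into ArrayOps; assumes the pre-2.13 API, names are illustrative.

object ArrayOpsSketch {
  def main(args: Array[String]): Unit = {
    val grid: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))

    val flat = grid.flatten          // Array(1, 2, 3, 4)
    val cols = grid.transpose        // Array(Array(1, 3), Array(2, 4))

    val pairs = Array("a" -> 1, "b" -> 2)
    val (letters, numbers) = pairs.unzip

    println(flat.mkString(", "))
    println(cols.map(_.mkString("(", ",", ")")).mkString(" "))
    println(letters.mkString(", ") + " / " + numbers.mkString(", "))
  }
}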
- */ - // implementation NOTE: ct1, ct2, ct3 can't be written as context bounds because desugared - // implicits are put in front of asPair parameter that is supposed to guide type inference - def unzip3[T1, T2, T3](implicit asTriple: T => (T1, T2, T3), ct1: ClassTag[T1], ct2: ClassTag[T2], - ct3: ClassTag[T3]): (Array[T1], Array[T2], Array[T3]) = { - val a1 = new Array[T1](length) - val a2 = new Array[T2](length) - val a3 = new Array[T3](length) - var i = 0 - while (i < length) { - val e = apply(i) - a1(i) = asTriple(e)._1 - a2(i) = asTriple(e)._2 - a3(i) = asTriple(e)._3 - i += 1 - } - (a1, a2, a3) - } - - def seq = thisCollection -} - -/** - * A companion object for `ArrayOps`. - * - * @since 2.8 - */ -object ArrayOps { - - /** A class of `ArrayOps` for arrays containing reference types. */ - final class ofRef[T <: AnyRef](override val repr: Array[T]) extends AnyVal with ArrayOps[T] with ArrayLike[T, Array[T]] { - - override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr) - override protected[this] def toCollection(repr: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](repr) - override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(ClassTag[T](repr.getClass.getComponentType)) - - def length: Int = repr.length - def apply(index: Int): T = repr(index) - def update(index: Int, elem: T) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Byte`s. */ - final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] { - - override protected[this] def thisCollection: WrappedArray[Byte] = new WrappedArray.ofByte(repr) - override protected[this] def toCollection(repr: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofByte - - def length: Int = repr.length - def apply(index: Int): Byte = repr(index) - def update(index: Int, elem: Byte) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Short`s. */ - final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOps[Short] with ArrayLike[Short, Array[Short]] { - - override protected[this] def thisCollection: WrappedArray[Short] = new WrappedArray.ofShort(repr) - override protected[this] def toCollection(repr: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofShort - - def length: Int = repr.length - def apply(index: Int): Short = repr(index) - def update(index: Int, elem: Short) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Char`s. */ - final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[Char] with ArrayLike[Char, Array[Char]] { - - override protected[this] def thisCollection: WrappedArray[Char] = new WrappedArray.ofChar(repr) - override protected[this] def toCollection(repr: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofChar - - def length: Int = repr.length - def apply(index: Int): Char = repr(index) - def update(index: Int, elem: Char) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Int`s. 
*/ - final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[Int] with ArrayLike[Int, Array[Int]] { - - override protected[this] def thisCollection: WrappedArray[Int] = new WrappedArray.ofInt(repr) - override protected[this] def toCollection(repr: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofInt - - def length: Int = repr.length - def apply(index: Int): Int = repr(index) - def update(index: Int, elem: Int) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Long`s. */ - final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[Long] with ArrayLike[Long, Array[Long]] { - - override protected[this] def thisCollection: WrappedArray[Long] = new WrappedArray.ofLong(repr) - override protected[this] def toCollection(repr: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofLong - - def length: Int = repr.length - def apply(index: Int): Long = repr(index) - def update(index: Int, elem: Long) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Float`s. */ - final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOps[Float] with ArrayLike[Float, Array[Float]] { - - override protected[this] def thisCollection: WrappedArray[Float] = new WrappedArray.ofFloat(repr) - override protected[this] def toCollection(repr: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofFloat - - def length: Int = repr.length - def apply(index: Int): Float = repr(index) - def update(index: Int, elem: Float) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Double`s. */ - final class ofDouble(override val repr: Array[Double]) extends AnyVal with ArrayOps[Double] with ArrayLike[Double, Array[Double]] { - - override protected[this] def thisCollection: WrappedArray[Double] = new WrappedArray.ofDouble(repr) - override protected[this] def toCollection(repr: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofDouble - - def length: Int = repr.length - def apply(index: Int): Double = repr(index) - def update(index: Int, elem: Double) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Boolean`s. */ - final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] { - - override protected[this] def thisCollection: WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr) - override protected[this] def toCollection(repr: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofBoolean - - def length: Int = repr.length - def apply(index: Int): Boolean = repr(index) - def update(index: Int, elem: Boolean) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays of `Unit` types. 
*/ - final class ofUnit(override val repr: Array[Unit]) extends AnyVal with ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] { - - override protected[this] def thisCollection: WrappedArray[Unit] = new WrappedArray.ofUnit(repr) - override protected[this] def toCollection(repr: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofUnit - - def length: Int = repr.length - def apply(index: Int): Unit = repr(index) - def update(index: Int, elem: Unit) { repr(index) = elem } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ArraySeq.scala b/tests/scala2-library/src/library/scala/collection/mutable/ArraySeq.scala deleted file mode 100644 index 1e82096bafcd..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ArraySeq.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ -import parallel.mutable.ParArray - -/** A class for polymorphic arrays of elements that's represented - * internally by an array of objects. This means that elements of - * primitive types are boxed. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array_sequences "Scala's Collection Library overview"]] - * section on `Array Sequences` for more information. - * - * @tparam A type of the elements contained in this array sequence. - * @param length the length of the underlying array. - * - * @define Coll `ArraySeq` - * @define coll array sequence - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ArraySeq[B]` because an implicit of type `CanBuildFrom[ArraySeq, B, ArraySeq[B]]` - * is defined in object `ArraySeq`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ArraySeq`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1530165946227428979L) -class ArraySeq[A](override val length: Int) -extends AbstractSeq[A] - with IndexedSeq[A] - with GenericTraversableTemplate[A, ArraySeq] - with IndexedSeqOptimized[A, ArraySeq[A]] - with CustomParallelizable[A, ParArray[A]] - with Serializable -{ - - override def companion: GenericCompanion[ArraySeq] = ArraySeq - - val array: Array[AnyRef] = new Array[AnyRef](length) - - override def par = ParArray.handoff(array.asInstanceOf[Array[A]], length) - - def apply(idx: Int): A = { - if (idx >= length) throw new IndexOutOfBoundsException(idx.toString) - array(idx).asInstanceOf[A] - } - - def update(idx: Int, elem: A) { - if (idx >= length) throw new IndexOutOfBoundsException(idx.toString) - array(idx) = elem.asInstanceOf[AnyRef] - } - - override def foreach[U](f: A => U) { - var i = 0 - while (i < length) { - f(array(i).asInstanceOf[A]) - i += 1 - } - } - - /** Fills the given array `xs` with at most `len` elements of - * this traversable starting at position `start`. 
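Editorial aside, not part of the patch: a sketch of the ArraySeq behaviour described above (fixed length, boxed storage, in-place update, bulk copy), assuming the pre-2.13 API; names are illustrative.

import scala.collection.mutable.ArraySeq

object ArraySeqSketch {
  def main(args: Array[String]): Unit = {
    val xs = ArraySeq(1, 2, 3)      // fixed length; elements are stored boxed
    xs(1) = 20                      // in-place update; the length never changes
    println(xs.mkString(", "))      // 1, 20, 3

    val target = new Array[Int](5)
    xs.copyToArray(target, 1, 2)    // copies at most 2 elements, starting at index 1
    println(target.mkString(", "))  // 0, 1, 20, 0, 0
  }
}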
- * Copying will stop once either the end of the current traversable is reached or - * `len` elements have been copied or the end of the array is reached. - * - * @param xs the array to fill. - * @param start starting index. - * @param len number of elements to copy - */ - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - val len1 = len min (xs.length - start) min length - if (len1 > 0) Array.copy(array, 0, xs, start, len1) - } - - override def clone(): ArraySeq[A] = { - val cloned = array.clone().asInstanceOf[Array[AnyRef]] - new ArraySeq[A](length) { - override val array = cloned - } - } - -} - -/** $factoryInfo - * @define coll array sequence - * @define Coll `ArraySeq` - */ -object ArraySeq extends SeqFactory[ArraySeq] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArraySeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ArraySeq[A]] = - new ArrayBuffer[A] mapResult { buf => - val result = new ArraySeq[A](buf.length) - buf.copyToArray(result.array.asInstanceOf[Array[Any]], 0) - result - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ArrayStack.scala b/tests/scala2-library/src/library/scala/collection/mutable/ArrayStack.scala deleted file mode 100644 index 951a90b084e4..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ArrayStack.scala +++ /dev/null @@ -1,246 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ -import scala.reflect.ClassTag - -/** Factory object for the `ArrayStack` class. - * - * $factoryInfo - * @define coll array stack - * @define Coll `ArrayStack` - */ -object ArrayStack extends SeqFactory[ArrayStack] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayStack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ArrayStack[A]] = new ArrayStack[A] - def empty: ArrayStack[Nothing] = new ArrayStack() - def apply[A: ClassTag](elems: A*): ArrayStack[A] = { - val els: Array[AnyRef] = elems.reverseMap(_.asInstanceOf[AnyRef])(breakOut) - if (els.length == 0) new ArrayStack() - else new ArrayStack[A](els, els.length) - } - - private[mutable] def growArray(x: Array[AnyRef]) = { - val y = new Array[AnyRef](math.max(x.length * 2, 1)) - Array.copy(x, 0, y, 0, x.length) - y - } - - private[mutable] def clone(x: Array[AnyRef]) = { - val y = new Array[AnyRef](x.length) - Array.copy(x, 0, y, 0, x.length) - y - } -} - - -/** Simple stack class backed by an array. Should be significantly faster - * than the standard mutable stack. - * - * @author David MacIver - * @since 2.7 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array_stacks "Scala's Collection Library overview"]] - * section on `Array Stacks` for more information. - * - * @tparam T type of the elements contained in this array stack. 
- * - * @define Coll `ArrayStack` - * @define coll array stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(8565219180626620510L) -class ArrayStack[T] private(private var table : Array[AnyRef], - private var index : Int) -extends AbstractSeq[T] - with IndexedSeq[T] - with IndexedSeqLike[T, ArrayStack[T]] - with GenericTraversableTemplate[T, ArrayStack] - with IndexedSeqOptimized[T, ArrayStack[T]] - with Cloneable[ArrayStack[T]] - with Builder[T, ArrayStack[T]] - with Serializable -{ - def this() = this(new Array[AnyRef](1), 0) - - /** Retrieve n'th element from stack, where top of stack has index 0. - * - * This is a constant time operation. - * - * @param n the index of the element to return - * @return the element at the specified index - * @throws IndexOutOfBoundsException if the index is out of bounds - */ - def apply(n: Int): T = - table(index - 1 - n).asInstanceOf[T] - - /** The number of elements in the stack */ - def length = index - - override def companion = ArrayStack - - /** Replace element at index `n` with the new element `newelem`. - * - * This is a constant time operation. - * - * @param n the index of the element to replace. - * @param newelem the new element. - * @throws IndexOutOfBoundsException if the index is not valid - */ - def update(n: Int, newelem: T) = - table(index - 1 - n) = newelem.asInstanceOf[AnyRef] - - /** Push an element onto the stack. - * - * @param x The element to push - */ - def push(x: T) { - if (index == table.length) table = ArrayStack.growArray(table) - table(index) = x.asInstanceOf[AnyRef] - index += 1 - } - - /** Pop the top element off the stack. - * - * @return the element on top of the stack - */ - def pop(): T = { - if (index == 0) sys.error("Stack empty") - index -= 1 - val x = table(index).asInstanceOf[T] - table(index) = null - x - } - - /** View the top element of the stack. - * - * Does not remove the element on the top. If the stack is empty, - * an exception is thrown. - * - * @return the element on top of the stack. - */ - def top: T = table(index - 1).asInstanceOf[T] - - /** Duplicate the top element of the stack. - * - * After calling this method, the stack will have an additional element at - * the top equal to the element that was previously at the top. - * If the stack is empty, an exception is thrown. - */ - def dup() = push(top) - - /** Empties the stack. */ - def clear() { - index = 0 - table = new Array(1) - } - - /** Empties the stack, passing all elements on it in LIFO order to the - * provided function. - * - * @param f The function to drain to. - */ - def drain(f: T => Unit) = while (!isEmpty) f(pop()) - - /** Pushes all the provided elements in the traversable object onto the stack. - * - * @param xs The source of elements to push. - * @return A reference to this stack. - */ - override def ++=(xs: TraversableOnce[T]): this.type = { xs foreach += ; this } - - /** Does the same as `push`, but returns the updated stack. - * - * @param x The element to push. - * @return A reference to this stack. - */ - def +=(x: T): this.type = { push(x); this } - - def result = { - reverseTable() - this - } - - private def reverseTable() { - var i = 0 - val until = index / 2 - while (i < until) { - val revi = index - i - 1 - val tmp = table(i) - table(i) = table(revi) - table(revi) = tmp - i += 1 - } - } - - /** Pop the top two elements off the stack, apply `f` to them and push the result - * back on to the stack. 
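Editorial aside, not part of the patch: a sketch of the ArrayStack operations documented in this removed file, assuming the pre-2.13 API; names are illustrative.

import scala.collection.mutable.ArrayStack

object ArrayStackSketch {
  def main(args: Array[String]): Unit = {
    val stack = new ArrayStack[Int]()

    stack.push(1)
    stack.push(2)
    stack += 3                      // same as push, but returns the stack
    stack.dup()                     // duplicate the top element

    println(stack.top)              // 3, without removing it
    println(stack.pop())            // 3 (the duplicate)

    stack.drain(x => println(x))    // pops 3, 2, 1 in LIFO order
    println(stack.isEmpty)          // true
  }
}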
- * - * This function will throw an exception if stack contains fewer than 2 elements. - * - * @param f The function to apply to the top two elements. - */ - def combine(f: (T, T) => T): Unit = push(f(pop(), pop())) - - /** Repeatedly combine the top elements of the stack until the stack contains only - * one element. - * - * @param f The function to apply repeatedly to topmost elements. - */ - def reduceWith(f: (T, T) => T): Unit = while(size > 1) combine(f) - - override def size = index - - /** Evaluates the expression, preserving the contents of the stack so that - * any changes the evaluation makes to the stack contents will be undone after - * it completes. - * - * @param action The action to run. - */ - def preserving[T](action: => T) = { - val oldIndex = index - val oldTable = ArrayStack.clone(table) - - try { - action - } finally { - index = oldIndex - table = oldTable - } - } - - override def isEmpty: Boolean = index == 0 - - /** Creates and iterator over the stack in LIFO order. - * @return an iterator over the elements of the stack. - */ - override def iterator: Iterator[T] = new AbstractIterator[T] { - var currentIndex = index - def hasNext = currentIndex > 0 - def next() = { - currentIndex -= 1 - table(currentIndex).asInstanceOf[T] - } - } - - override def foreach[U](f: T => U) { - var currentIndex = index - while (currentIndex > 0) { - currentIndex -= 1 - f(table(currentIndex).asInstanceOf[T]) - } - } - - override def clone() = new ArrayStack[T](ArrayStack.clone(table), index) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/BitSet.scala b/tests/scala2-library/src/library/scala/collection/mutable/BitSet.scala deleted file mode 100644 index a714cce8816c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/BitSet.scala +++ /dev/null @@ -1,218 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ -import BitSetLike.{LogWL, MaxSize} - -/** A class for mutable bitsets. - * - * $bitsetinfo - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_bitsets "Scala's Collection Library overview"]] - * section on `Mutable Bitsets` for more information. - * - * @define Coll `BitSet` - * @define coll bitset - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `BitSet[B]` because an implicit of type `CanBuildFrom[BitSet, B, BitSet]` - * is defined in object `BitSet`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `BitSet`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(8483111450368547763L) -class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int] - with SortedSet[Int] - with scala.collection.BitSet - with BitSetLike[BitSet] - with SetLike[Int, BitSet] - with Serializable { - - override def empty = BitSet.empty - - /** Creates the bitset of a certain initial size. - * - * @param initSize initial size of the bitset. 
- */ - def this(initSize: Int) = this(new Array[Long]((initSize + 63) >> 6 max 1)) - - def this() = this(0) - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nwords = elems.length - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def word(idx: Int): Long = - if (idx < nwords) elems(idx) else 0L - - protected final def updateWord(idx: Int, w: Long) { - ensureCapacity(idx) - elems(idx) = w - } - - protected final def ensureCapacity(idx: Int) { - require(idx < MaxSize) - if (idx >= nwords) { - var newlen = nwords - while (idx >= newlen) newlen = (newlen * 2) min MaxSize - val elems1 = new Array[Long](newlen) - Array.copy(elems, 0, elems1, 0, nwords) - elems = elems1 - } - } - - protected def fromBitMaskNoCopy(words: Array[Long]): BitSet = { - if (words.length == 0) { - empty - } else { - new BitSet(words) - } - } - - override def add(elem: Int): Boolean = { - require(elem >= 0) - if (contains(elem)) false - else { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - true - } - } - - override def remove(elem: Int): Boolean = { - require(elem >= 0) - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - true - } else false - } - - @deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0") - def += (elem: Int): this.type = { add(elem); this } - - @deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0") - def -= (elem: Int): this.type = { remove(elem); this } - - /** Updates this bitset to the union with another bitset by performing a bitwise "or". - * - * @param other the bitset to form the union with. - * @return the bitset itself. - */ - def |= (other: BitSet): this.type = { - ensureCapacity(other.nwords - 1) - for (i <- 0 until other.nwords) - elems(i) = elems(i) | other.word(i) - this - } - /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". - * - * @param other the bitset to form the intersection with. - * @return the bitset itself. - */ - def &= (other: BitSet): this.type = { - // Different from other operations: no need to ensure capacity because - // anything beyond the capacity is 0. Since we use other.word which is 0 - // off the end, we also don't need to make sure we stay in bounds there. - for (i <- 0 until nwords) - elems(i) = elems(i) & other.word(i) - this - } - /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". - * - * @param other the bitset to form the symmetric difference with. - * @return the bitset itself. - */ - def ^= (other: BitSet): this.type = { - ensureCapacity(other.nwords - 1) - for (i <- 0 until other.nwords) - elems(i) = elems(i) ^ other.word(i) - this - } - /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". - * - * @param other the bitset to form the difference with. - * @return the bitset itself. - */ - def &~= (other: BitSet): this.type = { - ensureCapacity(other.nwords - 1) - for (i <- 0 until other.nwords) - elems(i) = elems(i) & ~other.word(i) - this - } - - override def clear() { - elems = new Array[Long](elems.length) - } - - /** Wraps this bitset as an immutable bitset backed by the array of bits - * of this bitset. 
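// Sketch of the in-place bitwise operators documented above for the deleted mutable
// BitSet (editorial illustration; assumes a Scala 2.12 classpath).
object BitSetOpsSketch {
  import scala.collection.mutable

  def main(args: Array[String]): Unit = {
    val a = mutable.BitSet(1, 2, 3)
    val b = mutable.BitSet(3, 4)

    a |= b                     // union:                a becomes {1, 2, 3, 4}
    a &~= mutable.BitSet(4)    // difference (and-not): a becomes {1, 2, 3}
    a &= mutable.BitSet(2, 3)  // intersection:         a becomes {2, 3}
    a ^= mutable.BitSet(3, 5)  // symmetric difference: a becomes {2, 5}

    assert(a == mutable.BitSet(2, 5))
  }
}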
- * - * @note Subsequent changes in this bitset will be reflected in the returned immutable bitset. - * - * @return an immutable set containing all the elements of this set. - */ - @deprecated("If this BitSet contains a value that is 128 or greater, the result of this method is an 'immutable' " + - "BitSet that shares state with this mutable BitSet. Thus, if the mutable BitSet is modified, it will violate the " + - "immutability of the result.", "2.12.0") - def toImmutable = immutable.BitSet.fromBitMaskNoCopy(elems) - - override def clone(): BitSet = { - val elems1 = new Array[Long](elems.length) - Array.copy(elems, 0, elems1, 0, elems.length) - new BitSet(elems1) - } -} - -/** $factoryInfo - * @define coll bitset - * @define Coll `BitSet` - */ -object BitSet extends BitSetFactory[BitSet] { - def empty: BitSet = new BitSet - - /** A growing builder for mutable Sets. */ - def newBuilder: Builder[Int, BitSet] = new GrowingBuilder[Int, BitSet](empty) - - /** $bitsetCanBuildFrom */ - implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) { - empty - } else { - val a = new Array[Long](len) - Array.copy(elems, 0, a, 0, len) - new BitSet(a) - } - } - - /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. - */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - if (elems.length == 0) { - empty - } else { - new BitSet(elems) - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Buffer.scala b/tests/scala2-library/src/library/scala/collection/mutable/Buffer.scala deleted file mode 100644 index 7ec7b0633363..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Buffer.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** Buffers are used to create sequences of elements incrementally by - * appending, prepending, or inserting new elements. It is also - * possible to access and modify elements in a random access fashion - * via the index of the element in the current sequence. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * - * @tparam A type of the elements contained in this buffer. - * - * @define Coll `Buffer` - * @define coll buffer - */ -trait Buffer[A] extends Seq[A] - with GenericTraversableTemplate[A, Buffer] - with BufferLike[A, Buffer[A]] - with scala.Cloneable { - override def companion: GenericCompanion[Buffer] = Buffer -} - -/** $factoryInfo - * @define coll buffer - * @define Coll `Buffer` - */ -object Buffer extends SeqFactory[Buffer] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Buffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Buffer[A]] = new ArrayBuffer -} - -/** Explicit instantiation of the `Buffer` trait to reduce class file size in subclasses. 
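// Editorial sketch contrasting fromBitMask (defensive copy) with fromBitMaskNoCopy
// (shared array), as documented above; Scala 2.12 assumed, names illustrative.
object BitMaskSketch {
  import scala.collection.mutable.BitSet

  def main(args: Array[String]): Unit = {
    val words = Array(0x5L)                 // bits 0 and 2 set
    val copied = BitSet.fromBitMask(words)
    val shared = BitSet.fromBitMaskNoCopy(words)

    words(0) |= 0x2L                        // set bit 1 in the backing array
    assert(!copied.contains(1))             // the copy is unaffected
    assert(shared.contains(1))              // the no-copy wrapper observes the change
  }
}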
*/ -abstract class AbstractBuffer[A] extends AbstractSeq[A] with Buffer[A] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/BufferLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/BufferLike.scala deleted file mode 100644 index 7c36c6ff3273..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/BufferLike.scala +++ /dev/null @@ -1,265 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ -import script._ -import scala.annotation.migration - -/** A template trait for buffers of type `Buffer[A]`. - * - * Buffers are used to create sequences of elements incrementally by - * appending, prepending, or inserting new elements. It is also - * possible to access and modify elements in a random access fashion - * via the index of the element in the current sequence. - * - * @tparam A the type of the elements of the buffer - * @tparam This the type of the buffer itself. - * - * $buffernote - * - * @author Martin Odersky - * @author Matthias Zenger - * @version 2.8 - * @since 2.8 - * @define buffernote @note - * This trait provides most of the operations of a `Buffer` independently of its representation. - * It is typically inherited by concrete implementations of buffers. - * - * To implement a concrete buffer, you need to provide implementations of the - * following methods: - * {{{ - * def apply(idx: Int): A - * def update(idx: Int, elem: A) - * def length: Int - * def clear() - * def +=(elem: A): this.type - * def +=:(elem: A): this.type - * def insertAll(n: Int, iter: Traversable[A]) - * def remove(n: Int): A - * }}} - * @define coll buffer - * @define Coll Buffer - * @define add append - * @define Add Append - * @define willNotTerminateInf - * @define mayNotTerminateInf - * @define compatMutate - * Note that for backward compatibility reasons, this method - * mutates the collection in place, unlike similar but - * undeprecated methods throughout the collections hierarchy. - */ -trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] - extends Growable[A] - with Shrinkable[A] - with Scriptable[A] - with Subtractable[A, This] - with SeqLike[A, This] - with scala.Cloneable -{ self : This => - - // Abstract methods from Seq: - - def apply(n: Int): A - def update(n: Int, newelem: A) - def length: Int - - // Abstract methods from Growable: - - def +=(elem: A): this.type - def clear() - - // Abstract methods new in this class: - - /** Prepends a single element to this buffer. - * @param elem the element to prepend. - * @return the buffer itself. - */ - def +=:(elem: A): this.type - - /** Inserts new elements at a given index into this buffer. - * - * @param n the index where new elements are inserted. - * @param elems the traversable collection containing the elements to insert. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length`. - */ - def insertAll(n: Int, elems: scala.collection.Traversable[A]) - - /** Removes the element at a given index from this buffer. - * - * @param n the index which refers to the element to delete. - * @return the previous element at index `n` - * @throws IndexOutOfBoundsException if the if the index `n` is not in the valid range - * `0 <= n < length`. 
- */ - def remove(n: Int): A - - /** Removes a number of elements from a given index position. Subclasses of `BufferLike` - * will typically override this method to provide better performance than `count` - * successive calls to single-element `remove`. - * - * @param n the index which refers to the first element to remove. - * @param count the number of elements to remove. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length - count` (with `count > 0`). - * @throws IllegalArgumentException if `count < 0`. - */ - def remove(n: Int, count: Int) { - if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString) - for (i <- 0 until count) remove(n) - } - - /** Removes a single element from this buffer, at its first occurrence. - * If the buffer does not contain that element, it is unchanged. - * - * @param x the element to remove. - * @return the buffer itself - */ - def -= (x: A): this.type = { - val i = indexOf(x) - if (i != -1) remove(i) - this - } - - /** Prepends elements to this buffer. - * - * @param xs the TraversableOnce containing the elements to prepend. - * @return the buffer itself. - */ - def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this } - - /** Appends the given elements to this buffer. - * - * @param elems the elements to append. - */ - def append(elems: A*) { appendAll(elems) } - - /** Appends the elements contained in a traversable object to this buffer. - * @param xs the traversable object containing the elements to append. - */ - def appendAll(xs: TraversableOnce[A]) { this ++= xs } - - /** Prepends given elements to this buffer. - * @param elems the elements to prepend. - */ - def prepend(elems: A*) { prependAll(elems) } - - /** Prepends the elements contained in a traversable object to this buffer. - * @param xs the collection containing the elements to prepend. - */ - def prependAll(xs: TraversableOnce[A]) { xs ++=: this } - - /** Inserts new elements at a given index into this buffer. - * - * @param n the index where new elements are inserted. - * @param elems the traversable collection containing the elements to insert. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length`. - */ - def insert(n: Int, elems: A*) { insertAll(n, elems) } - - /** Removes the first ''n'' elements of this buffer. - * - * @param n the number of elements to remove from the beginning - * of this buffer. - */ - def trimStart(n: Int) { remove(0, n) } - - /** Removes the last ''n'' elements of this buffer. - * - * @param n the number of elements to remove from the end - * of this buffer. - */ - def trimEnd(n: Int) { remove(length - n max 0, n) } - - /** Send a message to this scriptable object. - * - * @param cmd the message to send. 
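// Usage sketch (editorial) of the BufferLike editing operations listed above, exercised
// through ArrayBuffer, the default Buffer implementation; Scala 2.12 assumed.
object BufferOpsSketch {
  import scala.collection.mutable.ArrayBuffer

  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(1, 2, 3)
    buf += 4                     // append
    0 +=: buf                    // prepend a single element
    buf.insert(2, 10, 11)        // insert at index 2
    buf.remove(2, 2)             // remove `count` elements starting at index 2
    buf.trimStart(1)             // drop the first element
    buf.trimEnd(1)               // drop the last element
    assert(buf == ArrayBuffer(1, 2, 3))
  }
}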
- */ - @deprecated("scripting is deprecated", "2.11.0") - def <<(cmd: Message[A]): Unit = cmd match { - case Include(Start, x) => prepend(x) - case Include(End, x) => append(x) - case Include(Index(n), x) => insert(n, x) - case Include(NoLo, x) => this += x - - case Update(Start, x) => update(0, x) - case Update(End, x) => update(length - 1, x) - case Update(Index(n), x) => update(n, x) - - case Remove(Start, x) => if (this(0) == x) remove(0) - case Remove(End, x) => if (this(length - 1) == x) remove(length - 1) - case Remove(Index(n), x) => if (this(n) == x) remove(n) - case Remove(NoLo, x) => this -= x - - case Reset() => clear() - case s: Script[A] => s.iterator foreach << - case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") - } - - /** Defines the prefix of this object's `toString` representation. - * @return a string representation which starts the result of `toString` applied to this set. - * Unless overridden this is simply `"Buffer"`. - */ - override def stringPrefix: String = "Buffer" - - /** Creates a new collection containing both the elements of this collection and the provided - * traversable object. - * - * @param xs the traversable object. - * @return a new collection consisting of all the elements of this collection and `xs`. - */ - @migration("`++` creates a new buffer. Use `++=` to add an element from this buffer and return that buffer itself.", "2.8.0") - def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq - - /** Creates a new collection with all the elements of this collection except `elem`. - * - * @param elem the element to remove. - * @return a new collection consisting of all the elements of this collection except `elem`. - */ - @migration("`-` creates a new buffer. Use `-=` to remove an element from this buffer and return that buffer itself.", "2.8.0") - override def -(elem: A): This = clone() -= elem - - /** Creates a new collection with all the elements of this collection except the two - * or more specified elements. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new collection consisting of all the elements of this collection except - * `elem1`, `elem2` and those in `elems`. - */ - @migration("`-` creates a new buffer. Use `-=` to remove an element from this buffer and return that buffer itself.", "2.8.0") - override def -(elem1: A, elem2: A, elems: A*): This = clone() -= elem1 -= elem2 --= elems - - /** Creates a new collection with all the elements of this collection except those - * provided by the specified traversable object. - * - * @param xs the traversable object. - * @return a new collection with all the elements of this collection except - * those in `xs` - */ - @migration("`--` creates a new buffer. Use `--=` to remove an element from this buffer and return that buffer itself.", "2.8.0") - override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq - - /** Return a clone of this buffer. - * - * @return a `Buffer` with the same elements. 
- */ - override def clone(): This = { - val bf = newBuilder - bf ++= this - bf.result().asInstanceOf[This] - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/BufferProxy.scala b/tests/scala2-library/src/library/scala/collection/mutable/BufferProxy.scala deleted file mode 100644 index 60f0e297466d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/BufferProxy.scala +++ /dev/null @@ -1,143 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import script._ - -/** This is a simple proxy class for `scala.collection.mutable.Buffer`. - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. - * - * @author Matthias Zenger - * @version 1.0, 16/04/2004 - * @since 1 - * - * @tparam A type of the elements the buffer proxy contains. - * - * @define Coll `BufferProxy` - * @define coll buffer proxy - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait BufferProxy[A] extends Buffer[A] with Proxy { - - def self: Buffer[A] - - def length: Int = self.length - - override def iterator: Iterator[A] = self.iterator - - def apply(n: Int): A = self.apply(n) - - /** Append a single element to this buffer. - * - * @param elem the element to append. - */ - def +=(elem: A): this.type = { self.+=(elem); this } - - /** Appends a number of elements provided by a traversable object. - * - * @param xs the traversable object. - * @return a reference to this $coll. - */ - override def ++=(xs: TraversableOnce[A]): this.type = { self.++=(xs); this } - - /** Appends a sequence of elements to this buffer. - * - * @param elems the elements to append. - */ - override def append(elems: A*) { self.++=(elems) } - - /** Appends a number of elements provided by a traversable object. - * - * @param xs the traversable object. - */ - override def appendAll(xs: TraversableOnce[A]) { self.appendAll(xs) } - - /** Prepend a single element to this buffer and return - * the identity of the buffer. - * - * @param elem the element to append. - * @return a reference to this $coll. - */ - def +=:(elem: A): this.type = { self.+=:(elem); this } - - override def ++=:(xs: TraversableOnce[A]): this.type = { self.++=:(xs); this } - - /** Prepend an element to this list. - * - * @param elems the elements to prepend. - */ - override def prepend(elems: A*) { self.prependAll(elems) } - - /** Prepends a number of elements provided by a traversable object. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - */ - override def prependAll(xs: TraversableOnce[A]) { self.prependAll(xs) } - - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a - * one. Instead, it will insert the new elements at index `n`. - * - * @param n the index where a new element will be inserted. - * @param elems the new elements to insert. - */ - override def insert(n: Int, elems: A*) { self.insertAll(n, elems) } - - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a - * one. Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. 
- * @param iter the iterable object providing all elements to insert. - */ - def insertAll(n: Int, iter: scala.collection.Iterable[A]) { - self.insertAll(n, iter) - } - - override def insertAll(n: Int, iter: scala.collection.Traversable[A]) { - self.insertAll(n, iter) - } - - /** Replace element at index `n` with the new element `newelem`. - * - * @param n the index of the element to replace. - * @param newelem the new element. - */ - def update(n: Int, newelem: A) { self.update(n, newelem) } - - /** Removes the element on a given index position. - * - * @param n the index which refers to the element to delete. - */ - def remove(n: Int): A = self.remove(n) - - /** Clears the buffer contents. - */ - def clear() { self.clear() } - - /** Send a message to this scriptable object. - * - * @param cmd the message to send. - */ - @deprecated("scripting is deprecated", "2.11.0") - override def <<(cmd: Message[A]) { self << cmd } - - /** Return a clone of this buffer. - * - * @return a `Buffer` with the same elements. - */ - override def clone(): Buffer[A] = new BufferProxy[A] { - def self = BufferProxy.this.self.clone() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Builder.scala b/tests/scala2-library/src/library/scala/collection/mutable/Builder.scala deleted file mode 100644 index 338de2fab3fb..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Builder.scala +++ /dev/null @@ -1,142 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package mutable - -import generic._ - -/** The base trait of all builders. - * A builder lets one construct a collection incrementally, by adding - * elements to the builder with `+=` and then converting to the required - * collection type with `result`. - * - * One cannot assume that a single `Builder` can build more than one - * instance of the desired collection. Particular subclasses may allow - * such behavior. Otherwise, `result` should be treated as a terminal - * operation: after it is called, no further methods should be called on - * the builder. Extend the [[collection.mutable.ReusableBuilder]] trait - * instead of `Builder` for builders that may be reused to build multiple - * instances. - * - * @tparam Elem the type of elements that get added to the builder. - * @tparam To the type of collection that it produced. - * - * @since 2.8 - */ -trait Builder[-Elem, +To] extends Growable[Elem] { - - /** Adds a single element to the builder. - * @param elem the element to be added. - * @return the builder itself. - */ - def +=(elem: Elem): this.type - - /** Clears the contents of this builder. - * After execution of this method the builder will contain no elements. - */ - def clear() - - /** Produces a collection from the added elements. This is a terminal operation: - * the builder's contents are undefined after this operation, and no further - * methods should be called. - * - * @return a collection containing the elements added to this builder. - */ - def result(): To - - /** Gives a hint how many elements are expected to be added - * when the next `result` is called. Some builder classes - * will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. 
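// Minimal editorial sketch of the Builder protocol described above: grow with `+=`,
// finish with `result()` (a terminal operation). Scala 2.12 assumed.
object BuilderSketch {
  import scala.collection.mutable

  def main(args: Array[String]): Unit = {
    val b: mutable.Builder[Int, Vector[Int]] = Vector.newBuilder[Int]
    b.sizeHint(3)            // optional hint; correctness must not depend on it
    b += 1
    b ++= Seq(2, 3)
    val v = b.result()       // after this, the builder should not be reused
    assert(v == Vector(1, 2, 3))
  }
}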
- * - * @param size the hint how many elements will be added. - */ - def sizeHint(size: Int) {} - - /** Gives a hint that one expects the `result` of this builder - * to have the same size as the given collection, plus some delta. This will - * provide a hint only if the collection is known to have a cheap - * `size` method, which is determined by calling `sizeHint`. - * - * Some builder classes will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. - * - * @param coll the collection which serves as a hint for the result's size. - */ - def sizeHint(coll: TraversableLike[_, _]) { - coll.sizeHintIfCheap match { - case -1 => - case n => sizeHint(n) - } - } - - /** Gives a hint that one expects the `result` of this builder - * to have the same size as the given collection, plus some delta. This will - * provide a hint only if the collection is known to have a cheap - * `size` method. Currently this is assumed to be the case if and only if - * the collection is of type `IndexedSeqLike`. - * Some builder classes - * will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. - * - * @param coll the collection which serves as a hint for the result's size. - * @param delta a correction to add to the `coll.size` to produce the size hint. - */ - def sizeHint(coll: TraversableLike[_, _], delta: Int) { - coll.sizeHintIfCheap match { - case -1 => - case n => sizeHint(n + delta) - } - } - - /** Gives a hint how many elements are expected to be added - * when the next `result` is called, together with an upper bound - * given by the size of some other collection. Some builder classes - * will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. - * - * @param size the hint how many elements will be added. - * @param boundingColl the bounding collection. If it is - * an IndexedSeqLike, then sizes larger - * than collection's size are reduced. - */ - def sizeHintBounded(size: Int, boundingColl: TraversableLike[_, _]) { - boundingColl.sizeHintIfCheap match { - case -1 => - case n => sizeHint(size min n) - } - } - - /** Creates a new builder by applying a transformation function to - * the results of this builder. - * @param f the transformation function. - * @tparam NewTo the type of collection returned by `f`. - * @return a new builder which is the same as the current builder except - * that a transformation function is applied to this builder's result. - * - * @note The original builder should no longer be used after `mapResult` is called. 
- */ - def mapResult[NewTo](f: To => NewTo): Builder[Elem, NewTo] = - new Builder[Elem, NewTo] with Proxy { - val self: Builder[Elem, To] = Builder.this - def +=(x: Elem): this.type = { self += x; this } - def clear() = self.clear() - override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this } - override def sizeHint(size: Int) = self.sizeHint(size) - override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl) - def result: NewTo = f(self.result()) - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Cloneable.scala b/tests/scala2-library/src/library/scala/collection/mutable/Cloneable.scala deleted file mode 100644 index 8b2f3f70de21..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Cloneable.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -/** A trait for cloneable collections. - * - * @since 2.8 - * - * @tparam A Type of the elements contained in the collection, covariant and with reference types as upperbound. - */ -trait Cloneable[+A <: AnyRef] extends scala.Cloneable { - override def clone(): A = super.clone().asInstanceOf[A] -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/DefaultEntry.scala b/tests/scala2-library/src/library/scala/collection/mutable/DefaultEntry.scala deleted file mode 100644 index 7a8ff8b36f63..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/DefaultEntry.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** Class used internally for default map model. - * @since 2.3 - */ -final class DefaultEntry[A, B](val key: A, var value: B) - extends HashEntry[A, DefaultEntry[A, B]] with Serializable -{ - override def toString: String = chainString - - def chainString = { - "(kv: " + key + ", " + value + ")" + (if (next != null) " -> " + next.toString else "") - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/DefaultMapModel.scala b/tests/scala2-library/src/library/scala/collection/mutable/DefaultMapModel.scala deleted file mode 100644 index 7f832c0766a9..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/DefaultMapModel.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -/** This class is used internally. It implements the mutable `Map` - * class in terms of three functions: `findEntry`, `addEntry`, and `entries`. 
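// Editorial sketch of `mapResult` as documented above: it wraps a builder so that a
// transformation is applied to the finished collection. Scala 2.12 assumed.
object MapResultSketch {
  import scala.collection.mutable

  def main(args: Array[String]): Unit = {
    // Build a List[Int] but expose a builder whose result is the list's sum.
    val summing: mutable.Builder[Int, Int] = List.newBuilder[Int].mapResult(_.sum)
    summing += 1
    summing ++= Seq(2, 3)
    assert(summing.result() == 6)
  }
}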
- * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 1 - */ -@deprecated("this trait will be removed", "2.11.0") -trait DefaultMapModel[A, B] extends Map[A, B] { - - type Entry = DefaultEntry[A, B] - - protected def findEntry(key: A): Entry - protected def addEntry(e: Entry) - protected def entries: Iterator[Entry] - - def get(key: A): Option[B] = { - val e = findEntry(key) - if (e == null) None - else Some(e.value) - } - - override def put(key: A, value: B): Option[B] = { - val e = findEntry(key) - if (e == null) { addEntry(new Entry(key, value)); None } - else { val v = e.value; e.value = value; Some(v) } - } - - def += (kv: (A, B)): this.type = { put(kv._1, kv._2); this } - - def iterator = entries map {e => (e.key, e.value)} - -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/DoubleLinkedList.scala b/tests/scala2-library/src/library/scala/collection/mutable/DoubleLinkedList.scala deleted file mode 100644 index 698d47e101ae..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ /dev/null @@ -1,103 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** This class implements double linked lists where both the head (`elem`), - * the tail (`next`) and a reference to the previous node (`prev`) are mutable. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#double_linked_lists "Scala's Collection Library overview"]] - * section on `Double Linked Lists` for more information. - - * - * @tparam A the type of the elements contained in this double linked list. - * - * @define Coll `DoubleLinkedList` - * @define coll double linked list - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `DoubleLinkedList[B]` because an implicit of type `CanBuildFrom[DoubleLinkedList, B, DoubleLinkedList[B]]` - * is defined in object `DoubleLinkedList`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `DoubleLinkedList`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -@SerialVersionUID(-8144992287952814767L) -class DoubleLinkedList[A]() extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, DoubleLinkedList] - with DoubleLinkedListLike[A, DoubleLinkedList[A]] - with Serializable { - next = this - - /** Creates a node for the double linked list. - * - * @param elem the element this node contains. - * @param next the next node in the double linked list. - */ - def this(elem: A, next: DoubleLinkedList[A]) { - this() - if (next != null) { - this.elem = elem - this.next = next - this.next.prev = this - } - } - - override def companion: GenericCompanion[DoubleLinkedList] = DoubleLinkedList - - // Accurately clone this collection. 
See scala/bug#6296 - override def clone(): DoubleLinkedList[A] = { - val builder = newBuilder - builder ++= this - builder.result() - } -} - -/** $factoryInfo - * @define coll double linked list - * @define Coll `DoubleLinkedList` - */ -@deprecated("low-level linked lists are deprecated", "2.11.0") -object DoubleLinkedList extends SeqFactory[DoubleLinkedList] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, DoubleLinkedList[A]] = - new Builder[A, DoubleLinkedList[A]] { - def emptyList() = new DoubleLinkedList[A]() - var current = emptyList() - - def +=(elem: A): this.type = { - if (current.isEmpty) - current = new DoubleLinkedList(elem, emptyList()) - else - current append new DoubleLinkedList(elem, emptyList()) - - this - } - - def clear(): Unit = current = emptyList() - def result() = current - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/DoubleLinkedListLike.scala deleted file mode 100644 index e85ef05319ca..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/DoubleLinkedListLike.scala +++ /dev/null @@ -1,120 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import scala.annotation.migration - -/** This extensible class may be used as a basis for implementing double - * linked lists. Type variable `A` refers to the element type - * of the list, type variable `This` is used to model self - * types of linked lists. - * - * The invariant of this data structure is that `prev` is always a reference to - * the previous node in the list. If `this` is the first node of the list, `prev` - * will be `null`. - * Field `next` is set to `this` iff the list is empty. - * - * Examples (right arrow represents `next`, left arrow represents `prev`, - * `_` represents no value): - * - * {{{ - * - * Empty: - * - * null <-- [ _ ] --, - * [ ] <-` - * - * Single element: - * - * null <-- [ x ] --> [ _ ] --, - * [ ] <-- [ ] <-` - * - * More elements: - * - * null <-- [ x ] --> [ y ] --> [ z ] --> [ _ ] --, - * [ ] <-- [ ] <-- [ ] <-- [ ] <-` - * - * }}} - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 2.8 - * - * @tparam A type of the elements contained in the double linked list - * @tparam This the type of the actual linked list holding the elements - * - * @define Coll `DoubleLinkedList` - * @define coll double linked list - */ -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self => - - /** A reference to the node in the linked list preceding the current node. 
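// Editorial sketch for the (deprecated) DoubleLinkedList removed above; it still compiles
// on a Scala 2.12 classpath, with deprecation warnings. Object name is illustrative.
object DoubleLinkedListSketch {
  import scala.collection.mutable.DoubleLinkedList

  def main(args: Array[String]): Unit = {
    val xs = DoubleLinkedList(1, 2, 3)
    val second = xs.next            // nodes expose elem / next / prev directly
    assert(second.elem == 2 && (second.prev eq xs))
    second.remove()                 // unlinks this node from its neighbours
    assert(xs.toList == List(1, 3))
  }
}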
*/ - var prev: This = _ - - // returns that list if this list is empty - // otherwise modifies this list - override def append(that: This): This = - if (isEmpty) - that - else { - if (next.isEmpty) { - next = that - if (that.nonEmpty) that.prev = repr - } else { - next.append(that) - } - repr - } - - // cannot be called on empty lists - override def insert(that: This): Unit = { - super.insert(that) - if (that.nonEmpty) that.prev = repr - } - - /** Removes the current node from the double linked list. - * If the node was chained into a double linked list, it will no longer - * be a part of it. - * If the node was the last node in the list, i.e. a sentinel, this method - * does nothing. - * - * '''Note:''' this method will not set the fields `elem`, `next` or `prev` of the - * current node, i.e. `this` node itself will still point "into" the list it - * was in. - */ - @migration("Double linked list now removes the current node from the list.", "2.9.0") - def remove(): Unit = if (nonEmpty) { - next.prev = prev - if (prev ne null) prev.next = next // because this could be the first node - } - - private def atLocation[T](n: Int)(f: This => T)(onOutOfBounds: => T) = if (isEmpty) onOutOfBounds else { - var loc = repr - var left = n - while (left > 0) { - loc = loc.next - left -= 1 - if (loc.isEmpty) onOutOfBounds - } - f(loc) - } - - private def outofbounds(n: Int) = throw new IndexOutOfBoundsException(n.toString) - - override def drop(n: Int): This = super[SeqLike].drop(n) - override def tail = drop(1) - override def apply(n: Int): A = atLocation(n)(_.elem)(outofbounds(n)) - override def update(n: Int, x: A): Unit = atLocation(n)(_.elem = x)(outofbounds(n)) - override def get(n: Int): Option[A] = atLocation[Option[A]](n)(x => Some(x.elem))(None) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/FlatHashTable.scala b/tests/scala2-library/src/library/scala/collection/mutable/FlatHashTable.scala deleted file mode 100644 index a6d5dbd04212..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/FlatHashTable.scala +++ /dev/null @@ -1,437 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import java.lang.Integer.rotateRight -import scala.util.hashing.byteswap32 - -/** An implementation class backing a `HashSet`. - * - * This trait is used internally. It can be mixed in with various collections relying on - * hash table as an implementation. - * - * @define coll flat hash table - * @since 2.3 - * @tparam A the type of the elements contained in the $coll. - */ -trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { - import FlatHashTable._ - - private final def tableDebug = false - - @transient private[collection] var _loadFactor = defaultLoadFactor - - /** The actual hash table. - */ - @transient protected var table: Array[AnyRef] = new Array(initialCapacity) - - /** The number of mappings contained in this hash table. - */ - @transient protected var tableSize = 0 - - /** The next size value at which to resize (capacity * load factor). - */ - @transient protected var threshold: Int = newThreshold(_loadFactor, initialCapacity) - - /** The array keeping track of number of elements in 32 element blocks. 
- */ - @transient protected var sizemap: Array[Int] = null - - @transient protected var seedvalue: Int = tableSizeSeed - - protected def capacity(expectedSize: Int) = HashTable.nextPositivePowerOfTwo(expectedSize) - - /** The initial size of the hash table. - */ - def initialSize: Int = 32 - - private def initialCapacity = capacity(initialSize) - - protected def randomSeed = seedGenerator.get.nextInt() - - protected def tableSizeSeed = Integer.bitCount(table.length - 1) - - /** - * Initializes the collection from the input stream. `f` will be called for each element - * read from the input stream in the order determined by the stream. This is useful for - * structures where iteration order is important (e.g. LinkedHashSet). - * - * The serialization format expected is the one produced by `serializeTo`. - */ - private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) { - in.defaultReadObject - - _loadFactor = in.readInt() - assert(_loadFactor > 0) - - val size = in.readInt() - tableSize = 0 - assert(size >= 0) - - table = new Array(capacity(sizeForThreshold(size, _loadFactor))) - threshold = newThreshold(_loadFactor, table.length) - - seedvalue = in.readInt() - - val smDefined = in.readBoolean() - if (smDefined) sizeMapInit(table.length) else sizemap = null - - var index = 0 - while (index < size) { - val elem = entryToElem(in.readObject()) - f(elem) - addElem(elem) - index += 1 - } - } - - /** - * Serializes the collection to the output stream by saving the load factor, collection - * size and collection elements. `foreach` determines the order in which the elements are saved - * to the stream. To deserialize, `init` should be used. - */ - private[collection] def serializeTo(out: java.io.ObjectOutputStream) { - out.defaultWriteObject - out.writeInt(_loadFactor) - out.writeInt(tableSize) - out.writeInt(seedvalue) - out.writeBoolean(isSizeMapDefined) - iterator.foreach(out.writeObject) - } - - /** Finds an entry in the hash table if such an element exists. */ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def findEntry(elem: A): Option[A] = - findElemImpl(elem) match { - case null => None - case entry => Some(entryToElem(entry)) - } - - - /** Checks whether an element is contained in the hash table. */ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def containsElem(elem: A): Boolean = { - null != findElemImpl(elem) - } - - private def findElemImpl(elem: A): AnyRef = { - val searchEntry = elemToEntry(elem) - var h = index(searchEntry.hashCode) - var curEntry = table(h) - while (null != curEntry && curEntry != searchEntry) { - h = (h + 1) % table.length - curEntry = table(h) - } - curEntry - } - - /** Add elem if not yet in table. - * @return Returns `true` if a new elem was added, `false` otherwise. - */ - protected def addElem(elem: A) : Boolean = { - addEntry(elemToEntry(elem)) - } - - /** - * Add an entry (an elem converted to an entry via elemToEntry) if not yet in - * table. - * @return Returns `true` if a new elem was added, `false` otherwise. 
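// Simplified editorial model of the open-addressing probe loop used by findElemImpl and
// addEntry above: on a collision, scan forward (wrapping around) until the element or an
// empty slot is found. This is an illustration, not the library's actual implementation.
object LinearProbeSketch {
  def find(table: Array[AnyRef], elem: AnyRef, index: Int => Int): Int = {
    var h = index(elem.hashCode)
    while (table(h) != null && table(h) != elem) h = (h + 1) % table.length
    h                               // slot holding elem, or the empty slot to insert into
  }

  def main(args: Array[String]): Unit = {
    val table = new Array[AnyRef](8)
    val index = (hcode: Int) => hcode & (table.length - 1)
    table(find(table, "a", index)) = "a"
    table(find(table, "b", index)) = "b"
    assert(table(find(table, "a", index)) == "a")
  }
}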
- */ - protected def addEntry(newEntry : AnyRef) : Boolean = { - var h = index(newEntry.hashCode) - var curEntry = table(h) - while (null != curEntry) { - if (curEntry == newEntry) return false - h = (h + 1) % table.length - curEntry = table(h) - //Statistics.collisions += 1 - } - table(h) = newEntry - tableSize = tableSize + 1 - nnSizeMapAdd(h) - if (tableSize >= threshold) growTable() - true - - } - - /** - * Removes an elem from the hash table returning true if the element was found (and thus removed) - * or false if it didn't exist. - */ - protected def removeElem(elem: A) : Boolean = { - if (tableDebug) checkConsistent() - def precedes(i: Int, j: Int) = { - val d = table.length >> 1 - if (i <= j) j - i < d - else i - j > d - } - val removalEntry = elemToEntry(elem) - var h = index(removalEntry.hashCode) - var curEntry = table(h) - while (null != curEntry) { - if (curEntry == removalEntry) { - var h0 = h - var h1 = (h0 + 1) % table.length - while (null != table(h1)) { - val h2 = index(table(h1).hashCode) - //Console.println("shift at "+h1+":"+table(h1)+" with h2 = "+h2+"? "+(h2 != h1)+precedes(h2, h0)+table.length) - if (h2 != h1 && precedes(h2, h0)) { - //Console.println("shift "+h1+" to "+h0+"!") - table(h0) = table(h1) - h0 = h1 - } - h1 = (h1 + 1) % table.length - } - table(h0) = null - tableSize -= 1 - nnSizeMapRemove(h0) - if (tableDebug) checkConsistent() - return true - } - h = (h + 1) % table.length - curEntry = table(h) - } - false - } - - protected def iterator: Iterator[A] = new AbstractIterator[A] { - private var i = 0 - def hasNext: Boolean = { - while (i < table.length && (null == table(i))) i += 1 - i < table.length - } - def next(): A = - if (hasNext) { i += 1; entryToElem(table(i - 1)) } - else Iterator.empty.next() - } - - private def growTable() { - val oldtable = table - table = new Array[AnyRef](table.length * 2) - tableSize = 0 - nnSizeMapReset(table.length) - seedvalue = tableSizeSeed - threshold = newThreshold(_loadFactor, table.length) - var i = 0 - while (i < oldtable.length) { - val entry = oldtable(i) - if (null != entry) addEntry(entry) - i += 1 - } - if (tableDebug) checkConsistent() - } - - private def checkConsistent() { - for (i <- 0 until table.length) - if (table(i) != null && !containsElem(entryToElem(table(i)))) - assert(assertion = false, i+" "+table(i)+" "+table.mkString) - } - - - /* Size map handling code */ - - /* - * The following three methods (nn*) modify a size map only if it has been - * initialized, that is, if it's not set to null. - * - * The size map logically divides the hash table into `sizeMapBucketSize` element buckets - * by keeping an integer entry for each such bucket. Each integer entry simply denotes - * the number of elements in the corresponding bucket. - * Best understood through an example, see: - * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) - * sizemap = [ 2 | 3 ] (2 entries) - * where sizeMapBucketSize == 4. 
- * - */ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapAdd(h: Int) = if (sizemap ne null) { - val p = h >> sizeMapBucketBitSize - sizemap(p) += 1 - } - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapRemove(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) -= 1 - } - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { - val nsize = calcSizeMapSize(tableLength) - if (sizemap.length != nsize) sizemap = new Array[Int](nsize) - else java.util.Arrays.fill(sizemap, 0) - } - - private[collection] final def totalSizeMapBuckets = (table.length - 1) / sizeMapBucketSize + 1 - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 - - // discards the previous sizemap and only allocates a new one - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapInit(tableLength: Int) { - sizemap = new Array[Int](calcSizeMapSize(tableLength)) - } - - // discards the previous sizemap and populates the new one - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapInitAndRebuild() { - // first allocate - sizeMapInit(table.length) - - // rebuild - val totalbuckets = totalSizeMapBuckets - var bucketidx = 0 - var tableidx = 0 - val tbl = table - var tableuntil = sizeMapBucketSize min tbl.length - while (bucketidx < totalbuckets) { - var currbucketsz = 0 - while (tableidx < tableuntil) { - if (tbl(tableidx) ne null) currbucketsz += 1 - tableidx += 1 - } - sizemap(bucketidx) = currbucketsz - tableuntil += sizeMapBucketSize - bucketidx += 1 - } - } - - private[collection] def printSizeMap() { - println(sizemap.mkString("szmap: [", ", ", "]")) - } - - private[collection] def printContents() { - println(table.mkString("[", ", ", "]")) - } - - protected def sizeMapDisable() = sizemap = null - - protected def isSizeMapDefined = sizemap ne null - - protected def alwaysInitSizeMap = false - - /* End of size map handling code */ - - protected final def index(hcode: Int) = { - // version 1 (no longer used - did not work with parallel hash tables) - // improve(hcode) & (table.length - 1) - - // version 2 (allows for parallel hash table construction) - val improved = improve(hcode, seedvalue) - val ones = table.length - 1 - (improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones - - // version 3 (solves scala/bug#5293 in most cases, but such a case would still arise for parallel hash tables) - // val hc = improve(hcode) - // val bbp = blockbitpos - // val ones = table.length - 1 - // val needed = Integer.bitCount(ones) - // val blockbits = ((hc >>> bbp) & 0x1f) << (needed - 5) - // val rest = ((hc >>> (bbp + 5)) << bbp) | (((1 << bbp) - 1) & hc) - // val restmask = (1 << (needed - 5)) - 1 - // val improved = blockbits | (rest & restmask) - // improved - } - - protected def clearTable() { - var i = table.length - 1 - while (i >= 0) { table(i) = null; i -= 1 } - tableSize = 0 - nnSizeMapReset(table.length) - } - - private[collection] def hashTableContents = new FlatHashTable.Contents[A]( - 
_loadFactor, - table, - tableSize, - threshold, - seedvalue, - sizemap - ) - - protected def initWithContents(c: FlatHashTable.Contents[A]) = { - if (c != null) { - _loadFactor = c.loadFactor - table = c.table - tableSize = c.tableSize - threshold = c.threshold - seedvalue = c.seedvalue - sizemap = c.sizemap - } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() - } - -} - - -private[collection] object FlatHashTable { - - /** Creates a specific seed to improve hashcode of a hash table instance - * and ensure that iteration order vulnerabilities are not 'felt' in other - * hash tables. - * - * See scala/bug#5293. - */ - final def seedGenerator = new ThreadLocal[scala.util.Random] { - override def initialValue = new scala.util.Random - } - - private object NullSentinel { - override def hashCode = 0 - override def toString = "NullSentinel" - } - - /** The load factor for the hash table; must be < 500 (0.5) - */ - def defaultLoadFactor: Int = 450 - final def loadFactorDenum = 1000 - - def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt) - - def newThreshold(_loadFactor: Int, size: Int) = { - val lf = _loadFactor - assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5") - (size.toLong * lf / loadFactorDenum ).toInt - } - - class Contents[A]( - val loadFactor: Int, - val table: Array[AnyRef], - val tableSize: Int, - val threshold: Int, - val seedvalue: Int, - val sizemap: Array[Int] - ) - - trait HashUtils[A] { - protected final def sizeMapBucketBitSize = 5 - // so that: - protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize - - protected final def improve(hcode: Int, seed: Int) = rotateRight(byteswap32(hcode), seed) - - /** - * Elems have type A, but we store AnyRef in the table. Plus we need to deal with - * null elems, which need to be stored as NullSentinel - */ - protected final def elemToEntry(elem : A) : AnyRef = - if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef] - - /** - * Does the inverse translation of elemToEntry - */ - protected final def entryToElem(entry : AnyRef) : A = - (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A] - } - -} - - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/GrowingBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/GrowingBuilder.scala deleted file mode 100644 index 27d554d98e44..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/GrowingBuilder.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ - -/** The canonical builder for collections that are growable, i.e. that support an - * efficient `+=` method which adds an element to the collection. - * - * GrowableBuilders can produce only a single instance of the collection they are growing. 
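// Worked numbers (editorial) for the load-factor arithmetic above: the load factor is an
// integer out of loadFactorDenum (450/1000 = 0.45 by default), and the table grows once
// tableSize reaches capacity * loadFactor. These helpers mirror the formulas shown above.
object LoadFactorSketch {
  val loadFactor = 450
  val loadFactorDenum = 1000

  def newThreshold(capacity: Int): Int =
    (capacity.toLong * loadFactor / loadFactorDenum).toInt
  def sizeForThreshold(size: Int): Int =
    math.max(32, (size.toLong * loadFactorDenum / loadFactor).toInt)

  def main(args: Array[String]): Unit = {
    assert(newThreshold(32) == 14)        // a 32-slot table grows when the 14th element arrives
    assert(sizeForThreshold(100) == 222)  // 100 elements need >= 222 slots (rounded up to a power of two by `capacity`)
  }
}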
- * - * @author Paul Phillips - * @version 2.8 - * @since 2.8 - * - * @define Coll `GrowingBuilder` - * @define coll growing builder - */ -class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] { - protected var elems: To = empty - def +=(x: Elem): this.type = { elems += x; this } - def clear() { empty.clear } - def result: To = elems -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/HashEntry.scala b/tests/scala2-library/src/library/scala/collection/mutable/HashEntry.scala deleted file mode 100644 index 4c0f6a93e8e1..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/HashEntry.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala -package collection -package mutable - -/** Class used internally. - * @since 2.8 - */ -trait HashEntry [A, E] { - val key: A - var next: E = _ -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/HashMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/HashMap.scala deleted file mode 100644 index de61ebb796df..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/HashMap.scala +++ /dev/null @@ -1,194 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ -import scala.collection.parallel.mutable.ParHashMap - -/** This class implements mutable maps using a hashtable. - * - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash_tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @tparam A the type of the keys contained in this hash map. - * @tparam B the type of the values assigned to keys in this hash map. - * - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `HashMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[HashMap, (A, B), HashMap[A, B]]` - * is defined in object `HashMap`. Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `HashMap`. 
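// Editorial sketch of GrowingBuilder as described above: `+=` forwards to the wrapped
// Growable collection and `result` returns that same instance. Scala 2.12 assumed.
object GrowingBuilderSketch {
  import scala.collection.mutable

  def main(args: Array[String]): Unit = {
    val b = new mutable.GrowingBuilder[Int, mutable.HashSet[Int]](mutable.HashSet.empty[Int])
    b += 1
    b ++= Seq(2, 3)
    val set = b.result
    assert(set == mutable.HashSet(1, 2, 3))
  }
}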
- * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1L) -class HashMap[A, B] private[collection] (contents: HashTable.Contents[A, DefaultEntry[A, B]]) -extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, HashMap[A, B]] - with HashTable[A, DefaultEntry[A, B]] - with CustomParallelizable[(A, B), ParHashMap[A, B]] - with Serializable -{ - initWithContents(contents) - - type Entry = DefaultEntry[A, B] - - override def empty: HashMap[A, B] = HashMap.empty[A, B] - override def clear() { clearTable() } - override def size: Int = tableSize - - def this() = this(null) - - override def par = new ParHashMap[A, B](hashTableContents) - - // contains and apply overridden to avoid option allocations. - override def contains(key: A): Boolean = findEntry(key) != null - - override def apply(key: A): B = { - val result = findEntry(key) - if (result eq null) default(key) - else result.value - } - - def get(key: A): Option[B] = { - val e = findEntry(key) - if (e eq null) None - else Some(e.value) - } - - override def getOrElseUpdate(key: A, defaultValue: => B): B = { - val hash = elemHashCode(key) - val i = index(hash) - val entry = findEntry(key, i) - if (entry != null) entry.value - else { - val table0 = table - val default = defaultValue - // Avoid recomputing index if the `defaultValue()` hasn't triggered - // a table resize. - val newEntryIndex = if (table0 eq table) i else index(hash) - addEntry(createNewEntry(key, default), newEntryIndex) - } - } - - /* inlined HashTable.findEntry0 to preserve its visibility */ - private[this] def findEntry(key: A, h: Int): Entry = { - var e = table(h).asInstanceOf[Entry] - while (notFound(key, e)) - e = e.next - e - } - private[this] def notFound(key: A, e: Entry): Boolean = (e != null) && !elemEquals(e.key, key) - - /* inlined HashTable.addEntry0 to preserve its visibility */ - private[this] def addEntry(e: Entry, h: Int): B = { - if (tableSize >= threshold) addEntry(e) - else addEntry0(e, h) - e.value - } - - /* extracted to make addEntry inlinable */ - private[this] def addEntry0(e: Entry, h: Int) { - e.next = table(h).asInstanceOf[Entry] - table(h) = e - tableSize += 1 - nnSizeMapAdd(h) - } - - override def put(key: A, value: B): Option[B] = { - val e = findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } - } - - override def update(key: A, value: B): Unit = put(key, value) - - override def remove(key: A): Option[B] = { - val e = removeEntry(key) - if (e ne null) Some(e.value) - else None - } - - def += (kv: (A, B)): this.type = { - val e = findOrAddEntry(kv._1, kv._2) - if (e ne null) e.value = kv._2 - this - } - - def -=(key: A): this.type = { removeEntry(key); this } - - def iterator = entriesIterator map (e => ((e.key, e.value))) - - override def foreach[U](f: ((A, B)) => U): Unit = foreachEntry(e => f((e.key, e.value))) - - /* Override to avoid tuple allocation in foreach */ - override def keySet: scala.collection.Set[A] = new DefaultKeySet { - override def foreach[U](f: A => U) = foreachEntry(e => f(e.key)) - } - - /* Override to avoid tuple allocation in foreach */ - override def values: scala.collection.Iterable[B] = new DefaultValuesIterable { - override def foreach[U](f: B => U) = foreachEntry(e => f(e.value)) - } - - /* Override to avoid tuple allocation */ - override def keysIterator: Iterator[A] = new AbstractIterator[A] { - val iter = entriesIterator - def hasNext = iter.hasNext - def next() = iter.next().key - } - - /* Override to avoid tuple allocation */ 
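// Editorial sketch of the getOrElseUpdate path described above: the by-name default is
// computed and inserted only when the key is missing. Scala 2.12 assumed.
object GetOrElseUpdateSketch {
  import scala.collection.mutable

  def main(args: Array[String]): Unit = {
    val cache = mutable.HashMap.empty[String, Int]
    var computations = 0
    def expensive(s: String): Int = { computations += 1; s.length }

    val a = cache.getOrElseUpdate("scala", expensive("scala"))
    val b = cache.getOrElseUpdate("scala", expensive("scala"))
    assert(a == 5 && b == 5 && computations == 1)   // second call hits the existing entry
  }
}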
- override def valuesIterator: Iterator[B] = new AbstractIterator[B] { - val iter = entriesIterator - def hasNext = iter.hasNext - def next() = iter.next().value - } - - /** Toggles whether a size map is used to track hash map statistics. - */ - def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild() - } else sizeMapDisable() - - protected def createNewEntry[B1](key: A, value: B1): Entry = { - new Entry(key, value.asInstanceOf[B]) - } - - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) - } - - private def readObject(in: java.io.ObjectInputStream) { - init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject())) - } - -} - -/** $factoryInfo - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - */ -object HashMap extends MutableMapFactory[HashMap] { - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B]: HashMap[A, B] = new HashMap[A, B] -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/HashSet.scala b/tests/scala2-library/src/library/scala/collection/mutable/HashSet.scala deleted file mode 100644 index 3a16e4efa595..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/HashSet.scala +++ /dev/null @@ -1,109 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ -import scala.collection.parallel.mutable.ParHashSet - -/** This class implements mutable sets using a hashtable. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.0, 31/12/2006 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash_tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `HashSet[B]` because an implicit of type `CanBuildFrom[HashSet, B, HashSet[B]]` - * is defined in object `HashSet`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `HashSet`. 
- * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1L) -class HashSet[A] private[collection] (contents: FlatHashTable.Contents[A]) -extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, HashSet] - with SetLike[A, HashSet[A]] - with FlatHashTable[A] - with CustomParallelizable[A, ParHashSet[A]] - with Serializable -{ - initWithContents(contents) - - def this() = this(null) - - override def companion: GenericCompanion[HashSet] = HashSet - - override def size: Int = tableSize - - def contains(elem: A): Boolean = containsElem(elem) - - def += (elem: A): this.type = { addElem(elem); this } - - def -= (elem: A): this.type = { removeElem(elem); this } - - override def par = new ParHashSet(hashTableContents) - - override def add(elem: A): Boolean = addElem(elem) - - override def remove(elem: A): Boolean = removeElem(elem) - - override def clear() { clearTable() } - - override def iterator: Iterator[A] = super[FlatHashTable].iterator - - override def foreach[U](f: A => U) { - var i = 0 - val len = table.length - while (i < len) { - val curEntry = table(i) - if (curEntry ne null) f(entryToElem(curEntry)) - i += 1 - } - } - - override def clone() = new HashSet[A] ++= this - - private def writeObject(s: java.io.ObjectOutputStream) { - serializeTo(s) - } - - private def readObject(in: java.io.ObjectInputStream) { - init(in, x => ()) - } - - /** Toggles whether a size map is used to track hash map statistics. - */ - def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild() - } else sizeMapDisable() - -} - -/** $factoryInfo - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - */ -object HashSet extends MutableSetFactory[HashSet] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A] - override def empty[A]: HashSet[A] = new HashSet[A] -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/HashTable.scala b/tests/scala2-library/src/library/scala/collection/mutable/HashTable.scala deleted file mode 100644 index 7ee1987e4621..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/HashTable.scala +++ /dev/null @@ -1,460 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import java.lang.Integer.{numberOfLeadingZeros, rotateRight} -import scala.util.hashing.byteswap32 - -/** This class can be used to construct data structures that are based - * on hashtables. Class `HashTable[A]` implements a hashtable - * that maps keys of type `A` to values of the fully abstract - * member type `Entry`. Classes that make use of `HashTable` - * have to provide an implementation for `Entry`. - * - * There are mainly two parameters that affect the performance of a hashtable: - * the initial size and the load factor. The size - * refers to the number of buckets in the hashtable, and the load - * factor is a measure of how full the hashtable is allowed to get before - * its size is automatically doubled. Both parameters may be changed by - * overriding the corresponding values in class `HashTable`. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.0, 31/12/2006 - * @since 1 - * - * @tparam A type of the elements contained in this hash table. 
- */ -trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { - // Replacing Entry type parameter by abstract type member here allows to not expose to public - // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. - // However, I'm afraid it's too late now for such breaking change. - import HashTable._ - - @transient protected var _loadFactor = defaultLoadFactor - - /** The actual hash table. - */ - @transient protected var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) - - /** The number of mappings contained in this hash table. - */ - @transient protected var tableSize: Int = 0 - - /** The next size value at which to resize (capacity * load factor). - */ - @transient protected var threshold: Int = initialThreshold(_loadFactor) - - /** The array keeping track of the number of elements in 32 element blocks. - */ - @transient protected var sizemap: Array[Int] = null - - @transient protected var seedvalue: Int = tableSizeSeed - - protected def tableSizeSeed = Integer.bitCount(table.length - 1) - - /** The initial size of the hash table. - */ - protected def initialSize: Int = 16 - - /** The initial threshold. - */ - private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity) - - private def initialCapacity = capacity(initialSize) - - private def lastPopulatedIndex = { - var idx = table.length - 1 - while (table(idx) == null && idx > 0) - idx -= 1 - - idx - } - - /** - * Initializes the collection from the input stream. `readEntry` will be called for each - * entry to be read from the input stream. - */ - private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) { - in.defaultReadObject - - _loadFactor = in.readInt() - assert(_loadFactor > 0) - - val size = in.readInt() - tableSize = 0 - assert(size >= 0) - - seedvalue = in.readInt() - - val smDefined = in.readBoolean() - - table = new Array(capacity(sizeForThreshold(_loadFactor, size))) - threshold = newThreshold(_loadFactor, table.length) - - if (smDefined) sizeMapInit(table.length) else sizemap = null - - var index = 0 - while (index < size) { - addEntry(readEntry) - index += 1 - } - } - - /** - * Serializes the collection to the output stream by saving the load factor, collection - * size and collection entries. `writeEntry` is responsible for writing an entry to the stream. - * - * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To - * deserialize, `init` should be used. - */ - private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) { - out.defaultWriteObject - out.writeInt(_loadFactor) - out.writeInt(tableSize) - out.writeInt(seedvalue) - out.writeBoolean(isSizeMapDefined) - - foreachEntry(writeEntry) - } - - /** Find entry with given key in table, null if not found. 
- */ - @deprecatedOverriding("No sensible way to override findEntry as private findEntry0 is used in multiple places internally.", "2.11.0") - protected def findEntry(key: A): Entry = - findEntry0(key, index(elemHashCode(key))) - - private[this] def findEntry0(key: A, h: Int): Entry = { - var e = table(h).asInstanceOf[Entry] - while (e != null && !elemEquals(e.key, key)) e = e.next - e - } - - /** Add entry to table - * pre: no entry with same key exists - */ - @deprecatedOverriding("No sensible way to override addEntry as private addEntry0 is used in multiple places internally.", "2.11.0") - protected def addEntry(e: Entry) { - addEntry0(e, index(elemHashCode(e.key))) - } - - private[this] def addEntry0(e: Entry, h: Int) { - e.next = table(h).asInstanceOf[Entry] - table(h) = e - tableSize = tableSize + 1 - nnSizeMapAdd(h) - if (tableSize > threshold) - resize(2 * table.length) - } - - /** Find entry with given key in table, or add new one if not found. - * May be somewhat faster then `findEntry`/`addEntry` pair as it - * computes entry's hash index only once. - * Returns entry found in table or null. - * New entries are created by calling `createNewEntry` method. - */ - protected def findOrAddEntry[B](key: A, value: B): Entry = { - val h = index(elemHashCode(key)) - val e = findEntry0(key, h) - if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null } - } - - /** Creates new entry to be immediately inserted into the hashtable. - * This method is guaranteed to be called only once and in case that the entry - * will be added. In other words, an implementation may be side-effecting. - */ - protected def createNewEntry[B](key: A, value: B): Entry - - /** Remove entry from table if present. - */ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def removeEntry(key: A) : Entry = { - val h = index(elemHashCode(key)) - var e = table(h).asInstanceOf[Entry] - if (e != null) { - if (elemEquals(e.key, key)) { - table(h) = e.next - tableSize = tableSize - 1 - nnSizeMapRemove(h) - e.next = null - return e - } else { - var e1 = e.next - while (e1 != null && !elemEquals(e1.key, key)) { - e = e1 - e1 = e1.next - } - if (e1 != null) { - e.next = e1.next - tableSize = tableSize - 1 - nnSizeMapRemove(h) - e1.next = null - return e1 - } - } - } - null - } - - /** An iterator returning all entries. - */ - protected def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { - val iterTable = table - var idx = lastPopulatedIndex - var es = iterTable(idx) - - def hasNext = es != null - def next() = { - val res = es - es = es.next - while (es == null && idx > 0) { - idx = idx - 1 - es = iterTable(idx) - } - res.asInstanceOf[Entry] - } - } - - /** Avoid iterator for a 2x faster traversal. */ - protected def foreachEntry[U](f: Entry => U) { - val iterTable = table - var idx = lastPopulatedIndex - var es = iterTable(idx) - - while (es != null) { - val next = es.next // Cache next in case f removes es. 
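      // removeEntry sets the removed entry's `next` field to null, so reading `es.next`
      // after calling f could drop the remainder of this bucket's chain.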
- f(es.asInstanceOf[Entry]) - es = next - - while (es == null && idx > 0) { - idx -= 1 - es = iterTable(idx) - } - } - } - - /** Remove all entries from table - */ - protected def clearTable() { - var i = table.length - 1 - while (i >= 0) { table(i) = null; i = i - 1 } - tableSize = 0 - nnSizeMapReset(0) - } - - private def resize(newSize: Int) { - val oldTable = table - table = new Array(newSize) - nnSizeMapReset(table.length) - var i = oldTable.length - 1 - while (i >= 0) { - var e = oldTable(i) - while (e != null) { - val h = index(elemHashCode(e.key)) - val e1 = e.next - e.next = table(h).asInstanceOf[Entry] - table(h) = e - e = e1 - nnSizeMapAdd(h) - } - i = i - 1 - } - threshold = newThreshold(_loadFactor, newSize) - } - - /* Size map handling code */ - - /* - * The following three sizeMap* functions (Add, Remove, Reset) - * are used to update the size map of the hash table. - * - * The size map logically divides the hash table into `sizeMapBucketSize` element buckets - * by keeping an integer entry for each such bucket. Each integer entry simply denotes - * the number of elements in the corresponding bucket. - * Best understood through an example, see: - * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) - * sizemap = [ 2 | 3 ] (2 entries) - * where sizeMapBucketSize == 4. - * - * By default the size map is not initialized, so these methods don't do anything, thus, - * their impact on hash table performance is negligible. However, if the hash table - * is converted into a parallel hash table, the size map is initialized, as it will be needed - * there. - */ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapAdd(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) += 1 - } - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapRemove(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) -= 1 - } - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { - val nsize = calcSizeMapSize(tableLength) - if (sizemap.length != nsize) sizemap = new Array[Int](nsize) - else java.util.Arrays.fill(sizemap, 0) - } - - private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 - - // discards the previous sizemap and only allocates a new one - protected def sizeMapInit(tableLength: Int) { - sizemap = new Array[Int](calcSizeMapSize(tableLength)) - } - - // discards the previous sizemap and populates the new one - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapInitAndRebuild() { - sizeMapInit(table.length) - - // go through the buckets, count elements - var tableidx = 0 - var bucketidx = 0 - val tbl = table - var tableuntil = 0 - if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize - val totalbuckets = totalSizeMapBuckets - while (bucketidx < totalbuckets) { - var currbucketsize = 0 - while (tableidx < tableuntil) { - var e = tbl(tableidx) - while (e ne null) { - 
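          // walk the collision chain hanging off table slot `tableidx`, counting its entries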
currbucketsize += 1 - e = e.next - } - tableidx += 1 - } - sizemap(bucketidx) = currbucketsize - tableuntil += sizeMapBucketSize - bucketidx += 1 - } - } - - private[collection] def printSizeMap() { - println(sizemap.toList) - } - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapDisable() = sizemap = null - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def isSizeMapDefined = sizemap ne null - - // override to automatically initialize the size map - protected def alwaysInitSizeMap = false - - /* End of size map handling code */ - - protected def elemEquals(key1: A, key2: A): Boolean = (key1 == key2) - - /** - * Note: we take the most significant bits of the hashcode, not the lower ones - * this is of crucial importance when populating the table in parallel - */ - protected final def index(hcode: Int): Int = { - val ones = table.length - 1 - val exponent = Integer.numberOfLeadingZeros(ones) - (improve(hcode, seedvalue) >>> exponent) & ones - } - - protected def initWithContents(c: HashTable.Contents[A, Entry]) = { - if (c != null) { - _loadFactor = c.loadFactor - table = c.table - tableSize = c.tableSize - threshold = c.threshold - seedvalue = c.seedvalue - sizemap = c.sizemap - } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() - } - - private[collection] def hashTableContents = new HashTable.Contents( - _loadFactor, - table, - tableSize, - threshold, - seedvalue, - sizemap - ) -} - -private[collection] object HashTable { - /** The load factor for the hash table (in 0.001 step). - */ - private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75% - private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible - - private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt - - private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt - - private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize) - - trait HashUtils[KeyType] { - protected final def sizeMapBucketBitSize = 5 - // so that: - protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize - - protected def elemHashCode(key: KeyType) = key.## - - /** - * Defer to a high-quality hash in [[scala.util.hashing]]. - * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits. - *
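     * For example (an illustrative sketch, not from the original source):
     * {{{
     *   import java.lang.Integer.rotateRight
     *   import scala.util.hashing.byteswap32
     *   def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed)
     *   // small consecutive hash codes such as 1 and 2 get very different high bits,
     *   // so they no longer cluster in neighbouring buckets once the high bits are
     *   // used for indexing
     *   improve(1, 5)
     *   improve(2, 5)
     * }}}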

- * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003 - * {{{ - * var h: Int = hcode + ~(hcode << 9) - * h = h ^ (h >>> 14) - * h = h + (h << 4) - * h ^ (h >>> 10) - * }}} - * the rest of the computation is due to scala/bug#5293 - */ - protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) - } - - /** - * Returns a power of two >= `target`. - */ - private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) - - class Contents[A, Entry >: Null <: HashEntry[A, Entry]]( - val loadFactor: Int, - val table: Array[HashEntry[A, Entry]], - val tableSize: Int, - val threshold: Int, - val seedvalue: Int, - val sizemap: Array[Int] - ) { - import scala.collection.DebugUtils._ - private[collection] def debugInformation = buildString { - append => - append("Hash table contents") - append("-------------------") - append("Table: [" + arrayString(table, 0, table.length) + "]") - append("Table size: " + tableSize) - append("Load factor: " + loadFactor) - append("Seedvalue: " + seedvalue) - append("Threshold: " + threshold) - append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]") - } - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/History.scala b/tests/scala2-library/src/library/scala/collection/mutable/History.scala deleted file mode 100644 index 13e2f32225e5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/History.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - - -/** `History[A, B]` objects may subscribe to events of - * type `A` published by an object of type `B`. - * The history subscriber object records all published events - * up to maximum number of `maxHistory` events. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 1 - * - * @tparam Evt Type of events. - * @tparam Pub Type of publishers. - */ -@SerialVersionUID(5219213543849892588L) -class History[Evt, Pub] -extends AbstractIterable[(Pub, Evt)] - with Subscriber[Evt, Pub] - with Iterable[(Pub, Evt)] - with Serializable -{ - protected val log: Queue[(Pub, Evt)] = new Queue - val maxHistory: Int = 1000 - - /** Notifies this listener with an event by enqueuing it in the log. - * - * @param pub the publisher. - * @param event the event. - */ - def notify(pub: Pub, event: Evt) { - if (log.length >= maxHistory) - log.dequeue() - - log.enqueue((pub, event)) - } - - override def size: Int = log.length - def iterator: Iterator[(Pub, Evt)] = log.iterator - def events: Iterator[Evt] = log.iterator map (_._2) - - def clear() { log.clear() } - - /** Checks if two history objects are structurally identical. - * - * @return true, iff both history objects contain the same sequence of elements. 
- */ - override def equals(obj: Any): Boolean = obj match { - case that: History[_, _] => this.log equals that.log - case _ => false - } - override def hashCode = log.hashCode() -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/tests/scala2-library/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala deleted file mode 100644 index 7ab4dd2d9df2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import scala.annotation.migration - -/** This class can be used as an adaptor to create mutable maps from - * immutable map implementations. Only method `empty` has - * to be redefined if the immutable map on which this mutable map is - * originally based is not empty. `empty` is supposed to - * return the representation of an empty map. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.0, 01/01/2007 - * @since 1 - */ -@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") -class ImmutableMapAdaptor[A, B](protected var imap: immutable.Map[A, B]) -extends AbstractMap[A, B] - with Map[A, B] - with Serializable -{ - - override def size: Int = imap.size - - def get(key: A): Option[B] = imap.get(key) - - override def isEmpty: Boolean = imap.isEmpty - - override def apply(key: A): B = imap.apply(key) - - override def contains(key: A): Boolean = imap.contains(key) - - override def isDefinedAt(key: A) = imap.isDefinedAt(key) - - override def keySet: scala.collection.Set[A] = imap.keySet - - override def keysIterator: Iterator[A] = imap.keysIterator - - @migration("`keys` returns Iterable[A] rather than Iterator[A].", "2.8.0") - override def keys: scala.collection.Iterable[A] = imap.keys - - override def valuesIterator: Iterator[B] = imap.valuesIterator - - @migration("`values` returns Iterable[B] rather than Iterator[B].", "2.8.0") - override def values: scala.collection.Iterable[B] = imap.values - - def iterator: Iterator[(A, B)] = imap.iterator - - override def toList: List[(A, B)] = imap.toList - - override def update(key: A, value: B): Unit = { imap = imap.updated(key, value) } - - def -= (key: A): this.type = { imap = imap - key; this } - - def += (kv: (A, B)): this.type = { imap = imap + kv; this } - - override def clear(): Unit = { imap = imap.empty } - - override def transform(f: (A, B) => B): this.type = { imap = imap.transform(f); this } - - override def retain(p: (A, B) => Boolean): this.type = { - imap = imap.filter(xy => p(xy._1, xy._2)) - this - } - - override def toString() = imap.toString() -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/tests/scala2-library/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala deleted file mode 100644 index aa21c4cc112d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable 
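// An illustrative sketch of the adaptor described above (assumed usage, not from the
// original sources; the object name is made up): ImmutableMapAdaptor keeps an
// immutable.Map in a var and implements every mutating operation by swapping in an
// updated immutable copy.
object ImmutableMapAdaptorSketch {
  import scala.collection.{immutable, mutable}

  def main(args: Array[String]): Unit = {
    val m = new mutable.ImmutableMapAdaptor(immutable.Map("a" -> 1))
    m("b") = 2                 // internally: imap = imap.updated("b", 2)
    m -= "a"                   // internally: imap = imap - "a"
    assert(m.get("b") == Some(2) && !m.contains("a"))
  }
}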
- -/** This class can be used as an adaptor to create mutable sets from - * immutable set implementations. Only method `empty` has - * to be redefined if the immutable set on which this mutable set is - * originally based is not empty. `empty` is supposed to - * return the representation of an empty set. - * - * @author Matthias Zenger - * @version 1.0, 21/07/2003 - * @since 1 - */ -@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") -class ImmutableSetAdaptor[A](protected var set: immutable.Set[A]) -extends AbstractSet[A] - with Set[A] - with Serializable { - - override def size: Int = set.size - - override def isEmpty: Boolean = set.isEmpty - - def contains(elem: A): Boolean = set.contains(elem) - - override def foreach[U](f: A => U): Unit = set.foreach(f) - - override def exists(p: A => Boolean): Boolean = set.exists(p) - - override def toList: List[A] = set.toList - - override def toString = set.toString - - def iterator: Iterator[A] = set.iterator - - def +=(elem: A): this.type = { set = set + elem; this } - - def -=(elem: A): this.type = { set = set - elem; this } - - override def clear(): Unit = { set = set.empty } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeq.scala b/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeq.scala deleted file mode 100644 index 3d9630eea70a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeq.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** A subtrait of `collection.IndexedSeq` which represents sequences - * that can be mutated. - * - * $indexedSeqInfo - */ -trait IndexedSeq[A] extends Seq[A] - with scala.collection.IndexedSeq[A] - with GenericTraversableTemplate[A, IndexedSeq] - with IndexedSeqLike[A, IndexedSeq[A]] { - override def companion: GenericCompanion[IndexedSeq] = IndexedSeq - override def seq: IndexedSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable indexed sequence - * @define Coll `mutable.IndexedSeq` - */ -object IndexedSeq extends SeqFactory[IndexedSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer[A] -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqLike.scala deleted file mode 100644 index 4cf794c32fac..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqLike.scala +++ /dev/null @@ -1,71 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** A subtrait of scala.collection.IndexedSeq which represents sequences - * that can be mutated. - * - * It declares a method `update` which allows updating an element - * at a specific index in the sequence. - * - * This trait just implements `iterator` in terms of `apply` and `length`. 
- * However, see `IndexedSeqOptimized` for an implementation trait that overrides operations - * to make them run faster under the assumption of fast random access with `apply`. - * - * $indexedSeqInfo - * - * @tparam A the element type of the $coll - * @tparam Repr the type of the actual $coll containing the elements. - * - * @define Coll `IndexedSeq` - * @define coll mutable indexed sequence - * @define indexedSeqInfo - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait IndexedSeqLike[A, +Repr] extends Any with scala.collection.IndexedSeqLike[A, Repr] { self => - - override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]] - override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]] - - /** Replaces element at given index with a new value. - * - * @param idx the index of the element to replace. - * @param elem the new value. - * @throws IndexOutOfBoundsException if the index is not valid. - */ - def update(idx: Int, elem: A) - - /** Creates a view of this iterable @see Iterable.View - */ - override def view = new IndexedSeqView[A, Repr] { - protected lazy val underlying = self.repr - override def iterator = self.iterator - override def length = self.length - override def apply(idx: Int) = self.apply(idx) - override def update(idx: Int, elem: A) = self.update(idx, elem) - } - - /** A sub-sequence view starting at index `from` - * and extending up to (but not including) index `until`. - * - * @param from The index of the first element of the slice - * @param until The index of the element following the slice - * @note The difference between `view` and `slice` is that `view` produces - * a view of the current sequence, whereas `slice` produces a new sequence. - * - * @note view(from, to) is equivalent to view.slice(from, to) - */ - override def view(from: Int, until: Int) = view.slice(from, until) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqOptimized.scala deleted file mode 100644 index 09f0712862d5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqOptimized.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** A subtrait of scala.collection.IndexedSeq which represents sequences - * that can be mutated. 
- * - * @since 2.8 - */ -trait IndexedSeqOptimized[A, +Repr] extends Any with IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqView.scala b/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqView.scala deleted file mode 100644 index c4efa314a70c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/IndexedSeqView.scala +++ /dev/null @@ -1,118 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -import TraversableView.NoBuilder - -/** A non-strict view of a mutable `IndexedSeq`. - * $viewInfo - * Some of the operations of this class will yield again a mutable indexed sequence, - * others will just yield a plain indexed sequence of type `collection.IndexedSeq`. - * Because this is a leaf class there is no associated `Like` class. - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - */ -trait IndexedSeqView[A, +Coll] extends IndexedSeq[A] - with IndexedSeqOptimized[A, IndexedSeqView[A, Coll]] - with SeqView[A, Coll] - with SeqViewLike[A, Coll, IndexedSeqView[A, Coll]] { -self => - - protected[this] type This = IndexedSeqView[A, Coll] - - def update(idx: Int, elem: A): Unit - - trait TransformedX[B] extends IndexedSeqView[B, Coll] with super.TransformedS[B] { - def update(idx: Int, elem: B): Unit - override def toString = viewToString - } - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. 
*/ - private[collection] abstract class AbstractTransformedX[B] extends super.AbstractTransformedS[B] with TransformedX[B] - - // pre: until <= self.length - trait SlicedX extends super.SlicedS with TransformedX[A] { - override def length = endpoints.width - def update(idx: Int, elem: A) = - if (idx >= 0 && idx + from < until) self.update(idx + from, elem) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait FilteredX extends super.FilteredS with TransformedX[A] { - def update(idx: Int, elem: A) = self.update(index(idx), elem) - } - - trait TakenWhileX extends super.TakenWhileS with TransformedX[A] { - def update(idx: Int, elem: A) = - if (idx < len) self.update(idx, elem) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait DroppedWhileX extends super.DroppedWhileS with TransformedX[A] { - def update(idx: Int, elem: A) = - if (idx >= 0) self.update(idx + start, elem) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait ReversedX extends super.ReversedS with TransformedX[A] { - def update(idx: Int, elem: A) = self.update(self.length - 1 - idx, elem) - } - - /** Boilerplate method, to override in each subclass - * This method could be eliminated if Scala had virtual classes - */ - protected override def newFiltered(p: A => Boolean): TransformedX[A] = new AbstractTransformedX[A] with FilteredX { lazy val pred = p } - protected override def newSliced(_endpoints: SliceInterval): TransformedX[A] = new AbstractTransformedX[A] with SlicedX { lazy val endpoints = _endpoints } - protected override def newDroppedWhile(p: A => Boolean): TransformedX[A] = new AbstractTransformedX[A] with DroppedWhileX { lazy val pred = p } - protected override def newTakenWhile(p: A => Boolean): TransformedX[A] = new AbstractTransformedX[A] with TakenWhileX { lazy val pred = p } - protected override def newReversed: TransformedX[A] = new AbstractTransformedX[A] with ReversedX - - override def filter(p: A => Boolean): This = newFiltered(p) - override def init: This = newSliced(SliceInterval(0, self.length - 1)) - override def drop(n: Int): This = newSliced(SliceInterval(n, self.length)) - override def take(n: Int): This = newSliced(SliceInterval(0, n min self.length)) - override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until min self.length)) - override def dropWhile(p: A => Boolean): This = newDroppedWhile(p) - override def takeWhile(p: A => Boolean): This = newTakenWhile(p) - override def span(p: A => Boolean): (This, This) = (newTakenWhile(p), newDroppedWhile(p)) - override def splitAt(n: Int): (This, This) = (take(n), drop(n)) // !!! - override def reverse: This = newReversed - override def tail: IndexedSeqView[A, Coll] = if (isEmpty) super.tail else slice(1, length) -} - -/** An object containing the necessary implicit definitions to make - * `SeqView`s work. Its definitions are generally not accessed directly by clients. - * - * Note that the `canBuildFrom` factories yield `SeqView`s, not `IndexedSeqView`s. - * This is intentional, because not all operations yield again a `mutable.IndexedSeqView`. - * For instance, `map` just gives a `SeqView`, which reflects the fact that - * `map` cannot do its work and maintain a pointer into the original indexed sequence. 
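 * For instance (an illustrative sketch, not from the original source):
 * {{{
 *   val buf = scala.collection.mutable.ArrayBuffer(1, 2, 3, 4)
 *   val v   = buf.view(1, 3)    // a mutable IndexedSeqView over indices 1 and 2
 *   v(0) = 20                   // the sliced view's update writes through to buf(1)
 *   buf                         // ArrayBuffer(1, 20, 3, 4)
 *   val w = v.map(_ + 1)        // just a SeqView: no update, no pointer back into buf
 * }}}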
- */ -object IndexedSeqView { - type Coll = TraversableView[_, _ <: Traversable[_]] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] = - new CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder - } - implicit def arrCanBuildFrom[A]: CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] = - new CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] { - def apply(from: TraversableView[_, Array[_]]) = new NoBuilder - def apply() = new NoBuilder - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Iterable.scala b/tests/scala2-library/src/library/scala/collection/mutable/Iterable.scala deleted file mode 100644 index 92313c9ccd84..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Iterable.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala -package collection -package mutable - -import generic._ -import parallel.mutable.ParIterable - -/** A base trait for iterable collections that can be mutated. - * $iterableInfo - */ -trait Iterable[A] extends Traversable[A] -// with GenIterable[A] - with scala.collection.Iterable[A] - with GenericTraversableTemplate[A, Iterable] - with IterableLike[A, Iterable[A]] - with Parallelizable[A, ParIterable[A]] -{ - override def companion: GenericCompanion[Iterable] = Iterable - protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `mutable.IterableLike` gets introduced, please move this there! - override def seq: Iterable[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable iterable collection - * @define Coll `mutable.Iterable` - */ -object Iterable extends TraversableFactory[Iterable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Iterable[A]] = new ArrayBuffer -} - -/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ -abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LazyBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/LazyBuilder.scala deleted file mode 100644 index f0a5e6971a88..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LazyBuilder.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** A builder that constructs its result lazily. Iterators or iterables to - * be added to this builder with `++=` are not evaluated until `result` is called. - * - * This builder can be reused. - * - * @since 2.8 - * - * @tparam Elem type of the elements for this builder. - * @tparam To type of the collection this builder builds. 
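 * A minimal concrete subclass might look like this (an assumed sketch, not from the
 * original sources; `ListLazyBuilder` is a made-up name):
 * {{{
 *   class ListLazyBuilder[A] extends LazyBuilder[A, List[A]] {
 *     // `parts` holds the deferred chunks; they are only traversed here
 *     def result(): List[A] = parts.foldLeft(List.empty[A])(_ ++ _)
 *   }
 *   val b = new ListLazyBuilder[Int]
 *   b ++= Iterator(1, 2)   // the iterator is stored, not consumed
 *   b += 3
 *   b.result()             // List(1, 2, 3): the stored chunks are traversed only now
 * }}}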
- */ -abstract class LazyBuilder[Elem, +To] extends ReusableBuilder[Elem, To] { - /** The different segments of elements to be added to the builder, represented as iterators */ - protected var parts = new ListBuffer[TraversableOnce[Elem]] - def +=(x: Elem): this.type = { parts += List(x); this } - override def ++=(xs: TraversableOnce[Elem]): this.type = { parts += xs ; this } - def result(): To - def clear() { parts.clear() } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LinearSeq.scala b/tests/scala2-library/src/library/scala/collection/mutable/LinearSeq.scala deleted file mode 100644 index 3fa10042effd..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LinearSeq.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** A subtrait of `collection.LinearSeq` which represents sequences - * that can be mutated. - * $linearSeqInfo - * - * @define Coll `LinearSeq` - * @define coll linear sequence - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]] - * section on `Mutable Lists` for more information. - */ -trait LinearSeq[A] extends Seq[A] - with scala.collection.LinearSeq[A] - with GenericTraversableTemplate[A, LinearSeq] - with LinearSeqLike[A, LinearSeq[A]] { - override def companion: GenericCompanion[LinearSeq] = LinearSeq - override def seq: LinearSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `MutableList`. - * @define coll mutable linear sequence - * @define Coll `mutable.LinearSeq` - */ -object LinearSeq extends SeqFactory[LinearSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, LinearSeq[A]] = new MutableList[A] -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LinkedEntry.scala b/tests/scala2-library/src/library/scala/collection/mutable/LinkedEntry.scala deleted file mode 100644 index 296e7fde181e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LinkedEntry.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -/** Class for the linked hash map entry, used internally. 
- * @since 2.8 - */ -final class LinkedEntry[A, B](val key: A, var value: B) - extends HashEntry[A, LinkedEntry[A, B]] with Serializable { - var earlier: LinkedEntry[A, B] = null - var later: LinkedEntry[A, B] = null -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LinkedHashMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/LinkedHashMap.scala deleted file mode 100644 index 8e54f23bc78c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LinkedHashMap.scala +++ /dev/null @@ -1,180 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** $factoryInfo - * @define Coll `LinkedHashMap` - * @define coll linked hash map - */ -object LinkedHashMap extends MutableMapFactory[LinkedHashMap] { - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), LinkedHashMap[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B] = new LinkedHashMap[A, B] -} - -/** This class implements mutable maps using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @tparam A the type of the keys contained in this hash map. - * @tparam B the type of the values assigned to keys in this hash map. - * - * @define Coll `LinkedHashMap` - * @define coll linked hash map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `LinkedHashMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[LinkedHashMap, (A, B), LinkedHashMap[A, B]]` - * is defined in object `LinkedHashMap`. Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `LinkedHashMap`. 
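 * For example (illustrative, not from the original source):
 * {{{
 *   val m = scala.collection.mutable.LinkedHashMap("b" -> 2)
 *   m += ("a" -> 1)
 *   m += ("c" -> 3)
 *   m.keys.toList   // List(b, a, c): insertion order, independent of the hash order
 * }}}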
- * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@SerialVersionUID(1L) -class LinkedHashMap[A, B] extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, LinkedHashMap[A, B]] - with HashTable[A, LinkedEntry[A, B]] - with Serializable -{ - - override def empty = LinkedHashMap.empty[A, B] - override def size = tableSize - - type Entry = LinkedEntry[A, B] - - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null - - def get(key: A): Option[B] = { - val e = findEntry(key) - if (e == null) None - else Some(e.value) - } - - override def put(key: A, value: B): Option[B] = { - val e = findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } - } - - override def remove(key: A): Option[B] = { - val e = removeEntry(key) - if (e eq null) None - else { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - Some(e.value) - } - } - - @deprecatedOverriding("+= should not be overridden so it stays consistent with put.", "2.11.0") - def += (kv: (A, B)): this.type = { put(kv._1, kv._2); this } - - @deprecatedOverriding("-= should not be overridden so it stays consistent with remove.", "2.11.0") - def -=(key: A): this.type = { remove(key); this } - - def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res } - else Iterator.empty.next() - } - - protected class LinkedFilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) { - override def empty = LinkedHashMap.empty - } - - override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new LinkedFilteredKeys(p) - - protected class LinkedMappedValues[C](f: B => C) extends super.MappedValues[C](f) { - override def empty = LinkedHashMap.empty - } - - override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new LinkedMappedValues(f) - - protected class LinkedDefaultKeySet extends super.DefaultKeySet { - override def empty = LinkedHashSet.empty - } - - override def keySet: scala.collection.Set[A] = new LinkedDefaultKeySet - - override def keysIterator: Iterator[A] = new AbstractIterator[A] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next() - } - - override def valuesIterator: Iterator[B] = new AbstractIterator[B] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = cur.value; cur = cur.later; res } - else Iterator.empty.next() - } - - override def foreach[U](f: ((A, B)) => U) { - var cur = firstEntry - while (cur ne null) { - f((cur.key, cur.value)) - cur = cur.later - } - } - - protected override def foreachEntry[U](f: Entry => U) { - var cur = firstEntry - while (cur ne null) { - f(cur) - cur = cur.later - } - } - - protected def createNewEntry[B1](key: A, value: B1): Entry = { - val e = new Entry(key, value.asInstanceOf[B]) - if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } - lastEntry = e - e - } - - override def clear() { - clearTable() - firstEntry = null - lastEntry = null - } - - private def writeObject(out: java.io.ObjectOutputStream) { 
- serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) - } - - private def readObject(in: java.io.ObjectInputStream) { - firstEntry = null - lastEntry = null - init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject())) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LinkedHashSet.scala b/tests/scala2-library/src/library/scala/collection/mutable/LinkedHashSet.scala deleted file mode 100644 index f00cbd90dc7a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LinkedHashSet.scala +++ /dev/null @@ -1,147 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package mutable - -import generic._ - -/** This class implements mutable sets using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @author Matthias Zenger - * @author Martin Odersky - * @author Pavel Pavlov - * @version 2.0, 31/12/2006 - * @since 1 - * - * @tparam A the type of the elements contained in this set. - * - * @define Coll `LinkedHashSet` - * @define coll linked hash set - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `LinkedHashSet[B]` because an implicit of type `CanBuildFrom[LinkedHashSet, B, LinkedHashSet[B]]` - * is defined in object `LinkedHashSet`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `LinkedHashSet`. 
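 * For example (illustrative, not from the original source):
 * {{{
 *   val s = scala.collection.mutable.LinkedHashSet(3, 1, 2)
 *   s.add(1)    // false: already present, the original position is kept
 *   s.add(4)    // true: appended at the end of the insertion order
 *   s.toList    // List(3, 1, 2, 4)
 * }}}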
- * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@SerialVersionUID(1L) -class LinkedHashSet[A] extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, LinkedHashSet] - with SetLike[A, LinkedHashSet[A]] - with HashTable[A, LinkedHashSet.Entry[A]] - with Serializable -{ - override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet - - type Entry = LinkedHashSet.Entry[A] - - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null - - override def size: Int = tableSize - - def contains(elem: A): Boolean = findEntry(elem) ne null - - @deprecatedOverriding("+= should not be overridden so it stays consistent with add.", "2.11.0") - def += (elem: A): this.type = { add(elem); this } - - @deprecatedOverriding("-= should not be overridden so it stays consistent with remove.", "2.11.0") - def -= (elem: A): this.type = { remove(elem); this } - - override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null - - override def remove(elem: A): Boolean = { - val e = removeEntry(elem) - if (e eq null) false - else { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - true - } - } - - def iterator: Iterator[A] = new AbstractIterator[A] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next() - } - - override def foreach[U](f: A => U) { - var cur = firstEntry - while (cur ne null) { - f(cur.key) - cur = cur.later - } - } - - protected override def foreachEntry[U](f: Entry => U) { - var cur = firstEntry - while (cur ne null) { - f(cur) - cur = cur.later - } - } - - protected def createNewEntry[B](key: A, dummy: B): Entry = { - val e = new Entry(key) - if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } - lastEntry = e - e - } - - override def clear() { - clearTable() - firstEntry = null - lastEntry = null - } - - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, { e => out.writeObject(e.key) }) - } - - private def readObject(in: java.io.ObjectInputStream) { - firstEntry = null - lastEntry = null - init(in, createNewEntry(in.readObject().asInstanceOf[A], null)) - } -} - -/** $factoryInfo - * @define Coll `LinkedHashSet` - * @define coll linked hash set - */ -object LinkedHashSet extends MutableSetFactory[LinkedHashSet] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = setCanBuildFrom[A] - override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] - - /** Class for the linked hash set entry, used internally. 
- * @since 2.10 - */ - private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable { - var earlier: Entry[A] = null - var later: Entry[A] = null - } -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LinkedList.scala b/tests/scala2-library/src/library/scala/collection/mutable/LinkedList.scala deleted file mode 100644 index 5d03cd44102e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LinkedList.scala +++ /dev/null @@ -1,124 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** A more traditional/primitive style of linked list where the "list" is also the "head" link. Links can be manually - * created and manipulated, though the use of the API, when possible, is recommended. - * - * The danger of directly manipulating next: - * {{{ - * scala> val b = LinkedList(1) - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1) - * - * scala> b.next = null - * - * scala> println(b) - * java.lang.NullPointerException - * }}} - * - * $singleLinkedListExample - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#linked_lists "Scala's Collection Library overview"]] - * section on `Linked Lists` for more information. - * - * @tparam A the type of the elements contained in this linked list. - * - * @constructor Creates an "empty" list, defined as a single node with no data element and next pointing to itself. - - * @define Coll `LinkedList` - * @define coll linked list - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `LinkedList[B]` because an implicit of type `CanBuildFrom[LinkedList, B, LinkedList[B]]` - * is defined in object `LinkedList`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `LinkedList`. 
- * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define collectExample Example: - * {{{ - * scala> val a = LinkedList(1, 2, 3) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3) - * - * scala> val addOne: PartialFunction[Any, Float] = {case i: Int => i + 1.0f} - * addOne: PartialFunction[Any,Float] = - * - * scala> val b = a.collect(addOne) - * b: scala.collection.mutable.LinkedList[Float] = LinkedList(2.0, 3.0, 4.0) - * - * scala> val c = LinkedList('a') - * c: scala.collection.mutable.LinkedList[Char] = LinkedList(a) - * - * scala> val d = a ++ c - * d: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, 3, a) - * - * scala> val e = d.collect(addOne) - * e: scala.collection.mutable.LinkedList[Float] = LinkedList(2.0, 3.0, 4.0) - * }}} - */ -@SerialVersionUID(-7308240733518833071L) -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -class LinkedList[A]() extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, LinkedList] - with LinkedListLike[A, LinkedList[A]] - with Serializable { - next = this - - /** Creates a new list. If the parameter next is null, the result is an empty list. Otherwise, the result is - * a list with elem at the head, followed by the contents of next. - * - * Note that next is part of the new list, as opposed to the +: operator, - * which makes a new copy of the original list. - * - * @example - * {{{ - * scala> val m = LinkedList(1) - * m: scala.collection.mutable.LinkedList[Int] = LinkedList(1) - * - * scala> val n = new LinkedList[Int](2, m) - * n: scala.collection.mutable.LinkedList[Int] = LinkedList(2, 1) - * }}} - */ - def this(elem: A, next: LinkedList[A]) { - this() - if (next != null) { - this.elem = elem - this.next = next - } - } - - override def companion: GenericCompanion[LinkedList] = LinkedList -} - -/** $factoryInfo - * @define Coll `LinkedList` - * @define coll linked list - */ -@deprecated("low-level linked lists are deprecated", "2.11.0") -object LinkedList extends SeqFactory[LinkedList] { - override def empty[A]: LinkedList[A] = new LinkedList[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, LinkedList[A]] = - (new MutableList) mapResult ((l: MutableList[A]) => l.toLinkedList) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LinkedListLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/LinkedListLike.scala deleted file mode 100644 index 27c4466c9968..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LinkedListLike.scala +++ /dev/null @@ -1,192 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import scala.annotation.tailrec - -/** This extensible class may be used as a basis for implementing linked - * list. Type variable `A` refers to the element type of the - * list, type variable `This` is used to model self types of - * linked lists. 
- * - * $singleLinkedListExample - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 1.0, 08/07/2003 - * @since 2.8 - * - * @tparam A type of the elements contained in the linked list - * @tparam This the type of the actual linked list holding the elements - * - * @define Coll `LinkedList` - * @define coll linked list - * - * @define singleLinkedListExample - * If the list is empty `next` must be set to `this`. The last node in every - * mutable linked list is empty. - * - * Examples (`_` represents no value): - * - * {{{ - * - * Empty: - * - * [ _ ] --, - * [ ] <-` - * - * Single element: - * - * [ x ] --> [ _ ] --, - * [ ] <-` - * - * More elements: - * - * [ x ] --> [ y ] --> [ z ] --> [ _ ] --, - * [ ] <-` - * - * }}} - */ -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self => - - var elem: A = _ - var next: This = _ - - override def isEmpty = next eq this - - /** Determines the length of this $coll by traversing and counting every - * node. - */ - override def length: Int = length0(repr, 0) - - @tailrec private def length0(elem: This, acc: Int): Int = - if (elem.isEmpty) acc else length0(elem.next, acc + 1) - - override def head: A = - if (isEmpty) throw new NoSuchElementException - else elem - - override def tail: This = { - require(nonEmpty, "tail of empty list") - next - } - - /** If `this` is empty then it does nothing and returns `that`. Otherwise, appends `that` to `this`. The append - * requires a full traversal of `this`. - * - * Examples: - * - * {{{ - * scala> val a = LinkedList(1, 2) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> val b = LinkedList(1, 2) - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> a.append(b) - * res0: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 1, 2) - * - * scala> println(a) - * LinkedList(1, 2, 1, 2) - * }}} - * - * {{{ - * scala> val a = new LinkedList[Int]() - * a: scala.collection.mutable.LinkedList[Int] = LinkedList() - * - * scala> val b = LinkedList(1, 2) - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> val c = a.append(b) - * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> println(a) - * LinkedList() - * }}} - * - * @return the list after append (this is the list itself if nonempty, - * or list `that` if list this is empty. 
) - */ - def append(that: This): This = { - @tailrec - def loop(x: This) { - if (x.next.isEmpty) x.next = that - else loop(x.next) - } - if (isEmpty) that - else { loop(repr); repr } - } - - /** Insert linked list `that` at current position of this linked list - * @note this linked list must not be empty - */ - def insert(that: This): Unit = { - require(nonEmpty, "insert into empty list") - if (that.nonEmpty) { - that append next - next = that - } - } - - override def drop(n: Int): This = { - var i = 0 - var these: This = repr - while (i < n && !these.isEmpty) { - these = these.next - i += 1 - } - these - } - - private def atLocation[T](n: Int)(f: This => T) = { - val loc = drop(n) - if (loc.nonEmpty) f(loc) - else throw new IndexOutOfBoundsException(n.toString) - } - - override def apply(n: Int): A = atLocation(n)(_.elem) - def update(n: Int, x: A): Unit = atLocation(n)(_.elem = x) - - def get(n: Int): Option[A] = { - val loc = drop(n) - if (loc.nonEmpty) Some(loc.elem) - else None - } - - override def iterator: Iterator[A] = new AbstractIterator[A] { - var elems = self - def hasNext = elems.nonEmpty - def next = { - val res = elems.elem - elems = elems.next - res - } - } - - override def foreach[U](f: A => U) { - var these = this - while (these.nonEmpty) { - f(these.elem) - these = these.next - } - } - - /** Return a clone of this list. - * - * @return a `LinkedList` with the same elements. - */ - override def clone(): This = { - val bf = newBuilder - bf ++= this - bf.result() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ListBuffer.scala b/tests/scala2-library/src/library/scala/collection/mutable/ListBuffer.scala deleted file mode 100644 index aa79e972d560..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ListBuffer.scala +++ /dev/null @@ -1,477 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ -import immutable.{List, Nil, ::} -import java.io.{ObjectOutputStream, ObjectInputStream} - -/** A `Buffer` implementation backed by a list. It provides constant time - * prepend and append. Most other operations are linear. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#list_buffers "Scala's Collection Library overview"]] - * section on `List Buffers` for more information. - * - * @tparam A the type of this list buffer's elements. - * - * @define Coll `ListBuffer` - * @define coll list buffer - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ListBuffer[B]` because an implicit of type `CanBuildFrom[ListBuffer, B, ListBuffer[B]]` - * is defined in object `ListBuffer`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ListBuffer`. 
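// Minimal usage sketch of the ListBuffer contract stated above (constant-time
// append and prepend, `toList` to hand back the accumulated immutable list),
// assuming a Scala 2.12-era standard library.
import scala.collection.mutable.ListBuffer

object ListBufferSketch extends App {
  val buf = new ListBuffer[Int]
  buf += 1            // append, constant time
  buf += 2
  0 +=: buf           // prepend, constant time
  println(buf.toList) // List(0, 1, 2)
}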
- * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3419063961353022662L) -final class ListBuffer[A] - extends AbstractBuffer[A] - with Buffer[A] - with GenericTraversableTemplate[A, ListBuffer] - with BufferLike[A, ListBuffer[A]] - with ReusableBuilder[A, List[A]] - with SeqForwarder[A] - with Serializable -{ - override def companion: GenericCompanion[ListBuffer] = ListBuffer - - import scala.collection.Traversable - import scala.collection.immutable.ListSerializeEnd - - /** Expected invariants: - * If start.isEmpty, last0 == null - * If start.nonEmpty, last0 != null - * If len == 0, start.isEmpty - * If len > 0, start.nonEmpty - */ - private var start: List[A] = Nil - private var last0: ::[A] = _ - private var exported: Boolean = false - private var len = 0 - - protected def underlying: List[A] = start - - private def writeObject(out: ObjectOutputStream) { - // write start - var xs: List[A] = start - while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail } - out.writeObject(ListSerializeEnd) - - // no need to write last0 - - // write if exported - out.writeBoolean(exported) - - // write the length - out.writeInt(len) - } - - private def readObject(in: ObjectInputStream) { - // read start, set last0 appropriately - var elem: A = in.readObject.asInstanceOf[A] - if (elem == ListSerializeEnd) { - start = Nil - last0 = null - } else { - var current = new ::(elem, Nil) - start = current - elem = in.readObject.asInstanceOf[A] - while (elem != ListSerializeEnd) { - val list = new ::(elem, Nil) - current.tl = list - current = list - elem = in.readObject.asInstanceOf[A] - } - last0 = current - start - } - - // read if exported - exported = in.readBoolean() - - // read the length - len = in.readInt() - } - - /** The current length of the buffer. - * - * This operation takes constant time. - */ - override def length = len - - // Don't use the inherited size, which forwards to a List and is O(n). - override def size = length - - // Override with efficient implementations using the extra size information available to ListBuffer. - override def isEmpty: Boolean = len == 0 - override def nonEmpty: Boolean = len > 0 - - // Implementations of abstract methods in Buffer - - override def apply(n: Int): A = - if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString()) - else super.apply(n) - - /** Replaces element at index `n` with the new element - * `newelem`. Takes time linear in the buffer size. (except the - * first element, which is updated in constant time). - * - * @param n the index of the element to replace. - * @param x the new element. - * @throws IndexOutOfBoundsException if `n` is out of bounds. - */ - def update(n: Int, x: A) { - // We check the bounds early, so that we don't trigger copying. - if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString) - if (exported) copy() - if (n == 0) { - val newElem = new :: (x, start.tail) - if (last0 eq start) { - last0 = newElem - } - start = newElem - } else { - var cursor = start - var i = 1 - while (i < n) { - cursor = cursor.tail - i += 1 - } - val newElem = new :: (x, cursor.tail.tail) - if (last0 eq cursor.tail) { - last0 = newElem - } - cursor.asInstanceOf[::[A]].tl = newElem - } - } - - /** Appends a single element to this buffer. This operation takes constant time. - * - * @param x the element to append. - * @return this $coll. 
- */ - def += (x: A): this.type = { - if (exported) copy() - if (isEmpty) { - last0 = new :: (x, Nil) - start = last0 - } else { - val last1 = last0 - last0 = new :: (x, Nil) - last1.tl = last0 - } - len += 1 - this - } - - override def ++=(xs: TraversableOnce[A]): this.type = xs match { - case x: AnyRef if x eq this => this ++= (this take size) - case _ => super.++=(xs) - - } - - override def ++=:(xs: TraversableOnce[A]): this.type = - if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs) - - /** Clears the buffer contents. - */ - def clear() { - start = Nil - last0 = null - exported = false - len = 0 - } - - /** Prepends a single element to this buffer. This operation takes constant - * time. - * - * @param x the element to prepend. - * @return this $coll. - */ - def +=: (x: A): this.type = { - if (exported) copy() - val newElem = new :: (x, start) - if (isEmpty) last0 = newElem - start = newElem - len += 1 - this - } - - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a new - * one. Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. - * @param seq the iterable object providing all elements to insert. - * @throws IndexOutOfBoundsException if `n` is out of bounds. - */ - def insertAll(n: Int, seq: Traversable[A]) { - // We check the bounds early, so that we don't trigger copying. - if (n < 0 || n > len) throw new IndexOutOfBoundsException(n.toString) - if (exported) copy() - var elems = seq.toList.reverse - len += elems.length - if (n == 0) { - while (!elems.isEmpty) { - val newElem = new :: (elems.head, start) - if (start.isEmpty) last0 = newElem - start = newElem - elems = elems.tail - } - } else { - var cursor = start - var i = 1 - while (i < n) { - cursor = cursor.tail - i += 1 - } - while (!elems.isEmpty) { - val newElem = new :: (elems.head, cursor.tail) - if (cursor.tail.isEmpty) last0 = newElem - cursor.asInstanceOf[::[A]].tl = newElem - elems = elems.tail - } - } - } - - /** Reduce the length of the buffer, and null out last0 - * if this reduces the length to 0. - */ - private def reduceLengthBy(num: Int) { - len -= num - if (len <= 0) // obviously shouldn't be < 0, but still better not to leak - last0 = null - } - - /** Removes a given number of elements on a given index position. May take - * time linear in the buffer size. - * - * @param n the index which refers to the first element to remove. - * @param count the number of elements to remove. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length - count` (with `count > 0`). - * @throws IllegalArgumentException if `count < 0`. 
- */ - override def remove(n: Int, count: Int) { - if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString) - else if (count == 0) return // Nothing to do - if (n < 0 || n > len - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString) - if (exported) copy() - val n1 = n max 0 - val count1 = count min (len - n1) - if (n1 == 0) { - var c = count1 - while (c > 0) { - start = start.tail - c -= 1 - } - } else { - var cursor = start - var i = 1 - while (i < n1) { - cursor = cursor.tail - i += 1 - } - var c = count1 - while (c > 0) { - if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]] - cursor.asInstanceOf[::[A]].tl = cursor.tail.tail - c -= 1 - } - } - reduceLengthBy(count1) - } - -// Implementation of abstract method in Builder - - /** Returns the accumulated `List`. - * - * This method may be called multiple times to obtain snapshots of the list in different stages of construction. - */ - def result: List[A] = toList - - /** Converts this buffer to a list. Takes constant time. The buffer is - * copied lazily, the first time it is mutated. - */ - override def toList: List[A] = { - exported = !isEmpty - start - } - -// New methods in ListBuffer - - /** Prepends the elements of this buffer to a given list - * - * @param xs the list to which elements are prepended - */ - def prependToList(xs: List[A]): List[A] = { - if (isEmpty) xs - else { - if (exported) copy() - last0.tl = xs - toList - } - } - -// Overrides of methods in Buffer - - /** Removes the element on a given index position. May take time linear in - * the buffer size. - * - * @param n the index which refers to the element to delete. - * @return n the element that was formerly at position `n`. - * @note an element must exists at position `n`. - * @throws IndexOutOfBoundsException if `n` is out of bounds. - */ - def remove(n: Int): A = { - if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString()) - if (exported) copy() - var old = start.head - if (n == 0) { - start = start.tail - } else { - var cursor = start - var i = 1 - while (i < n) { - cursor = cursor.tail - i += 1 - } - old = cursor.tail.head - if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]] - cursor.asInstanceOf[::[A]].tl = cursor.tail.tail - } - reduceLengthBy(1) - old - } - - /** Remove a single element from this buffer. May take time linear in the - * buffer size. - * - * @param elem the element to remove. - * @return this $coll. - */ - override def -= (elem: A): this.type = { - if (exported) copy() - if (isEmpty) {} - else if (start.head == elem) { - start = start.tail - reduceLengthBy(1) - } - else { - var cursor = start - while (!cursor.tail.isEmpty && cursor.tail.head != elem) { - cursor = cursor.tail - } - if (!cursor.tail.isEmpty) { - val z = cursor.asInstanceOf[::[A]] - if (z.tl == last0) - last0 = z - z.tl = cursor.tail.tail - reduceLengthBy(1) - } - } - this - } - - /** Selects the last element. - * - * Runs in constant time. - * - * @return the last element of this buffer. - * @throws NoSuchElementException if this buffer is empty. - */ - override def last: A = - if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer") - else last0.head - - /** Optionally selects the last element. - * - * Runs in constant time. - * - * @return `Some` of the last element of this buffer if the buffer is nonempty, `None` if it is empty. 
- */ - override def lastOption: Option[A] = if (last0 eq null) None else Some(last0.head) - - /** Returns an iterator over this `ListBuffer`. The iterator will reflect - * changes made to the underlying `ListBuffer` beyond the next element; - * the next element's value is cached so that `hasNext` and `next` are - * guaranteed to be consistent. In particular, an empty `ListBuffer` - * will give an empty iterator even if the `ListBuffer` is later filled. - */ - override def iterator: Iterator[A] = new AbstractIterator[A] { - // Have to be careful iterating over mutable structures. - // This used to have "(cursor ne last0)" as part of its hasNext - // condition, which means it can return true even when the iterator - // is exhausted. Inconsistent results are acceptable when one mutates - // a structure while iterating, but we should never return hasNext == true - // on exhausted iterators (thus creating exceptions) merely because - // values were changed in-place. - var cursor: List[A] = if (ListBuffer.this.isEmpty) Nil else start - - def hasNext: Boolean = cursor ne Nil - def next(): A = - if (!hasNext) throw new NoSuchElementException("next on empty Iterator") - else { - val ans = cursor.head - cursor = cursor.tail - ans - } - } - - // Private methods - - /** Copy contents of this buffer */ - private def copy() { - if (isEmpty) return - var cursor = start - val limit = last0.tail - clear() - while (cursor ne limit) { - this += cursor.head - cursor = cursor.tail - } - } - - override def equals(that: Any): Boolean = that match { - case that: ListBuffer[_] => this.start equals that.start - case _ => super.equals(that) - } - - /** Returns a clone of this buffer. - * - * @return a `ListBuffer` with the same elements. - */ - override def clone(): ListBuffer[A] = (new ListBuffer[A]) ++= this - - /** Defines the prefix of the string representation. - * - * @return the string representation of this buffer. - */ - override def stringPrefix: String = "ListBuffer" -} - -/** $factoryInfo - * @define Coll `ListBuffer` - * @define coll list buffer - */ -object ListBuffer extends SeqFactory[ListBuffer] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListBuffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowingBuilder(new ListBuffer[A]) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ListMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/ListMap.scala deleted file mode 100644 index e963af4a8aa2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ListMap.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ -import annotation.tailrec - -/** A simple mutable map backed by a list, so it preserves insertion order. - * - * @tparam A the type of the keys contained in this list map. - * @tparam B the type of the values assigned to keys in this list map. - * - * @define Coll `mutable.ListMap` - * @define coll mutable list map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ListMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. 
This is because an implicit of type `CanBuildFrom[ListMap, (A, B), ListMap[A, B]]` - * is defined in object `ListMap`. Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ListMap`. - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -class ListMap[A, B] -extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, ListMap[A, B]] - with Serializable { - - override def empty = ListMap.empty[A, B] - - private var elems: List[(A, B)] = List() - private var siz: Int = 0 - - def get(key: A): Option[B] = elems find (_._1 == key) map (_._2) - def iterator: Iterator[(A, B)] = elems.iterator - - @deprecatedOverriding("No sensible way to override += as private remove is used in multiple places internally.", "2.11.0") - def += (kv: (A, B)) = { elems = remove(kv._1, elems, List()); elems = kv :: elems; siz += 1; this } - - @deprecatedOverriding("No sensible way to override -= as private remove is used in multiple places internally.", "2.11.0") - def -= (key: A) = { elems = remove(key, elems, List()); this } - - @tailrec - private def remove(key: A, elems: List[(A, B)], acc: List[(A, B)]): List[(A, B)] = { - if (elems.isEmpty) acc - else if (elems.head._1 == key) { siz -= 1; acc ::: elems.tail } - else remove(key, elems.tail, elems.head :: acc) - } - - - @deprecatedOverriding("No sensible way to override as this functionality relies upon access to private methods.", "2.11.0") - override def clear() = { elems = List(); siz = 0 } - - @deprecatedOverriding("No sensible way to override as this functionality relies upon access to private methods.", "2.11.0") - override def size: Int = siz -} - -/** $factoryInfo - * @define Coll `mutable.ListMap` - * @define coll mutable list map - */ -object ListMap extends MutableMapFactory[ListMap] { - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B]: ListMap[A, B] = new ListMap[A, B] -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/LongMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/LongMap.scala deleted file mode 100644 index ecbb1952af7b..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/LongMap.scala +++ /dev/null @@ -1,575 +0,0 @@ -package scala -package collection -package mutable - -import generic.CanBuildFrom - -/** This class implements mutable maps with `Long` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, - * are typically substantially faster with `LongMap` than [[HashMap]]. Methods - * that act on the whole map, including `foreach` and `map` are not in - * general expected to be faster than with a generic map, save for those - * that take particular advantage of the internal structure of the map: - * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. - * - * Maps with open addressing may become less efficient at lookup after - * repeated addition/removal of elements. 
Although `LongMap` makes a - * decent attempt to remain efficient regardless, calling `repack` - * on a map that will no longer have elements removed but will be - * used heavily may save both time and storage space. - * - * This map is not intended to contain more than 2^29 entries (approximately - * 500 million). The maximum capacity is 2^30, but performance will degrade - * rapidly as 2^30 is approached. - * - */ -final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean) -extends AbstractMap[Long, V] - with Map[Long, V] - with MapLike[Long, V, LongMap[V]] - with Serializable -{ - import LongMap._ - - def this() = this(LongMap.exceptionDefault, 16, true) - - /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: Long => V) = this(defaultEntry, 16, true) - - /** Creates a new `LongMap` with an initial buffer of specified size. - * - * A LongMap can typically contain half as many elements as its buffer size - * before it requires resizing. - */ - def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) - - /** Creates a new `LongMap` with specified default values and initial buffer size. */ - def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - - private[this] var mask = 0 - private[this] var extraKeys: Int = 0 - private[this] var zeroValue: AnyRef = null - private[this] var minValue: AnyRef = null - private[this] var _size = 0 - private[this] var _vacant = 0 - private[this] var _keys: Array[Long] = null - private[this] var _values: Array[AnyRef] = null - - if (initBlank) defaultInitialize(initialBufferSize) - - private[this] def defaultInitialize(n: Int) = { - mask = - if (n<0) 0x7 - else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 - _keys = new Array[Long](mask+1) - _values = new Array[AnyRef](mask+1) - } - - private[collection] def initializeTo( - m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] - ) { - mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz - } - - override def size: Int = _size + (extraKeys+1)/2 - override def empty: LongMap[V] = new LongMap() - - private def imbalanced: Boolean = - (_size + _vacant) > 0.5*mask || _vacant > _size - - private def toIndex(k: Long): Int = { - // Part of the MurmurHash3 32 bit finalizer - val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt - val x = (h ^ (h >>> 16)) * 0x85EBCA6B - (x ^ (x >>> 13)) & mask - } - - private def seekEmpty(k: Long): Int = { - var e = toIndex(k) - var x = 0 - while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - e - } - - private def seekEntry(k: Long): Int = { - var e = toIndex(k) - var x = 0 - var q = 0L - while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - e | MissingBit - } - - private def seekEntryOrOpen(k: Long): Int = { - var e = toIndex(k) - var x = 0 - var q = 0L - while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - if (q == 0) return e | MissingBit - val o = e | MissVacant - while ({ q = _keys(e); if (q==k) return e; q != 0}) { - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - o - } - - override def contains(key: Long): Boolean = { - if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 - else seekEntry(key) >= 0 - } - - override def get(key: Long): 
Option[V] = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) None - else if (key == 0) Some(zeroValue.asInstanceOf[V]) - else Some(minValue.asInstanceOf[V]) - } - else { - val i = seekEntry(key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) - } - } - - override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) default - else if (key == 0) zeroValue.asInstanceOf[V1] - else minValue.asInstanceOf[V1] - } - else { - val i = seekEntry(key) - if (i < 0) default else _values(i).asInstanceOf[V1] - } - } - - override def getOrElseUpdate(key: Long, defaultValue: => V): V = { - if (key == -key) { - val kbits = (key>>>63).toInt + 1 - if ((kbits & extraKeys) == 0) { - val value = defaultValue - extraKeys |= kbits - if (key == 0) zeroValue = value.asInstanceOf[AnyRef] - else minValue = value.asInstanceOf[AnyRef] - value - } - else if (key == 0) zeroValue.asInstanceOf[V] - else minValue.asInstanceOf[V] - } - else { - var i = seekEntryOrOpen(key) - if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or even contain what we want now - // (but if it does, we'll replace it) - val value = { - val ok = _keys - val ans = defaultValue - if (ok ne _keys) { - i = seekEntryOrOpen(key) - if (i >= 0) _size -= 1 - } - ans - } - _size += 1 - val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - value - } - else _values(i).asInstanceOf[V] - } - } - - /** Retrieves the value associated with a key, or the default for that type if none exists - * (null for AnyRef, 0 for floats and integers). - * - * Note: this is the fastest way to retrieve a value that may or - * may not exist, if the default null/zero is acceptable. For key/value - * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. - */ - def getOrNull(key: Long): V = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V] - else if (key == 0) zeroValue.asInstanceOf[V] - else minValue.asInstanceOf[V] - } - else { - val i = seekEntry(key) - if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V] - } - } - - /** Retrieves the value associated with a key. - * If the key does not exist in the map, the `defaultEntry` for that key - * will be returned instead. - */ - override def apply(key: Long): V = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key) - else if (key == 0) zeroValue.asInstanceOf[V] - else minValue.asInstanceOf[V] - } - else { - val i = seekEntry(key) - if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] - } - } - - /** The user-supplied default value for the key. Throws an exception - * if no other default behavior was specified. - */ - override def default(key: Long) = defaultEntry(key) - - private def repack(newMask: Int) { - val ok = _keys - val ov = _values - mask = newMask - _keys = new Array[Long](mask+1) - _values = new Array[AnyRef](mask+1) - _vacant = 0 - var i = 0 - while (i < ok.length) { - val k = ok(i) - if (k != -k) { - val j = seekEmpty(k) - _keys(j) = k - _values(j) = ov(i) - } - i += 1 - } - } - - /** Repacks the contents of this `LongMap` for maximum efficiency of lookup. 
- * - * For maps that undergo a complex creation process with both addition and - * removal of keys, and then are used heavily with no further removal of - * elements, calling `repack` after the end of the creation can result in - * improved performance. Repacking takes time proportional to the number - * of entries in the map. - */ - def repack() { - var m = mask - if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask - while (m > 8 && 8*_size < m) m = m >>> 1 - repack(m) - } - - override def put(key: Long, value: V): Option[V] = { - if (key == -key) { - if (key == 0) { - val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None - zeroValue = value.asInstanceOf[AnyRef] - extraKeys |= 1 - ans - } - else { - val ans = if ((extraKeys&2) == 1) Some(minValue.asInstanceOf[V]) else None - minValue = value.asInstanceOf[AnyRef] - extraKeys |= 2 - ans - } - } - else { - val i = seekEntryOrOpen(key) - if (i < 0) { - val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - None - } - else { - val ans = Some(_values(i).asInstanceOf[V]) - _keys(i) = key - _values(i) = value.asInstanceOf[AnyRef] - ans - } - } - } - - /** Updates the map to include a new key-value pair. - * - * This is the fastest way to add an entry to a `LongMap`. - */ - override def update(key: Long, value: V): Unit = { - if (key == -key) { - if (key == 0) { - zeroValue = value.asInstanceOf[AnyRef] - extraKeys |= 1 - } - else { - minValue = value.asInstanceOf[AnyRef] - extraKeys |= 2 - } - } - else { - val i = seekEntryOrOpen(key) - if (i < 0) { - val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - } - else { - _keys(i) = key - _values(i) = value.asInstanceOf[AnyRef] - } - } - } - - /** Adds a new key/value pair to this map and returns the map. 
*/ - def +=(key: Long, value: V): this.type = { update(key, value); this } - - def +=(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } - - def -=(key: Long): this.type = { - if (key == -key) { - if (key == 0L) { - extraKeys &= 0x2 - zeroValue = null - } - else { - extraKeys &= 0x1 - minValue = null - } - } - else { - val i = seekEntry(key) - if (i >= 0) { - _size -= 1 - _vacant += 1 - _keys(i) = Long.MinValue - _values(i) = null - } - } - this - } - - def iterator: Iterator[(Long, V)] = new Iterator[(Long, V)] { - private[this] val kz = _keys - private[this] val vz = _values - - private[this] var nextPair: (Long, V) = - if (extraKeys==0) null - else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) - else (Long.MinValue, minValue.asInstanceOf[V]) - - private[this] var anotherPair: (Long, V) = - if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) - else null - - private[this] var index = 0 - - def hasNext: Boolean = nextPair != null || (index < kz.length && { - var q = kz(index) - while (q == -q) { - index += 1 - if (index >= kz.length) return false - q = kz(index) - } - nextPair = (kz(index), vz(index).asInstanceOf[V]) - index += 1 - true - }) - def next = { - if (nextPair == null && !hasNext) throw new NoSuchElementException("next") - val ans = nextPair - if (anotherPair != null) { - nextPair = anotherPair - anotherPair = null - } - else nextPair = null - ans - } - } - - override def foreach[U](f: ((Long,V)) => U) { - if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) - if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f((k, _values(i).asInstanceOf[V])) - } - i += 1 - } - } - - override def clone(): LongMap[V] = { - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = java.util.Arrays.copyOf(_values, _values.length) - val lm = new LongMap[V](defaultEntry, 1, false) - lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) - lm - } - - override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - lm += kv - lm - } - - override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - xs.foreach(kv => lm += kv) - lm - } - - override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - lm += (key, value) - lm - } - - /** Applies a function to all keys of this map. */ - def foreachKey[A](f: Long => A) { - if ((extraKeys & 1) == 1) f(0L) - if ((extraKeys & 2) == 2) f(Long.MinValue) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(k) - } - i += 1 - } - } - - /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A) { - if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) - if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(_values(i).asInstanceOf[V]) - } - i += 1 - } - } - - /** Creates a new `LongMap` with different values. - * Unlike `mapValues`, this method generates a new - * collection immediately. 
- */ - def mapValuesNow[V1](f: V => V1): LongMap[V1] = { - val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = new Array[AnyRef](_values.length) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) - lm - } - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - def transformValues(f: V => V): this.type = { - if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] - if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - this - } -} - -object LongMap { - private final val IndexMask = 0x3FFFFFFF - private final val MissingBit = 0x80000000 - private final val VacantBit = 0x40000000 - private final val MissVacant = 0xC0000000 - - private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString) - - implicit def canBuildFrom[V, U]: CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] = - new CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] { - def apply(from: LongMap[V]): LongMapBuilder[U] = apply() - def apply(): LongMapBuilder[U] = new LongMapBuilder[U] - } - - /** A builder for instances of `LongMap`. - * - * This builder can be reused to create multiple instances. - */ - final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] { - private[collection] var elems: LongMap[V] = new LongMap[V] - def +=(entry: (Long, V)): this.type = { - elems += entry - this - } - def clear() { elems = new LongMap[V] } - def result(): LongMap[V] = elems - } - - /** Creates a new `LongMap` with zero or more key/value pairs. */ - def apply[V](elems: (Long, V)*): LongMap[V] = { - val sz = if (elems.hasDefiniteSize) elems.size else 4 - val lm = new LongMap[V](sz * 2) - elems.foreach{ case (k,v) => lm(k) = v } - if (lm.size < (sz>>3)) lm.repack() - lm - } - - /** Creates a new empty `LongMap`. */ - def empty[V]: LongMap[V] = new LongMap[V] - - /** Creates a new empty `LongMap` with the supplied default */ - def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default) - - /** Creates a new `LongMap` from arrays of keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. - */ - def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { - val sz = math.min(keys.length, values.length) - val lm = new LongMap[V](sz * 2) - var i = 0 - while (i < sz) { lm(keys(i)) = values(i); i += 1 } - if (lm.size < (sz>>3)) lm.repack() - lm - } - - /** Creates a new `LongMap` from keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. 
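// Minimal usage sketch of the LongMap operations documented above, assuming a
// Scala 2.12-era standard library: `update` as the fastest way to add an entry,
// `getOrNull` where the null default is acceptable, and an explicit `repack()`
// after a build phase that mixed additions and removals.
import scala.collection.mutable.LongMap

object LongMapSketch extends App {
  val m = LongMap.empty[String]
  m(1L) = "one"            // update: fastest way to add an entry
  m(2L) = "two"
  m -= 2L                  // removal leaves a vacant slot behind
  m.repack()               // re-hash for lookup efficiency after removals
  println(m.getOrNull(1L)) // one
  println(m.getOrNull(2L)) // null (no exception, unlike `apply` on a missing key)
  println(m.contains(2L))  // false
}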
- */ - def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = { - val sz = math.min(keys.size, values.size) - val lm = new LongMap[V](sz * 2) - val ki = keys.iterator - val vi = values.iterator - while (ki.hasNext && vi.hasNext) lm(ki.next) = vi.next - if (lm.size < (sz >> 3)) lm.repack() - lm - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Map.scala b/tests/scala2-library/src/library/scala/collection/mutable/Map.scala deleted file mode 100644 index 460a8b8f77f8..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Map.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** A base trait for maps that can be mutated. - * $mapNote - * $mapTags - * @since 1.0 - * @author Matthias Zenger - */ -trait Map[K, V] - extends Iterable[(K, V)] -// with GenMap[K, V] - with scala.collection.Map[K, V] - with MapLike[K, V, Map[K, V]] { - - override def empty: Map[K, V] = Map.empty - - override def seq: Map[K, V] = this - - /** The same map with a given default function. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault(d: K => V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, d) - - /** The same map with a given default value. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue(d: V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, x => d) -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `HashMap`. - * @define coll mutable map - * @define Coll `mutable.Map` - */ -object Map extends MutableMapFactory[Map] { - /** $canBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V] - - def empty[K, V]: Map[K, V] = new HashMap[K, V] - - class WithDefault[K, V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault(underlying, d) with Map[K, V] { - override def += (kv: (K, V)) = {underlying += kv; this} - def -= (key: K) = {underlying -= key; this} - override def empty = new WithDefault(underlying.empty, d) - override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d) - override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - - /** If these methods aren't overridden to thread through the underlying map, - * successive calls to withDefault* have no effect. - */ - override def withDefault(d: K => V): mutable.Map[K, V] = new WithDefault[K, V](underlying, d) - override def withDefaultValue(d: V): mutable.Map[K, V] = new WithDefault[K, V](underlying, x => d) - } -} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
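// Minimal sketch of the `withDefault` wrapper documented above, assuming a
// Scala 2.12-era standard library: lookups of missing keys fall back to the
// supplied function, and, per the caveat in the doc, transformer methods such
// as `map` return a plain map that no longer carries the default.
import scala.collection.mutable

object WithDefaultSketch extends App {
  val counts = mutable.Map("a" -> 2).withDefault(_ => 0)
  counts("b") += 1  // reads the default 0, then stores "b" -> 1
  val (a, b, z) = (counts("a"), counts("b"), counts("z"))
  println(s"$a $b $z") // 2 1 0

  val doubled = counts.map { case (k, v) => k -> v * 2 }
  println(doubled.get("z")) // None -- the default was not preserved by `map`
}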
*/ -abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/MapBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/MapBuilder.scala deleted file mode 100644 index cfc3079f41cd..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/MapBuilder.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package mutable - -/** The canonical builder for immutable maps, working with the map's `+` method - * to add new elements. - * Collections are built from their `empty` element using this + method. - * - * @tparam A Type of the keys for the map this builder creates. - * @tparam B Type of the values for the map this builder creates. - * @tparam Coll The type of the actual collection this builder builds. - * @param empty The empty element of the collection. - * - * @since 2.8 - */ -class MapBuilder[A, B, Coll <: scala.collection.GenMap[A, B] with scala.collection.GenMapLike[A, B, Coll]](empty: Coll) -extends ReusableBuilder[(A, B), Coll] { - protected var elems: Coll = empty - def +=(x: (A, B)): this.type = { - elems = (elems + x).asInstanceOf[Coll] - // the cast is necessary because right now we cannot enforce statically that - // for every map of type Coll, `+` yields again a Coll. With better support - // for hk-types we might be able to enforce this in the future, though. - this - } - def clear() { elems = empty } - def result: Coll = elems -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/MapLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/MapLike.scala deleted file mode 100644 index b00a5c115ec6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/MapLike.scala +++ /dev/null @@ -1,264 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package mutable - -import generic._ -import scala.annotation.migration -import scala.collection.parallel.mutable.ParMap - -/** A template trait for mutable maps. - * $mapNote - * $mapTags - * @define Coll `mutable.Map` - * @define coll mutable map - * @since 2.8 - * - * @define mapNote - * '''Implementation note:''' - * This trait provides most of the operations of a mutable `Map` - * independently of its representation. It is typically inherited by - * concrete implementations of maps. - * - * To implement a concrete mutable map, you need to provide - * implementations of the following methods: - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def += (kv: (K, V)): This - * def -= (key: K): This - * }}} - * If you wish that methods like `take`, `drop`, `filter` also return the same kind of map - * you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. 
- */ -trait MapLike[K, V, +This <: MapLike[K, V, This] with Map[K, V]] - extends scala.collection.MapLike[K, V, This] - with Builder[(K, V), This] - with Growable[(K, V)] - with Shrinkable[K] - with Cloneable[This] - with Parallelizable[(K, V), ParMap[K, V]] -{ self => - - /** A common implementation of `newBuilder` for all mutable maps - * in terms of `empty`. - * - * Overrides `MapLike` implementation for better efficiency. - */ - override protected[this] def newBuilder: Builder[(K, V), This] = empty - - protected[this] override def parCombiner = ParMap.newCombiner[K, V] - - /** Converts this $coll to a sequence. - * - * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true. - */ - override def toSeq: collection.Seq[(K, V)] = { - // ArrayBuffer for efficiency, preallocated to the right size. - val result = new ArrayBuffer[(K, V)](size) - foreach(result += _) - result - } - - - /** Adds a new key/value pair to this map and optionally returns previously bound value. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key the key to update - * @param value the new value - * @return an option value containing the value associated with the key - * before the `put` operation was executed, or `None` if `key` - * was not defined in the map before. - */ - def put(key: K, value: V): Option[V] = { - val r = get(key) - update(key, value) - r - } - - /** Adds a new key/value pair to this map. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key The key to update - * @param value The new value - */ - def update(key: K, value: V) { this += ((key, value)) } - - /** Adds a new key/value pair to this map. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * @param kv the key/value pair. - * @return the map itself - */ - def += (kv: (K, V)): this.type - - /** Creates a new map consisting of all key/value pairs of the current map - * plus a new pair of a given key and value. - * - * @param key The key to add - * @param value The new value - * @return A fresh immutable map with the binding from `key` to - * `value` added to this map. - */ - override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value)) - - /** Creates a new map containing a new key/value mapping and all the key/value mappings - * of this map. - * - * Mapping `kv` will override existing mappings from this map with the same key. - * - * @param kv the key/value mapping to be added - * @return a new map containing mappings of this map and the mapping `kv`. - */ - @migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0") - def + [V1 >: V] (kv: (K, V1)): Map[K, V1] = clone().asInstanceOf[Map[K, V1]] += kv - - /** Creates a new map containing two or more key/value mappings and all the key/value - * mappings of this map. - * - * Specified mappings will override existing mappings from this map with the same keys. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new map containing mappings of this map and two or more specified mappings. - */ - @migration("`+` creates a new map. 
Use `+=` to add an element to this map and return that map itself.", "2.8.0") - override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): Map[K, V1] = - clone().asInstanceOf[Map[K, V1]] += elem1 += elem2 ++= elems - - /** Creates a new map containing the key/value mappings provided by the specified traversable object - * and all the key/value mappings of this map. - * - * Note that existing mappings from this map with the same key as those in `xs` will be overridden. - * - * @param xs the traversable object. - * @return a new map containing mappings of this map and those provided by `xs`. - */ - @migration("`++` creates a new map. Use `++=` to add an element to this map and return that map itself.", "2.8.0") - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = - clone().asInstanceOf[Map[K, V1]] ++= xs.seq - - /** Removes a key from this map, returning the value associated previously - * with that key as an option. - * @param key the key to be removed - * @return an option value containing the value associated previously with `key`, - * or `None` if `key` was not defined in the map before. - */ - def remove(key: K): Option[V] = { - val r = get(key) - this -= key - r - } - - /** Removes a key from this map. - * @param key the key to be removed - * @return the map itself. - */ - def -= (key: K): this.type - - /** Creates a new map with all the key/value mappings of this map except the key/value mapping - * with the specified key. - * - * @param key the key to be removed - * @return a new map with all the mappings of this map except that with a key `key`. - */ - @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0") - override def -(key: K): This = clone() -= key - - /** Removes all bindings from the map. After this operation has completed, - * the map will be empty. - */ - def clear() { keysIterator foreach -= } - - /** If given key is already in this map, returns associated value. - * - * Otherwise, computes value from given expression `op`, stores with key - * in map and returns that value. - * - * Concurrent map implementations may evaluate the expression `op` - * multiple times, or may evaluate `op` without inserting the result. - * - * @param key the key to test - * @param op the computation yielding the value to associate with `key`, if - * `key` is previously unbound. - * @return the value associated with key (either previously or as a result - * of executing the method). - */ - def getOrElseUpdate(key: K, op: => V): V = - get(key) match { - case Some(v) => v - case None => val d = op; this(key) = d; d - } - - /** Applies a transformation function to all values contained in this map. - * The transformation function produces new values from existing keys - * associated values. - * - * @param f the transformation to apply - * @return the map itself. - */ - def transform(f: (K, V) => V): this.type = { - this.iterator foreach { - case (key, value) => update(key, f(key, value)) - } - this - } - - /** Retains only those mappings for which the predicate - * `p` returns `true`. - * - * @param p The test predicate - */ - def retain(p: (K, V) => Boolean): this.type = { - for ((k, v) <- this.toList) // scala/bug#7269 toList avoids ConcurrentModificationException - if (!p(k, v)) this -= k - - this - } - - override def clone(): This = empty ++= repr - - /** The result when this map is used as a builder - * @return the map representation itself. 
- */ - def result: This = repr - - /** Creates a new map with all the key/value mappings of this map except mappings with keys - * equal to any of the two or more specified keys. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new map containing all the mappings of this map except mappings - * with a key equal to `elem1`, `elem2` or any of `elems`. - */ - @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0") - override def -(elem1: K, elem2: K, elems: K*): This = - clone() -= elem1 -= elem2 --= elems - - /** Creates a new map with all the key/value mappings of this map except mappings with keys - * equal to any of those provided by the specified traversable object. - * - * @param xs the traversable object. - * @return a new map with all the key/value mappings of this map except mappings - * with a key equal to a key from `xs`. - */ - @migration("`--` creates a new map. Use `--=` to remove an element from this map and return that map itself.", "2.8.0") - override def --(xs: GenTraversableOnce[K]): This = clone() --= xs.seq -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/MapProxy.scala b/tests/scala2-library/src/library/scala/collection/mutable/MapProxy.scala deleted file mode 100644 index 63b14d328a94..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/MapProxy.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** - * This trait implements a proxy for [[scala.collection.mutable.Map]]. - * - * It is most useful for assembling customized map abstractions - * dynamically using object composition and forwarding. 
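// Minimal sketch of the in-place MapLike operations documented above
// (`getOrElseUpdate`, `transform`, `retain`), assuming a Scala 2.12-era
// standard library; each mutates the map and returns it.
import scala.collection.mutable

object MapLikeSketch extends App {
  val m = mutable.Map("a" -> 1, "b" -> 2)
  m.getOrElseUpdate("c", 10)     // inserts "c" -> 10 because "c" was unbound
  m.transform((_, v) => v * 100) // rewrites every value in place
  m.retain((_, v) => v >= 200)   // keeps only "b" -> 200 and "c" -> 1000
  println(m)                     // e.g. Map(b -> 200, c -> 1000), iteration order unspecified
}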
- * - * @author Matthias Zenger, Martin Odersky - * @version 2.0, 31/12/2006 - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] { - private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] = - new MapProxy[A, B1] { val self = newSelf } - - override def repr = this - override def empty: MapProxy[A, B] = new MapProxy[A, B] { val self = MapProxy.this.self.empty } - override def updated [B1 >: B](key: A, value: B1) = newProxy(self.updated(key, value)) - - override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = newProxy(self + kv) - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*)) - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq) - override def -(key: A) = newProxy(self - key) - - override def += (kv: (A, B)) = { self += kv ; this } - override def -= (key: A) = { self -= key ; this } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/MultiMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/MultiMap.scala deleted file mode 100644 index ac2ebf31d8bd..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/MultiMap.scala +++ /dev/null @@ -1,118 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - - -/** A trait for mutable maps with multiple values assigned to a key. - * - * This class is typically used as a mixin. It turns maps which map `A` - * to `Set[B]` objects into multimaps that map `A` to `B` objects. - * - * @example {{{ - * // first import all necessary types from package `collection.mutable` - * import collection.mutable.{ HashMap, MultiMap, Set } - * - * // to create a `MultiMap` the easiest way is to mixin it into a normal - * // `Map` instance - * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String] - * - * // to add key-value pairs to a multimap it is important to use - * // the method `addBinding` because standard methods like `+` will - * // overwrite the complete key-value pair instead of adding the - * // value to the existing key - * mm.addBinding(1, "a") - * mm.addBinding(2, "b") - * mm.addBinding(1, "c") - * - * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))` - * - * // to check if the multimap contains a value there is method - * // `entryExists`, which allows to traverse the including set - * mm.entryExists(1, _ == "a") == true - * mm.entryExists(1, _ == "b") == false - * mm.entryExists(2, _ == "b") == true - * - * // to remove a previous added value there is the method `removeBinding` - * mm.removeBinding(1, "a") - * mm.entryExists(1, _ == "a") == false - * }}} - * - * @define coll multimap - * @define Coll `MultiMap` - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - */ -trait MultiMap[A, B] extends Map[A, Set[B]] { - /** Creates a new set. - * - * Classes that use this trait as a mixin can override this method - * to have the desired implementation of sets assigned to new keys. - * By default this is `HashSet`. - * - * @return An empty set of values of type `B`. - */ - protected def makeSet: Set[B] = new HashSet[B] - - /** Assigns the specified `value` to a specified `key`. 
If the key - * already has a binding to equal to `value`, nothing is changed; - * otherwise a new binding is added for that `key`. - * - * @param key The key to which to bind the new value. - * @param value The value to bind to the key. - * @return A reference to this multimap. - */ - def addBinding(key: A, value: B): this.type = { - get(key) match { - case None => - val set = makeSet - set += value - this(key) = set - case Some(set) => - set += value - } - this - } - - /** Removes the binding of `value` to `key` if it exists, otherwise this - * operation doesn't have any effect. - * - * If this was the last value assigned to the specified key, the - * set assigned to that key will be removed as well. - * - * @param key The key of the binding. - * @param value The value to remove. - * @return A reference to this multimap. - */ - def removeBinding(key: A, value: B): this.type = { - get(key) match { - case None => - case Some(set) => - set -= value - if (set.isEmpty) this -= key - } - this - } - - /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`. - * - * @param key The key for which the predicate is checked. - * @param p The predicate which a value assigned to the key must satisfy. - * @return A boolean if such a binding exists - */ - def entryExists(key: A, p: B => Boolean): Boolean = get(key) match { - case None => false - case Some(set) => set exists p - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/MutableList.scala b/tests/scala2-library/src/library/scala/collection/mutable/MutableList.scala deleted file mode 100644 index a333eedb1a59..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/MutableList.scala +++ /dev/null @@ -1,172 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ -import immutable.List - -/** - * This class is used internally to represent mutable lists. It is the - * basis for the implementation of the class `Queue`. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * @define Coll `mutable.MutableList` - * @define coll mutable list - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]] - * section on `Mutable Lists` for more information. - */ -@SerialVersionUID(5938451523372603072L) -class MutableList[A] -extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOptimized[A, MutableList[A]] - with GenericTraversableTemplate[A, MutableList] - with Builder[A, MutableList[A]] - with Serializable -{ - override def companion: GenericCompanion[MutableList] = MutableList - - override protected[this] def newBuilder: Builder[A, MutableList[A]] = new MutableList[A] - - protected var first0: LinkedList[A] = new LinkedList[A] - protected var last0: LinkedList[A] = first0 - protected var len: Int = 0 - - def toQueue = new Queue(first0, last0, len) - - /** Is the list empty? 
- */ - override def isEmpty = len == 0 - - /** Returns the first element in this list - */ - override def head: A = if (nonEmpty) first0.head else throw new NoSuchElementException - - /** Returns the rest of this list - */ - override def tail: MutableList[A] = { - val tl = new MutableList[A] - tailImpl(tl) - tl - } - - protected final def tailImpl(tl: MutableList[A]) { - require(nonEmpty, "tail of empty list") - tl.first0 = first0.tail - tl.len = len - 1 - tl.last0 = if (tl.len == 0) tl.first0 else last0 - } - - /** Prepends a single element to this list. This operation takes constant - * time. - * @param elem the element to prepend. - * @return this $coll. - */ - def +=: (elem: A): this.type = { prependElem(elem); this } - - /** Returns the length of this list. - */ - override def length: Int = len - - /** Returns the `n`-th element of this list. - * @throws IndexOutOfBoundsException if index does not exist. - */ - override def apply(n: Int): A = first0.apply(n) - - /** Updates the `n`-th element of this list to a new value. - * @throws IndexOutOfBoundsException if index does not exist. - */ - def update(n: Int, x: A): Unit = first0.update(n, x) - - /** Returns the `n`-th element of this list or `None` - * if index does not exist. - */ - def get(n: Int): Option[A] = first0.get(n) - - protected def prependElem(elem: A) { - first0 = new LinkedList[A](elem, first0) - if (len == 0) last0 = first0 - len = len + 1 - } - - protected def appendElem(elem: A) { - if (len == 0) { - prependElem(elem) - } else { - last0.next = new LinkedList[A] - last0 = last0.next - last0.elem = elem - last0.next = new LinkedList[A] // for performance, use sentinel `object` instead? - len = len + 1 - } - } - - /** Returns an iterator over up to `length` elements of this list. - */ - override def iterator: Iterator[A] = if (isEmpty) Iterator.empty else - new AbstractIterator[A] { - var elems = first0 - var count = len - def hasNext = count > 0 && elems.nonEmpty - def next() = { - if (!hasNext) throw new NoSuchElementException - count = count - 1 - val e = elems.elem - elems = if (count == 0) null else elems.next - e - } - } - - override def last = { - if (isEmpty) throw new NoSuchElementException("MutableList.empty.last") - last0.elem - } - - /** Returns an instance of [[scala.List]] containing the same - * sequence of elements. - */ - override def toList: List[A] = first0.toList - - /** Returns the current list of elements as a linked List - * sequence of elements. - */ - private[mutable] def toLinkedList: LinkedList[A] = first0 - - /** Appends a single element to this buffer. This takes constant time. - * - * @param elem the element to append. 
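// Illustrative sketch, not part of this patch: constant-time append and prepend on
// the MutableList removed above. `xs` is a hypothetical name.
import scala.collection.mutable.MutableList
val xs = MutableList(1, 2)
xs += 3                          // append, as documented just below
4 +=: xs                         // prepend
assert(xs.toList == List(4, 1, 2, 3))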
- */ - def +=(elem: A): this.type = { appendElem(elem); this } - - def clear() { - first0 = new LinkedList[A] - last0 = first0 - len = 0 - } - - def result = this - - override def clone(): MutableList[A] = { - val bf = newBuilder - bf ++= seq - bf.result() - } -} - -object MutableList extends SeqFactory[MutableList] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, MutableList[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, MutableList[A]] = new MutableList[A] -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ObservableBuffer.scala b/tests/scala2-library/src/library/scala/collection/mutable/ObservableBuffer.scala deleted file mode 100644 index 53d26f4c6f00..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ObservableBuffer.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import script._ - -/** This class is typically used as a mixin. It adds a subscription - * mechanism to the `Buffer` class into which this abstract - * class is mixed in. Class `ObservableBuffer` publishes - * events of the type `Message`. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 1 - */ -@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") -trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable] { - type Pub <: ObservableBuffer[A] - - abstract override def +=(element: A): this.type = { - super.+=(element) - publish(new Include(End, element) with Undoable { - def undo() { trimEnd(1) } - }) - this - } - - abstract override def ++=(xs: TraversableOnce[A]): this.type = { - for (x <- xs) this += x - this - } - - abstract override def +=:(element: A): this.type = { - super.+=:(element) - publish(new Include(Start, element) with Undoable { - def undo() { trimStart(1) } - }) - this - } - - abstract override def update(n: Int, newelement: A): Unit = { - val oldelement = apply(n) - super.update(n, newelement) - publish(new Update(Index(n), newelement) with Undoable { - def undo() { update(n, oldelement) } - }) - } - - abstract override def remove(n: Int): A = { - val oldelement = apply(n) - super.remove(n) - publish(new Remove(Index(n), oldelement) with Undoable { - def undo() { insert(n, oldelement) } - }) - oldelement - } - - abstract override def clear(): Unit = { - super.clear() - publish(new Reset with Undoable { - def undo() { throw new UnsupportedOperationException("cannot undo") } - }) - } - - abstract override def insertAll(n: Int, elems: scala.collection.Traversable[A]) { - super.insertAll(n, elems) - var curr = n - 1 - val msg = elems.foldLeft(new Script[A]() with Undoable { - def undo() { throw new UnsupportedOperationException("cannot undo") } - }) { - case (msg, elem) => - curr += 1 - msg += Include(Index(curr), elem) - } - publish(msg) - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ObservableMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/ObservableMap.scala deleted file mode 100644 index 222d0c993aec..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ObservableMap.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ 
\/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import script._ - - -/** This class is typically used as a mixin. It adds a subscription - * mechanism to the `Map` class into which this abstract - * class is mixed in. Class `ObservableMap` publishes - * events of the type `Message`. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.0, 31/12/2006 - * @since 1 - */ -@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") -trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable] { - - type Pub <: ObservableMap[A, B] - - abstract override def += (kv: (A, B)): this.type = { - val (key, value) = kv - - get(key) match { - case None => - super.+=(kv) - publish(new Include((key, value)) with Undoable { - def undo() = -=(key) - }) - case Some(old) => - super.+=(kv) - publish(new Update((key, value)) with Undoable { - def undo() = +=((key, old)) - }) - } - this - } - - abstract override def -= (key: A): this.type = { - get(key) match { - case None => - case Some(old) => - super.-=(key) - publish(new Remove((key, old)) with Undoable { - def undo = update(key, old) - }) - } - this - } - - abstract override def clear(): Unit = { - super.clear() - publish(new Reset with Undoable { - def undo(): Unit = throw new UnsupportedOperationException("cannot undo") - }) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ObservableSet.scala b/tests/scala2-library/src/library/scala/collection/mutable/ObservableSet.scala deleted file mode 100644 index d5f568fb4a7f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ObservableSet.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import script._ - -/** This class is typically used as a mixin. It adds a subscription - * mechanism to the `Set` class into which this abstract - * class is mixed in. Class `ObservableSet` publishes - * events of the type `Message`. 
- * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 1 - */ -@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") -trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable] { - - type Pub <: ObservableSet[A] - - abstract override def +=(elem: A): this.type = { - if (!contains(elem)) { - super.+=(elem) - publish(new Include(elem) with Undoable { def undo() = -=(elem) }) - } - this - } - - abstract override def -=(elem: A): this.type = { - if (contains(elem)) { - super.-=(elem) - publish(new Remove(elem) with Undoable { def undo() = +=(elem) }) - } - this - } - - abstract override def clear(): Unit = { - super.clear() - publish(new Reset with Undoable { - def undo(): Unit = throw new UnsupportedOperationException("cannot undo") - }) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/OpenHashMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/OpenHashMap.scala deleted file mode 100644 index b2e9ee27b940..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/OpenHashMap.scala +++ /dev/null @@ -1,275 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** - * @define Coll `OpenHashMap` - * @define coll open hash map - * - * @since 2.7 - */ -object OpenHashMap { - - def apply[K, V](elems : (K, V)*) = new OpenHashMap[K, V] ++= elems - def empty[K, V] = new OpenHashMap[K, V] - - /** A hash table entry. - * - * The entry is occupied if and only if its `value` is a `Some`; - * deleted if and only if its `value` is `None`. - * If its `key` is not the default value of type `Key`, the entry is occupied. - * If the entry is occupied, `hash` contains the hash value of `key`. - */ - final private class OpenEntry[Key, Value](var key: Key, - var hash: Int, - var value: Option[Value]) -} - -/** A mutable hash map based on an open hashing scheme. The precise scheme is - * undefined, but it should make a reasonable effort to ensure that an insert - * with consecutive hash codes is not unnecessarily penalised. In particular, - * mappings of consecutive integer keys should work without significant - * performance loss. - * - * @tparam Key type of the keys in this map. - * @tparam Value type of the values in this map. - * @param initialSize the initial size of the internal hash table. - * - * @author David MacIver - * @since 2.7 - * - * @define Coll `OpenHashMap` - * @define coll open hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class OpenHashMap[Key, Value](initialSize : Int) -extends AbstractMap[Key, Value] - with Map[Key, Value] - with MapLike[Key, Value, OpenHashMap[Key, Value]] { - - import OpenHashMap.OpenEntry - private type Entry = OpenEntry[Key, Value] - - /** A default constructor creates a hashmap with initial size `8`. - */ - def this() = this(8) - - override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value] - - private[this] val actualInitialSize = HashTable.nextPositivePowerOfTwo(initialSize) - - private var mask = actualInitialSize - 1 - - /** The hash table. - * - * The table's entries are initialized to `null`, indication of an empty slot. - * A slot is either deleted or occupied if and only if the entry is non-`null`. 
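// Illustrative sketch, not part of this patch: OpenHashMap is used like any other
// mutable Map; the occupied/deleted/empty slot bookkeeping described above stays
// internal. `m` is a hypothetical name.
import scala.collection.mutable.OpenHashMap
val m = OpenHashMap("a" -> 1, "b" -> 2)
m += ("c" -> 3)                  // insert
m -= "a"                         // marks the slot deleted; a later insert may reuse it
assert(m.get("b") == Some(2))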
- */ - private[this] var table = new Array[Entry](actualInitialSize) - - private var _size = 0 - private var deleted = 0 - - // Used for tracking inserts so that iterators can determine in concurrent modification has occurred. - private[this] var modCount = 0 - - override def size = _size - private[this] def size_=(s : Int) { _size = s } - - /** Returns a mangled hash code of the provided key. */ - protected def hashOf(key: Key) = { - var h = key.## - h ^= ((h >>> 20) ^ (h >>> 12)) - h ^ (h >>> 7) ^ (h >>> 4) - } - - /** Increase the size of the table. - * Copy only the occupied slots, effectively eliminating the deleted slots. - */ - private[this] def growTable() = { - val oldSize = mask + 1 - val newSize = 4 * oldSize - val oldTable = table - table = new Array[Entry](newSize) - mask = newSize - 1 - oldTable.foreach( entry => - if (entry != null && entry.value != None) - table(findIndex(entry.key, entry.hash)) = entry ) - deleted = 0 - } - - /** Return the index of the first slot in the hash table (in probe order) - * that is, in order of preference, either occupied by the given key, deleted, or empty. - * - * @param hash hash value for `key` - */ - private[this] def findIndex(key: Key, hash: Int): Int = { - var index = hash & mask - var j = 0 - - /** Index of the first slot containing a deleted entry, or -1 if none found yet. */ - var firstDeletedIndex = -1 - - var entry = table(index) - while (entry != null) { - if (entry.hash == hash && entry.key == key && entry.value != None) - return index - - if (firstDeletedIndex == -1 && entry.value == None) - firstDeletedIndex = index - - j += 1 - index = (index + j) & mask - entry = table(index) - } - - if (firstDeletedIndex == -1) index else firstDeletedIndex - } - - override def update(key: Key, value: Value) { - put(key, value) - } - - @deprecatedOverriding("+= should not be overridden in order to maintain consistency with put.", "2.11.0") - def += (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } - - @deprecatedOverriding("-= should not be overridden in order to maintain consistency with remove.", "2.11.0") - def -= (key: Key): this.type = { remove(key); this } - - override def put(key: Key, value: Value): Option[Value] = - put(key, hashOf(key), value) - - private def put(key: Key, hash: Int, value: Value): Option[Value] = { - if (2 * (size + deleted) > mask) growTable() - val index = findIndex(key, hash) - val entry = table(index) - if (entry == null) { - table(index) = new OpenEntry(key, hash, Some(value)) - modCount += 1 - size += 1 - None - } else { - val res = entry.value - if (entry.value == None) { - entry.key = key - entry.hash = hash - size += 1 - deleted -= 1 - modCount += 1 - } - entry.value = Some(value) - res - } - } - - /** Delete the hash table slot contained in the given entry. */ - @inline - private[this] def deleteSlot(entry: Entry) = { - entry.key = null.asInstanceOf[Key] - entry.hash = 0 - entry.value = None - - size -= 1 - deleted += 1 - } - - override def remove(key : Key): Option[Value] = { - val entry = table(findIndex(key, hashOf(key))) - if (entry != null && entry.value != None) { - val res = entry.value - deleteSlot(entry) - res - } else None - } - - def get(key : Key) : Option[Value] = { - val hash = hashOf(key) - var index = hash & mask - var entry = table(index) - var j = 0 - while(entry != null){ - if (entry.hash == hash && - entry.key == key){ - return entry.value - } - - j += 1 - index = (index + j) & mask - entry = table(index) - } - None - } - - /** An iterator over the elements of this map. 
Use of this iterator follows - * the same contract for concurrent modification as the foreach method. - * - * @return the iterator - */ - def iterator: Iterator[(Key, Value)] = new AbstractIterator[(Key, Value)] { - var index = 0 - val initialModCount = modCount - - private[this] def advance() { - if (initialModCount != modCount) sys.error("Concurrent modification") - while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 - } - - def hasNext = {advance(); index <= mask } - - def next = { - advance() - val result = table(index) - index += 1 - (result.key, result.value.get) - } - } - - override def clone() = { - val it = new OpenHashMap[Key, Value] - foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) - it - } - - /** Loop over the key, value mappings of this map. - * - * The behaviour of modifying the map during an iteration is as follows: - * - Deleting a mapping is always permitted. - * - Changing the value of mapping which is already present is permitted. - * - Anything else is not permitted. It will usually, but not always, throw an exception. - * - * @tparam U The return type of the specified function `f`, return result of which is ignored. - * @param f The function to apply to each key, value mapping. - */ - override def foreach[U](f : ((Key, Value)) => U) { - val startModCount = modCount - foreachUndeletedEntry(entry => { - if (modCount != startModCount) sys.error("Concurrent Modification") - f((entry.key, entry.value.get))} - ) - } - - private[this] def foreachUndeletedEntry(f : Entry => Unit){ - table.foreach(entry => if (entry != null && entry.value != None) f(entry)) - } - - override def transform(f : (Key, Value) => Value) = { - foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) - this - } - - override def retain(f : (Key, Value) => Boolean) = { - foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) - this - } - - override def stringPrefix = "OpenHashMap" -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/PriorityQueue.scala b/tests/scala2-library/src/library/scala/collection/mutable/PriorityQueue.scala deleted file mode 100644 index ed43ef6db96c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/PriorityQueue.scala +++ /dev/null @@ -1,524 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ - -/** This class implements priority queues using a heap. - * To prioritize elements of type A there must be an implicit - * Ordering[A] available at creation. - * - * Only the `dequeue` and `dequeueAll` methods will return elements in priority - * order (while removing elements from the heap). Standard collection methods - * including `drop`, `iterator`, and `toString` will remove or traverse the heap - * in whichever order seems most convenient. - * - * Therefore, printing a `PriorityQueue` will not reveal the priority order of - * the elements, though the highest-priority element will be printed first. 
To - * print the elements in order, one must duplicate the `PriorityQueue` (by using - * `clone`, for instance) and then dequeue them: - * - * @example {{{ - * val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) - * println(pq) // elements probably not in order - * println(pq.clone.dequeueAll) // prints Vector(7, 5, 3, 2, 1) - * }}} - * - * @tparam A type of the elements in this priority queue. - * @param ord implicit ordering used to compare the elements of type `A`. - * - * @author Matthias Zenger - * @version 1.0, 03/05/2004 - * @since 1 - * - * @define Coll PriorityQueue - * @define coll priority queue - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed class PriorityQueue[A](implicit val ord: Ordering[A]) - extends AbstractIterable[A] - with Iterable[A] - with GenericOrderedTraversableTemplate[A, PriorityQueue] - with IterableLike[A, PriorityQueue[A]] - with Growable[A] - with Builder[A, PriorityQueue[A]] - with Serializable - with scala.Cloneable -{ - import ord._ - - private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] with Serializable { - def p_size0 = size0 - def p_size0_=(s: Int) = size0 = s - def p_array = array - def p_ensureSize(n: Int) = super.ensureSize(n) - def p_swap(a: Int, b: Int) = super.swap(a, b) - } - - protected[this] override def newBuilder = PriorityQueue.newBuilder[A] - - private val resarr = new ResizableArrayAccess[A] - - resarr.p_size0 += 1 // we do not use array(0) - def length: Int = resarr.length - 1 // adjust length accordingly - override def size: Int = length - override def isEmpty: Boolean = resarr.p_size0 < 2 - override def repr = this - - def result = this - - override def orderedCompanion = PriorityQueue - - private def toA(x: AnyRef): A = x.asInstanceOf[A] - protected def fixUp(as: Array[AnyRef], m: Int): Unit = { - var k: Int = m - while (k > 1 && toA(as(k / 2)) < toA(as(k))) { - resarr.p_swap(k, k / 2) - k = k / 2 - } - } - - protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { - // returns true if any swaps were done (used in heapify) - var k: Int = m - while (n >= 2 * k) { - var j = 2 * k - if (j < n && toA(as(j)) < toA(as(j + 1))) - j += 1 - if (toA(as(k)) >= toA(as(j))) - return k != m - else { - val h = as(k) - as(k) = as(j) - as(j) = h - k = j - } - } - k != m - } - - /** Inserts a single element into the priority queue. - * - * @param elem the element to insert. - * @return this $coll. 
- */ - def +=(elem: A): this.type = { - resarr.p_ensureSize(resarr.p_size0 + 1) - resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] - fixUp(resarr.p_array, resarr.p_size0) - resarr.p_size0 += 1 - this - } - - override def ++=(xs: TraversableOnce[A]): this.type = { - val from = resarr.p_size0 - for (x <- xs) unsafeAdd(x) - heapify(from) - this - } - - private def unsafeAdd(elem: A): Unit = { - // like += but skips fixUp, which breaks the ordering invariant - // a series of unsafeAdds MUST be followed by heapify - resarr.p_ensureSize(resarr.p_size0 + 1) - resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] - resarr.p_size0 += 1 - } - - private def heapify(from: Int): Unit = { - // elements at indices 1..from-1 were already in heap order before any adds - // elements at indices from..n are newly added, their order must be fixed - val n = length - - if (from <= 2) { - // no pre-existing order to maintain, do the textbook heapify algorithm - for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n) - } - else if (n - from < 4) { - // for very small adds, doing the simplest fix is faster - for (i <- from to n) fixUp(resarr.p_array, i) - } - else { - var min = from/2 // tracks the minimum element in the queue - val queue = scala.collection.mutable.Queue[Int](min) - - // do fixDown on the parents of all the new elements - // except the parent of the first new element, which is in the queue - // (that parent is treated specially because it might be the root) - for (i <- n/2 until min by -1) { - if (fixDown(resarr.p_array, i, n)) { - // there was a swap, so also need to fixDown i's parent - val parent = i/2 - if (parent < min) { // make sure same parent isn't added twice - min = parent - queue += parent - } - } - } - - while (queue.nonEmpty) { - val i = queue.dequeue() - if (fixDown(resarr.p_array, i, n)) { - val parent = i/2 - if (parent < min && parent > 0) { - // the "parent > 0" is to avoid adding the parent of the root - min = parent - queue += parent - } - } - } - } - } - - /** Adds all elements provided by a `TraversableOnce` object - * into the priority queue. - * - * @param xs a traversable object. - * @return a new priority queue containing elements of both `xs` and `this`. - */ - def ++(xs: GenTraversableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs.seq } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - def enqueue(elems: A*): Unit = { this ++= elems } - - /** Returns the element with the highest priority in the queue, - * and removes this element from the queue. - * - * @throws java.util.NoSuchElementException - * @return the element with the highest priority. - */ - def dequeue(): A = - if (resarr.p_size0 > 1) { - resarr.p_size0 = resarr.p_size0 - 1 - val result = resarr.p_array(1) - resarr.p_array(1) = resarr.p_array(resarr.p_size0) - resarr.p_array(resarr.p_size0) = null // erase reference from array - fixDown(resarr.p_array, 1, resarr.p_size0 - 1) - toA(result) - } else - throw new NoSuchElementException("no element to remove from heap") - - def dequeueAll[A1 >: A, That](implicit bf: CanBuildFrom[_, A1, That]): That = { - val b = bf.apply() - while (nonEmpty) { - b += dequeue() - } - b.result() - } - - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. 
- */ - override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - def clear(): Unit = { resarr.p_size0 = 1 } - - /** Returns an iterator which yields all the elements. - * - * Note: The order of elements returned is undefined. - * If you want to traverse the elements in priority queue - * order, use `clone().dequeueAll.iterator`. - * - * @return an iterator over all the elements. - */ - override def iterator: Iterator[A] = new AbstractIterator[A] { - private var i = 1 - def hasNext: Boolean = i < resarr.p_size0 - def next(): A = { - val n = resarr.p_array(i) - i += 1 - toA(n) - } - } - - /** Returns the reverse of this priority queue. The new priority queue has - * the same elements as the original, but the opposite ordering. - * - * For example, the element with the highest priority in `pq` has the lowest - * priority in `pq.reverse`, and vice versa. - * - * Ties are handled arbitrarily. Elements with equal priority may or - * may not be reversed with respect to each other. - * - * @return the reversed priority queue. - */ - def reverse = { - val revq = new PriorityQueue[A]()(ord.reverse) - // copy the existing data into the new array backwards - // this won't put it exactly into the correct order, - // but will require less fixing than copying it in - // the original order - val n = resarr.p_size0 - revq.resarr.p_ensureSize(n) - revq.resarr.p_size0 = n - val from = resarr.p_array - val to = revq.resarr.p_array - for (i <- 1 until n) to(i) = from(n-i) - revq.heapify(1) - revq - } - - - /** Returns an iterator which yields all the elements in the reverse order - * than that returned by the method `iterator`. - * - * Note: The order of elements returned is undefined. - * - * @return an iterator over all elements sorted in descending order. - */ - def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private var i = resarr.p_size0 - 1 - def hasNext: Boolean = i >= 1 - def next(): A = { - val n = resarr.p_array(i) - i -= 1 - toA(n) - } - } - - /** The hashCode method always yields an error, since it is not - * safe to use mutable queues as keys in hash tables. - * - * @return never. - */ - override def hashCode(): Int = - throw new UnsupportedOperationException("unsuitable as hash key") - - /** Returns a regular queue containing the same elements. - * - * Note: the order of elements is undefined. - */ - def toQueue: Queue[A] = new Queue[A] ++= this.iterator - - /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. - */ - override def toString() = toList.mkString("PriorityQueue(", ", ", ")") - - /** Converts this $coll to a list. - * - * Note: the order of elements is undefined. - * - * @return a list containing all elements of this $coll. - */ - override def toList = this.iterator.toList - - /** This method clones the priority queue. - * - * @return a priority queue with the same elements. 
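// Illustrative sketch, not part of this patch: the clone().dequeueAll pattern that
// the scaladoc above recommends for reading elements in priority order without
// draining the queue. `pq` is a hypothetical name.
import scala.collection.mutable.PriorityQueue
val pq = PriorityQueue(1, 5, 3)
val inOrder = pq.clone().dequeueAll   // Vector(5, 3, 1): highest priority first
assert(pq.size == 3)                  // the original queue is left intact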
- */ - override def clone(): PriorityQueue[A] = { - val pq = new PriorityQueue[A] - val n = resarr.p_size0 - pq.resarr.p_ensureSize(n) - java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) - pq.resarr.p_size0 = n - pq - } -} - - -object PriorityQueue extends OrderedTraversableFactory[PriorityQueue] { - def newBuilder[A](implicit ord: Ordering[A]): Builder[A, PriorityQueue[A]] = { - new Builder[A, PriorityQueue[A]] { - val pq = new PriorityQueue[A] - def +=(elem: A): this.type = { pq.unsafeAdd(elem); this } - def result(): PriorityQueue[A] = { pq.heapify(1); pq } - def clear(): Unit = pq.clear() - } - } - - implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, PriorityQueue[A]] = new GenericCanBuildFrom[A] -} - - -/** This class servers as a proxy for priority queues. The - * elements of the queue have to be ordered in terms of the - * `Ordered[T]` class. - * - * @author Matthias Zenger - * @version 1.0, 03/05/2004 - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -sealed abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A] with Proxy { - def self: PriorityQueue[A] - - /** Creates a new iterator over all elements contained in this - * object. - * - * @return the new iterator - */ - override def iterator: Iterator[A] = self.iterator - - /** Returns the length of this priority queue. - */ - override def length: Int = self.length - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = self.isEmpty - - /** Inserts a single element into the priority queue. - * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = { self += elem; this } - - /** Adds all elements provided by an iterator into the priority queue. - * - * @param it an iterator - */ - override def ++=(it: TraversableOnce[A]): this.type = { - self ++= it - this - } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*): Unit = self ++= elems - - /** Returns the element with the highest priority in the queue, - * and removes this element from the queue. - * - * @return the element with the highest priority. - */ - override def dequeue(): A = self.dequeue() - - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - override def head: A = self.head - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = self.clear() - - /** Returns a regular queue containing the same elements. - */ - override def toQueue: Queue[A] = self.toQueue - - /** This method clones the priority queue. - * - * @return a priority queue with the same elements. - */ - override def clone(): PriorityQueue[A] = new PriorityQueueProxy[A] { - def self = PriorityQueueProxy.this.self.clone() - } -} - - -/** This class implements synchronized priority queues using a binary heap. - * The elements of the queue have to be ordered in terms of the `Ordered[T]` class. 
- * - * @tparam A type of the elements contained in this synchronized priority queue - * @param ord implicit ordering used to compared elements of type `A` - * - * @author Matthias Zenger - * @version 1.0, 03/05/2004 - * @since 1 - * @define Coll `SynchronizedPriorityQueue` - * @define coll synchronized priority queue - */ -@deprecated("Comprehensive synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentSkipListSet as an alternative.", "2.11.0") -sealed class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] { - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = synchronized { super.isEmpty } - - /** Inserts a single element into the priority queue. - * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = { - synchronized { - super.+=(elem) - } - this - } - - /** Adds all elements of a traversable object into the priority queue. - * - * @param xs a traversable object - */ - override def ++=(xs: TraversableOnce[A]): this.type = { - synchronized { - super.++=(xs) - } - this - } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*): Unit = synchronized { super.++=(elems) } - - /** Returns the element with the highest priority in the queue, - * and removes this element from the queue. - * - * @return the element with the highest priority. - */ - override def dequeue(): A = synchronized { super.dequeue() } - - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - override def head: A = synchronized { super.head } - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = synchronized { super.clear() } - - /** Returns an iterator which yield all the elements of the priority - * queue in descending priority order. - * - * @return an iterator over all elements sorted in descending order. - */ - override def iterator: Iterator[A] = synchronized { super.iterator } - - /** Checks if two queues are structurally identical. - * - * @return true, iff both queues contain the same sequence of elements. - */ - override def equals(that: Any): Boolean = synchronized { super.equals(that) } - - /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. - */ - override def toString(): String = synchronized { super.toString() } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Publisher.scala b/tests/scala2-library/src/library/scala/collection/mutable/Publisher.scala deleted file mode 100644 index 22bbea16efc3..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Publisher.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - - -/** `Publisher[A,This]` objects publish events of type `A` - * to all registered subscribers. When subscribing, a subscriber may specify - * a filter which can be used to constrain the number of events sent to the - * subscriber. 
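// Illustrative sketch, not part of this patch: wiring the Publisher below to a
// scala.collection.mutable.Subscriber. The Subscriber trait and its
// notify(pub, event) signature are assumed from the standard library (they are not
// shown in this file); `Ticker`, `fire` and `EvenOnly` are hypothetical names.
import scala.collection.mutable.{Publisher, Subscriber}

class Ticker extends Publisher[Int] {
  type Pub = Ticker                            // events are published as coming from a Ticker
  def fire(n: Int): Unit = publish(n)          // publish is protected, so expose it
}

object EvenOnly extends Subscriber[Int, Ticker] {
  def notify(pub: Ticker, event: Int): Unit = println(s"even tick: $event")
}

val t = new Ticker
t.subscribe(EvenOnly, _ % 2 == 0)              // filter: only even events get through
t.fire(3)                                       // filtered out
t.fire(4)                                       // EvenOnly is notified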
Subscribers may suspend their subscription, or reactivate a - * suspended subscription. Class `Publisher` is typically used - * as a mixin. The abstract type `Pub` models the type of the publisher itself. - * - * @tparam Evt type of the published event. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - */ -trait Publisher[Evt] { - - type Pub <: Publisher[Evt] - type Sub = Subscriber[Evt, Pub] - type Filter = Evt => Boolean - - /** The publisher itself of type `Pub`. Implemented by a cast from `this` here. - * Needs to be overridden if the actual publisher is different from `this`. - */ - protected val self: Pub = this.asInstanceOf[Pub] - - private val filters = new HashMap[Sub, Set[Filter]] with MultiMap[Sub, Filter] - private val suspended = new HashSet[Sub] - - def subscribe(sub: Sub) { subscribe(sub, event => true) } - def subscribe(sub: Sub, filter: Filter) { filters.addBinding(sub, filter) } - def suspendSubscription(sub: Sub) { suspended += sub } - def activateSubscription(sub: Sub) { suspended -= sub } - def removeSubscription(sub: Sub) { filters -= sub } - def removeSubscriptions() { filters.clear() } - - protected def publish(event: Evt) { - filters.keys.foreach(sub => - if (!suspended.contains(sub) && - filters.entryExists(sub, p => p(event))) - sub.notify(self, event) - ) - } - - /** Checks if two publishers are structurally identical. - * - * @return true, iff both publishers contain the same sequence of elements. - */ - override def equals(obj: Any): Boolean = obj match { - case that: Publisher[_] => filters == that.filters && suspended == that.suspended - case _ => false - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Queue.scala b/tests/scala2-library/src/library/scala/collection/mutable/Queue.scala deleted file mode 100644 index fd5fe9aecc9c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Queue.scala +++ /dev/null @@ -1,197 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#queues "Scala's Collection Library overview"]] - * section on `Queues` for more information. - * - * @define Coll `mutable.Queue` - * @define coll mutable queue - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class Queue[A] -extends MutableList[A] - with LinearSeqOptimized[A, Queue[A]] - with GenericTraversableTemplate[A, Queue] - with Cloneable[Queue[A]] - with Serializable -{ - override def companion: GenericCompanion[Queue] = Queue - - override protected[this] def newBuilder = companion.newBuilder[A] - - private[mutable] def this(fst: LinkedList[A], lst: LinkedList[A], lng: Int) { - this() - first0 = fst - last0 = lst - len = lng - } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - def enqueue(elems: A*): Unit = this ++= elems - - /** Returns the first element in the queue, and removes this element - * from the queue. 
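// Illustrative sketch, not part of this patch: FIFO behaviour of the Queue removed
// above -- enqueue appends at the end, dequeue removes from the front. `q` is a
// hypothetical name.
import scala.collection.mutable.Queue
val q = Queue[Int]()
q.enqueue(1, 2, 3)
assert(q.dequeue() == 1)                 // first in, first out
assert(q.dequeueFirst(_ > 2) == Some(3)) // removes the first element matching a predicate
assert(q.toList == List(2))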
- * - * @throws java.util.NoSuchElementException - * @return the first element of the queue. - */ - def dequeue(): A = - if (isEmpty) - throw new NoSuchElementException("queue empty") - else { - val res = first0.elem - first0 = first0.next - decrementLength() - res - } - - /** Returns the first element in the queue which satisfies the - * given predicate, and removes this element from the queue. - * - * @param p the predicate used for choosing the first element - * @return the first element of the queue for which p yields true - */ - def dequeueFirst(p: A => Boolean): Option[A] = - if (isEmpty) - None - else if (p(first0.elem)) { - val res: Option[A] = Some(first0.elem) - first0 = first0.next - decrementLength() - res - } else { - val optElem = removeFromList(p) - if (optElem != None) decrementLength() - optElem - } - - private def removeFromList(p: A => Boolean): Option[A] = { - var leftlst = first0 - var res: Option[A] = None - while (leftlst.next.nonEmpty && !p(leftlst.next.elem)) { - leftlst = leftlst.next - } - if (leftlst.next.nonEmpty) { - res = Some(leftlst.next.elem) - if (leftlst.next eq last0) last0 = leftlst - leftlst.next = leftlst.next.next - } - res - } - - /** Returns all elements in the queue which satisfy the - * given predicate, and removes those elements from the queue. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. - */ - def dequeueAll(p: A => Boolean): Seq[A] = { - if (first0.isEmpty) - Seq.empty - else { - val res = new ArrayBuffer[A] - while ((first0.nonEmpty) && p(first0.elem)) { - res += first0.elem - first0 = first0.next - decrementLength() - } - if (first0.isEmpty) res - else removeAllFromList(p, res) - } - } - - private def removeAllFromList(p: A => Boolean, res: ArrayBuffer[A]): ArrayBuffer[A] = { - var leftlst = first0 - while (leftlst.next.nonEmpty) { - if (p(leftlst.next.elem)) { - res += leftlst.next.elem - if (leftlst.next eq last0) last0 = leftlst - leftlst.next = leftlst.next.next - decrementLength() - } else leftlst = leftlst.next - } - res - } - - /** Return the proper suffix of this list which starts with the first element that satisfies `p`. - * That element is unlinked from the list. If no element satisfies `p`, return None. - */ - @deprecated("extractFirst inappropriately exposes implementation details. Use dequeue or dequeueAll.", "2.11.0") - def extractFirst(start: LinkedList[A], p: A => Boolean): Option[LinkedList[A]] = { - if (isEmpty) None - else { - var cell = start - while ((cell.next.nonEmpty) && !p(cell.next.elem)) { - cell = cell.next - } - if (cell.next.isEmpty) - None - else { - val res: Option[LinkedList[A]] = Some(cell.next) - cell.next = cell.next.next - decrementLength() - res - } - } - } - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. - */ - def front: A = head - - - // TODO - Don't override this just for new to create appropriate type.... 
- override def tail: Queue[A] = { - val tl = new Queue[A] - tailImpl(tl) - tl - } - - override def clone(): Queue[A] = { - val bf = newBuilder - bf ++= seq - bf.result() - } - - private[this] def decrementLength() { - len -= 1 - if (len == 0) last0 = first0 - } -} - - -object Queue extends SeqFactory[Queue] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, Queue[A]] = new MutableList[A] mapResult { _.toQueue } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/QueueProxy.scala b/tests/scala2-library/src/library/scala/collection/mutable/QueueProxy.scala deleted file mode 100644 index e780cc2cf05f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/QueueProxy.scala +++ /dev/null @@ -1,99 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * @tparam A type of the elements in this queue proxy. - * - * @author Matthias Zenger - * @version 1.1, 03/05/2004 - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait QueueProxy[A] extends Queue[A] with Proxy { - - def self: Queue[A] - - /** Access element number `n`. - * - * @return the element at index `n`. - */ - override def apply(n: Int): A = self.apply(n) - - /** Returns the length of this queue. - */ - override def length: Int = self.length - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = self.isEmpty - - /** Inserts a single element at the end of the queue. - * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = { self += elem; this } - - /** Adds all elements provided by an iterator at the end of the queue. The - * elements are prepended in the order they are given out by the iterator. - * - * @param it an iterator - */ - override def ++=(it: TraversableOnce[A]): this.type = { - self ++= it - this - } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*) { self ++= elems } - - /** Returns the first element in the queue, and removes this element - * from the queue. - * - * @return the first element of the queue. - */ - override def dequeue(): A = self.dequeue() - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. - */ - override def front: A = self.front - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = self.clear() - - /** Returns an iterator over all elements on the queue. - * - * @return an iterator over all queue elements. - */ - override def iterator: Iterator[A] = self.iterator - - /** This method clones the queue. - * - * @return a queue with the same elements. 
- */ - override def clone(): Queue[A] = new QueueProxy[A] { - def self = QueueProxy.this.self.clone() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/RedBlackTree.scala b/tests/scala2-library/src/library/scala/collection/mutable/RedBlackTree.scala deleted file mode 100644 index e4793242bfa3..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/RedBlackTree.scala +++ /dev/null @@ -1,580 +0,0 @@ -package scala.collection.mutable - -import scala.annotation.tailrec -import scala.collection.Iterator - -/** - * An object containing the red-black tree implementation used by mutable `TreeMaps`. - * - * The trees implemented in this object are *not* thread safe. - * - * @author Rui Gonçalves - * @version 2.12 - * @since 2.12 - */ -private[collection] object RedBlackTree { - - // ---- class structure ---- - - // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node. - // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size. - // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) - // on the size of the range. - - @SerialVersionUID(21575944040195605L) - final class Tree[A, B](var root: Node[A, B], var size: Int) extends Serializable - - @SerialVersionUID(1950599696441054720L) - final class Node[A, B](var key: A, var value: B, var red: Boolean, - var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) extends Serializable { - - override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" - } - - object Tree { - def empty[A, B]: Tree[A, B] = new Tree(null, 0) - } - - object Node { - - @inline def apply[A, B](key: A, value: B, red: Boolean, - left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = - new Node(key, value, red, left, right, parent) - - @inline def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = - new Node(key, value, red, null, null, parent) - - def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) - } - - // ---- getters ---- - - def isRed(node: Node[_, _]) = (node ne null) && node.red - def isBlack(node: Node[_, _]) = (node eq null) || !node.red - - // ---- size ---- - - def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right) - def size(tree: Tree[_, _]): Int = tree.size - def isEmpty(tree: Tree[_, _]) = tree.root eq null - def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 } - - // ---- search ---- - - def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match { - case null => None - case node => Some(node.value) - } - - @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = - if (node eq null) null - else { - val cmp = ord.compare(key, node.key) - if (cmp < 0) getNode(node.left, key) - else if (cmp > 0) getNode(node.right, key) - else node - } - - def contains[A: Ordering](tree: Tree[A, _], key: A) = getNode(tree.root, key) ne null - - def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { - case null => None - case node => Some((node.key, node.value)) - } - - def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match { - case null => None - case node => Some(node.key) - } - - private def minNode[A, B](node: Node[A, B]): Node[A, B] = - if 
(node eq null) null else minNodeNonNull(node) - - @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = - if (node.left eq null) node else minNodeNonNull(node.left) - - def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match { - case null => None - case node => Some((node.key, node.value)) - } - - def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match { - case null => None - case node => Some(node.key) - } - - private def maxNode[A, B](node: Node[A, B]): Node[A, B] = - if (node eq null) null else maxNodeNonNull(node) - - @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = - if (node.right eq null) node else maxNodeNonNull(node.right) - - /** - * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such - * node. - */ - def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = - minNodeAfter(tree.root, key) match { - case null => None - case node => Some((node.key, node.value)) - } - - def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = - minNodeAfter(tree.root, key) match { - case null => None - case node => Some(node.key) - } - - private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { - if (node eq null) null - else { - var y: Node[A, B] = null - var x = node - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - if (cmp <= 0) y else successor(y) - } - } - - /** - * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node. - */ - def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = - maxNodeBefore(tree.root, key) match { - case null => None - case node => Some((node.key, node.value)) - } - - def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = - maxNodeBefore(tree.root, key) match { - case null => None - case node => Some(node.key) - } - - private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { - if (node eq null) null - else { - var y: Node[A, B] = null - var x = node - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - if (cmp > 0) y else predecessor(y) - } - } - - // ---- insertion ---- - - def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { - var y: Node[A, B] = null - var x = tree.root - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - - if (cmp == 0) y.value = value - else { - val z = Node.leaf(key, value, red = true, y) - - if (y eq null) tree.root = z - else if (cmp < 0) y.left = z - else y.right = z - - fixAfterInsert(tree, z) - tree.size += 1 - } - } - - private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = { - var z = node - while (isRed(z.parent)) { - if (z.parent eq z.parent.parent.left) { - val y = z.parent.parent.right - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.right) { - z = z.parent - rotateLeft(tree, z) - } - z.parent.red = false - z.parent.parent.red = true - rotateRight(tree, z.parent.parent) - } - } else { // symmetric cases - val y = z.parent.parent.left - if (isRed(y)) { 
- z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.left) { - z = z.parent - rotateRight(tree, z) - } - z.parent.red = false - z.parent.parent.red = true - rotateLeft(tree, z.parent.parent) - } - } - } - tree.root.red = false - } - - // ---- deletion ---- - - def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = { - val z = getNode(tree.root, key) - if (z ne null) { - var y = z - var yIsRed = y.red - var x: Node[A, B] = null - var xParent: Node[A, B] = null - - if (z.left eq null) { - x = z.right - transplant(tree, z, z.right) - xParent = z.parent - } - else if (z.right eq null) { - x = z.left - transplant(tree, z, z.left) - xParent = z.parent - } - else { - y = minNodeNonNull(z.right) - yIsRed = y.red - x = y.right - - if (y.parent eq z) xParent = y - else { - xParent = y.parent - transplant(tree, y, y.right) - y.right = z.right - y.right.parent = y - } - transplant(tree, z, y) - y.left = z.left - y.left.parent = y - y.red = z.red - } - - if (!yIsRed) fixAfterDelete(tree, x, xParent) - tree.size -= 1 - } - } - - private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = { - var x = node - var xParent = parent - while ((x ne tree.root) && isBlack(x)) { - if (x eq xParent.left) { - var w = xParent.right - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - rotateLeft(tree, xParent) - w = xParent.right - } - if (isBlack(w.left) && isBlack(w.right)) { - w.red = true - x = xParent - } else { - if (isBlack(w.right)) { - w.left.red = false - w.red = true - rotateRight(tree, w) - w = xParent.right - } - w.red = xParent.red - xParent.red = false - w.right.red = false - rotateLeft(tree, xParent) - x = tree.root - } - } else { // symmetric cases - var w = xParent.left - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - rotateRight(tree, xParent) - w = xParent.left - } - if (isBlack(w.right) && isBlack(w.left)) { - w.red = true - x = xParent - } else { - if (isBlack(w.left)) { - w.right.red = false - w.red = true - rotateLeft(tree, w) - w = xParent.left - } - w.red = xParent.red - xParent.red = false - w.left.red = false - rotateRight(tree, xParent) - x = tree.root - } - } - xParent = x.parent - } - if (x ne null) x.red = false - } - - // ---- helpers ---- - - /** - * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, - * therefore, the last node), this method returns `null`. - */ - private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = { - if (node.right ne null) minNodeNonNull(node.right) - else { - var x = node - var y = x.parent - while ((y ne null) && (x eq y.right)) { - x = y - y = y.parent - } - y - } - } - - /** - * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is, - * therefore, the first node), this method returns `null`. 
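// Illustrative sketch, not part of this patch: exercising the collection-internal
// API of this tree. The object is private[collection], so code like this only
// compiles inside the scala.collection package (e.g. in mutable.TreeMap); `t` and
// the `RB` alias are hypothetical.
import scala.collection.mutable.{RedBlackTree => RB}
val t = RB.Tree.empty[Int, String]
RB.insert(t, 2, "two")
RB.insert(t, 1, "one")
assert(RB.get(t, 1) == Some("one"))
assert(RB.iterator(t).toList == List(1 -> "one", 2 -> "two"))  // in-order traversal
assert(RB.isValid(t))                                          // invariants hold after inserts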
- */ - private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = { - if (node.left ne null) maxNodeNonNull(node.left) - else { - var x = node - var y = x.parent - while ((y ne null) && (x eq y.left)) { - x = y - y = y.parent - } - y - } - } - - private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { - // assert(x.right ne null) - val y = x.right - x.right = y.left - - if (y.left ne null) y.left.parent = x - y.parent = x.parent - - if (x.parent eq null) tree.root = y - else if (x eq x.parent.left) x.parent.left = y - else x.parent.right = y - - y.left = x - x.parent = y - } - - private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { - // assert(x.left ne null) - val y = x.left - x.left = y.right - - if (y.right ne null) y.right.parent = x - y.parent = x.parent - - if (x.parent eq null) tree.root = y - else if (x eq x.parent.right) x.parent.right = y - else x.parent.left = y - - y.right = x - x.parent = y - } - - /** - * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous - * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. - */ - private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = { - if (to.parent eq null) tree.root = from - else if (to eq to.parent.left) to.parent.left = from - else to.parent.right = from - - if (from ne null) from.parent = to.parent - } - - // ---- tree traversal ---- - - def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f) - - private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = - if (node ne null) foreachNodeNonNull(node, f) - - private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = { - if (node.left ne null) foreachNodeNonNull(node.left, f) - f((node.key, node.value)) - if (node.right ne null) foreachNodeNonNull(node.right, f) - } - - def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = foreachNodeKey(tree.root, f) - - private[this] def foreachNodeKey[A, U](node: Node[A, _], f: A => U): Unit = - if (node ne null) foreachNodeKeyNonNull(node, f) - - private[this] def foreachNodeKeyNonNull[A, U](node: Node[A, _], f: A => U): Unit = { - if (node.left ne null) foreachNodeKeyNonNull(node.left, f) - f(node.key) - if (node.right ne null) foreachNodeKeyNonNull(node.right, f) - } - - def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) - - private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = - if (node ne null) transformNodeNonNull(node, f) - - private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = { - if (node.left ne null) transformNodeNonNull(node.left, f) - node.value = f(node.key, node.value) - if (node.right ne null) transformNodeNonNull(node.right, f) - } - - def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = - new EntriesIterator(tree, start, end) - - def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = - new KeysIterator(tree, start, end) - - def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = - new ValuesIterator(tree, start, end) - - private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: 
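The `rotateLeft`/`rotateRight` methods above are the standard local restructurings that change node depths while preserving the in-order key sequence. A compact sketch of the left rotation on a toy node type (illustrative only):

```scala
// Sketch of a left rotation: y = x.right is lifted above x, and y's former
// left subtree becomes x's right subtree.
object RotationSketch {
  final class N(val key: Int) {
    var left: N = null; var right: N = null; var parent: N = null
    override def toString = s"N($key)"
  }

  // Returns the new local root (the old right child).
  def rotateLeft(x: N): N = {
    val y = x.right
    require(y != null, "left rotation needs a right child")
    x.right = y.left
    if (y.left != null) y.left.parent = x
    y.parent = x.parent
    if (x.parent != null) {
      if (x eq x.parent.left) x.parent.left = y else x.parent.right = y
    }
    y.left = x
    x.parent = y
    y
  }

  def main(args: Array[String]): Unit = {
    val a = new N(1); val b = new N(2); val c = new N(3)
    a.right = b; b.parent = a
    b.right = c; c.parent = b
    val newRoot = rotateLeft(a)   // b becomes the root; 1 < 2 < 3 is preserved
    println((newRoot, newRoot.left, newRoot.right)) // (N(2),N(1),N(3))
  }
}
```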
Option[A]) - (implicit ord: Ordering[A]) extends Iterator[R] { - - protected[this] def nextResult(node: Node[A, B]): R - - def hasNext: Boolean = nextNode ne null - - def next(): R = nextNode match { - case null => throw new NoSuchElementException("next on empty iterator") - case node => - nextNode = successor(node) - setNullIfAfterEnd() - nextResult(node) - } - - private[this] var nextNode: Node[A, B] = start match { - case None => minNode(tree.root) - case Some(from) => minNodeAfter(tree.root, from) - } - - private[this] def setNullIfAfterEnd(): Unit = - if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0) - nextNode = null - - setNullIfAfterEnd() - } - - private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, (A, B)](tree, start, end) { - - def nextResult(node: Node[A, B]) = (node.key, node.value) - } - - private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, A](tree, start, end) { - - def nextResult(node: Node[A, B]) = node.key - } - - private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, B](tree, start, end) { - - def nextResult(node: Node[A, B]) = node.value - } - - // ---- debugging ---- - - /** - * Checks if the tree is in a valid state. That happens if: - * - It is a valid binary search tree; - * - All red-black properties are satisfied; - * - All non-null nodes have their `parent` reference correct; - * - The size variable in `tree` corresponds to the actual size of the tree. - */ - def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean = - isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size - - /** - * Returns true if all non-null nodes have their `parent` reference correct. - */ - private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = { - - def hasProperParentRefs(node: Node[A, B]): Boolean = { - if (node eq null) true - else { - if ((node.left ne null) && (node.left.parent ne node) || - (node.right ne null) && (node.right.parent ne node)) false - else hasProperParentRefs(node.left) && hasProperParentRefs(node.right) - } - } - - if(tree.root eq null) true - else (tree.root.parent eq null) && hasProperParentRefs(tree.root) - } - - /** - * Returns true if this node follows the properties of a binary search tree. - */ - private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = { - if (node eq null) true - else { - if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) || - (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false - else isValidBST(node.left) && isValidBST(node.right) - } - } - - /** - * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red - * nodes are black and if the path from any node to any of its null children has the same number of black nodes. 
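The `isValidBST` debug check above compares each node only with its immediate children; a stricter textbook variant threads lower and upper bounds through the recursion. A short sketch of that variant, assuming plain `Int` keys (this is not code from the patch):

```scala
// Sketch: validate the BST ordering property by carrying exclusive bounds
// down the tree instead of only comparing a node with its direct children.
object BstCheckSketch {
  final case class Node(key: Int, left: Node, right: Node)

  def isBst(node: Node,
            lo: Option[Int] = None,
            hi: Option[Int] = None): Boolean =
    node == null || {
      lo.forall(_ < node.key) &&
      hi.forall(_ > node.key) &&
      isBst(node.left, lo, Some(node.key)) &&
      isBst(node.right, Some(node.key), hi)
    }

  def main(args: Array[String]): Unit = {
    val ok  = Node(4, Node(2, null, null), Node(6, Node(5, null, null), null))
    val bad = Node(4, Node(2, null, Node(9, null, null)), null) // 9 sits in the left subtree of 4
    println(isBst(ok))  // true
    println(isBst(bad)) // false, even though 9 > its direct parent 2
  }
}
```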
- */ - private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = { - - def noRedAfterRed(node: Node[A, B]): Boolean = { - if (node eq null) true - else if (node.red && (isRed(node.left) || isRed(node.right))) false - else noRedAfterRed(node.left) && noRedAfterRed(node.right) - } - - def blackHeight(node: Node[A, B]): Int = { - if (node eq null) 1 - else { - val lh = blackHeight(node.left) - val rh = blackHeight(node.right) - - if (lh == -1 || lh != rh) -1 - else if (isRed(node)) lh - else lh + 1 - } - } - - isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0 - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ResizableArray.scala b/tests/scala2-library/src/library/scala/collection/mutable/ResizableArray.scala deleted file mode 100644 index 50d3513784ad..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ResizableArray.scala +++ /dev/null @@ -1,129 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ - -/** This class is used internally to implement data structures that - * are based on resizable arrays. - * - * @tparam A type of the elements contained in this resizable array. - * - * @author Matthias Zenger, Burak Emir - * @author Martin Odersky - * @version 2.8 - * @since 1 - */ -trait ResizableArray[A] extends IndexedSeq[A] - with GenericTraversableTemplate[A, ResizableArray] - with IndexedSeqOptimized[A, ResizableArray[A]] { - - override def companion: GenericCompanion[ResizableArray] = ResizableArray - - protected def initialSize: Int = 16 - protected var array: Array[AnyRef] = new Array[AnyRef](math.max(initialSize, 1)) - protected var size0: Int = 0 - - //########################################################################## - // implement/override methods of IndexedSeq[A] - - /** Returns the length of this resizable array. - */ - def length: Int = size0 - - def apply(idx: Int) = { - if (idx >= size0) throw new IndexOutOfBoundsException(idx.toString) - array(idx).asInstanceOf[A] - } - - def update(idx: Int, elem: A) { - if (idx >= size0) throw new IndexOutOfBoundsException(idx.toString) - array(idx) = elem.asInstanceOf[AnyRef] - } - - override def foreach[U](f: A => U) { - var i = 0 - // size is cached here because profiling reports a lot of time spent calling - // it on every iteration. I think it's likely a profiler ghost but it doesn't - // hurt to lift it into a local. - val top = size - while (i < top) { - f(array(i).asInstanceOf[A]) - i += 1 - } - } - - /** Fills the given array `xs` with at most `len` elements of this - * traversable starting at position `start`. - * - * Copying will stop once either the end of the current traversable is - * reached or `len` elements have been copied or the end of the array - * is reached. - * - * @param xs the array to fill. - * @param start starting index. - * @param len number of elements to copy - */ - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - val len1 = len min (xs.length - start) min length - if (len1 > 0) Array.copy(array, 0, xs, start, len1) - } - - //########################################################################## - - /** Remove elements of this array at indices after `sz`. 
- */ - def reduceToSize(sz: Int) { - require(sz <= size0) - while (size0 > sz) { - size0 -= 1 - array(size0) = null - } - } - - /** Ensure that the internal array has at least `n` cells. */ - protected def ensureSize(n: Int) { - // Use a Long to prevent overflows - val arrayLength: Long = array.length - if (n > arrayLength) { - var newSize: Long = arrayLength * 2 - while (n > newSize) - newSize = newSize * 2 - // Clamp newSize to Int.MaxValue - if (newSize > Int.MaxValue) newSize = Int.MaxValue - - val newArray: Array[AnyRef] = new Array(newSize.toInt) - java.lang.System.arraycopy(array, 0, newArray, 0, size0) - array = newArray - } - } - - /** Swap two elements of this array. - */ - protected def swap(a: Int, b: Int) { - val h = array(a) - array(a) = array(b) - array(b) = h - } - - /** Move parts of the array. - */ - protected def copy(m: Int, n: Int, len: Int) { - scala.compat.Platform.arraycopy(array, m, array, n, len) - } -} - -object ResizableArray extends SeqFactory[ResizableArray] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ResizableArray[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, ResizableArray[A]] = new ArrayBuffer[A] -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/ReusableBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/ReusableBuilder.scala deleted file mode 100644 index dee2cd6393db..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/ReusableBuilder.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package mutable - -/** `ReusableBuilder` is a marker trait that indicates that a `Builder` - * can be reused to build more than one instance of a collection. In - * particular, calling `result` followed by `clear` will produce a - * collection and reset the builder to begin building a new collection - * of the same type. - * - * It is up to subclasses to implement this behavior, and to document any - * other behavior that varies from standard `ReusableBuilder` usage - * (e.g. operations being well-defined after a call to `result`, or allowing - * multiple calls to result to obtain different snapshots of a collection under - * construction). - * - * @tparam Elem the type of elements that get added to the builder. - * @tparam To the type of collection that it produced. - * - * @since 2.12 - */ -trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] { - /** Clears the contents of this builder. - * After execution of this method, the builder will contain no elements. - * - * If executed immediately after a call to `result`, this allows a new - * instance of the same type of collection to be built. - */ - override def clear(): Unit // Note: overriding for Scaladoc only! - - /** Produces a collection from the added elements. - * - * After a call to `result`, the behavior of all other methods is undefined - * save for `clear`. If `clear` is called, then the builder is reset and - * may be used to build another instance. - * - * @return a collection containing the elements added to this builder. - */ - override def result(): To // Note: overriding for Scaladoc only! 
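The `ensureSize` method above grows the backing array geometrically, computing the candidate capacity in a `Long` so the repeated doubling cannot overflow, and clamping the result to `Int.MaxValue`. A standalone sketch of just that growth policy (the `newCapacity` helper is invented here):

```scala
// Sketch of the growth policy: double until the request fits, using Long
// arithmetic to avoid overflow, then clamp to Int.MaxValue.
object GrowthSketch {
  def newCapacity(current: Int, needed: Int): Int = {
    val cur: Long = current
    if (needed <= cur) current
    else {
      var size: Long = math.max(cur, 1L) * 2
      while (needed > size) size *= 2
      math.min(size, Int.MaxValue.toLong).toInt
    }
  }

  def main(args: Array[String]): Unit = {
    println(newCapacity(16, 10))                // 16 (already large enough)
    println(newCapacity(16, 100))               // 128
    println(newCapacity(1 << 30, Int.MaxValue)) // 2147483647 (clamped)
  }
}
```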
-} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/RevertibleHistory.scala b/tests/scala2-library/src/library/scala/collection/mutable/RevertibleHistory.scala deleted file mode 100644 index 725a8113ec5e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/RevertibleHistory.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - - -/** A revertible history is a `History` object which supports - * an undo operation. Type variable `Evt` refers to the type - * of the published events, `Pub` denotes the publisher type. - * Type `Pub` is typically a subtype of `Publisher`. - * - * @tparam Evt type of the events - * @tparam Pub type of the publisher - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 2.8 - */ -class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Undoable with Serializable { - - /** Rollback the full history. - */ - def undo(): Unit = { - val old = log.toList.reverse - clear() - old.foreach { case (sub, event) => event.undo() } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Seq.scala b/tests/scala2-library/src/library/scala/collection/mutable/Seq.scala deleted file mode 100644 index eafde70a2dd2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Seq.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - - -/** A subtrait of `collection.Seq` which represents sequences - * that can be mutated. - * - * $seqInfo - * - * The class adds an `update` method to `collection.Seq`. - * - * @define Coll `mutable.Seq` - * @define coll mutable sequence - */ -trait Seq[A] extends Iterable[A] -// with GenSeq[A] - with scala.collection.Seq[A] - with GenericTraversableTemplate[A, Seq] - with SeqLike[A, Seq[A]] { - override def companion: GenericCompanion[Seq] = Seq - override def seq: Seq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable sequence - * @define Coll `mutable.Seq` - */ -object Seq extends SeqFactory[Seq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Seq[A]] = new ArrayBuffer -} - -/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. 
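The `ReusableBuilder` contract above boils down to: `result()` followed by `clear()` lets one builder instance produce several independent collections. A toy builder illustrating that protocol (it deliberately does not extend the removed trait):

```scala
// Sketch of the result()/clear() reuse cycle described by ReusableBuilder.
import scala.collection.mutable.ListBuffer

final class ReusableListBuilder[A] {
  private val buf = ListBuffer.empty[A]
  def +=(elem: A): this.type = { buf += elem; this }
  def clear(): Unit = buf.clear()
  def result(): List[A] = buf.toList   // immutable snapshot; safe to keep after clear()
}

object ReusableListBuilderDemo {
  def main(args: Array[String]): Unit = {
    val b = new ReusableListBuilder[Int]
    b += 1 += 2
    val first = b.result()
    b.clear()                 // reset, as the trait's documentation requires
    b += 3
    val second = b.result()
    println((first, second))  // (List(1, 2),List(3))
  }
}
```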
*/ -abstract class AbstractSeq[A] extends scala.collection.AbstractSeq[A] with Seq[A] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SeqLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/SeqLike.scala deleted file mode 100644 index 6987066f2bf2..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SeqLike.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import parallel.mutable.ParSeq - -/** A template trait for mutable sequences of type `mutable.Seq[A]`. - * @tparam A the type of the elements of the set - * @tparam This the type of the set itself. - * - */ -trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]] - extends scala.collection.SeqLike[A, This] - with Cloneable[This] - with Parallelizable[A, ParSeq[A]] -{ - self => - - protected[this] override def parCombiner = ParSeq.newCombiner[A] - - /** Replaces element at given index with a new value. - * - * @param idx the index of the element to replace. - * @param elem the new value. - * @throws IndexOutOfBoundsException if the index is not valid. - */ - def update(idx: Int, elem: A) - - /** Applies a transformation function to all values contained in this sequence. - * The transformation function produces new values from existing elements. - * - * @param f the transformation to apply - * @return the sequence itself. - */ - def transform(f: A => A): this.type = { - var i = 0 - this foreach { el => - this(i) = f(el) - i += 1 - } - this - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Set.scala b/tests/scala2-library/src/library/scala/collection/mutable/Set.scala deleted file mode 100644 index 97574718e8c3..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Set.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** A generic trait for mutable sets. - * $setNote - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - * @define Coll `mutable.Set` - * @define coll mutable set - */ -trait Set[A] extends Iterable[A] -// with GenSet[A] - with scala.collection.Set[A] - with GenericSetTemplate[A, Set] - with SetLike[A, Set[A]] { - override def companion: GenericCompanion[Set] = Set - override def seq: Set[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `HashSet`. - * @define coll mutable set - * @define Coll `mutable.Set` - */ -object Set extends MutableSetFactory[Set] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A] - override def empty[A]: Set[A] = HashSet.empty[A] -} - -/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. 
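`SeqLike.transform` above rewrites a mutable sequence in place by walking an index and overwriting each slot through `update`. A usage-style sketch written as a plain loop so it does not depend on the since-renamed trait method (`transformInPlace` is an invented name):

```scala
// Sketch of the in-place transform pattern: index the sequence and call update.
import scala.collection.mutable.ArrayBuffer

object TransformSketch {
  def transformInPlace[A](xs: ArrayBuffer[A])(f: A => A): xs.type = {
    var i = 0
    while (i < xs.length) { xs(i) = f(xs(i)); i += 1 }
    xs
  }

  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(1, 2, 3)
    transformInPlace(buf)(_ * 10)
    println(buf) // ArrayBuffer(10, 20, 30)
  }
}
```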
*/ -abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SetBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/SetBuilder.scala deleted file mode 100644 index 5d1e9ffc3adf..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SetBuilder.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** The canonical builder for mutable Sets. - * - * @tparam A The type of the elements that will be contained in this set. - * @tparam Coll The type of the actual collection this set builds. - * @param empty The empty element of the collection. - * @since 2.8 - */ -class SetBuilder[A, Coll <: scala.collection.Set[A] -with scala.collection.SetLike[A, Coll]](empty: Coll) -extends ReusableBuilder[A, Coll] { - protected var elems: Coll = empty - def +=(x: A): this.type = { elems = elems + x; this } - def clear() { elems = empty } - def result: Coll = elems -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SetLike.scala b/tests/scala2-library/src/library/scala/collection/mutable/SetLike.scala deleted file mode 100644 index 81c94133520e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SetLike.scala +++ /dev/null @@ -1,235 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ -import script._ -import scala.annotation.migration -import parallel.mutable.ParSet - -/** A template trait for mutable sets of type `mutable.Set[A]`. - * - * This trait provides most of the operations of a `mutable.Set` independently of its representation. - * It is typically inherited by concrete implementations of sets. - * - * $setNote - * - * @tparam A the type of the elements of the set - * @tparam This the type of the set itself. - * - * @author Martin Odersky - * @version 2.8 - * @since 2.8 - * - * @define setNote - * - * To implement a concrete mutable set, you need to provide implementations - * of the following methods: - * {{{ - * def contains(elem: A): Boolean - * def iterator: Iterator[A] - * def += (elem: A): this.type - * def -= (elem: A): this.type - * }}} - * If you wish that methods like `take`, - * `drop`, `filter` return the same kind of set, - * you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * @define addDuplicates - * Note that duplicates (elements for which `equals` yields true) will be - * removed, but it is not specified whether it will be an element of this - * set or a newly added element. - * @define coll mutable set - * @define Coll mutable.Set - */ -trait SetLike[A, +This <: SetLike[A, This] with Set[A]] - extends scala.collection.SetLike[A, This] - with Scriptable[A] - with Builder[A, This] - with Growable[A] - with Shrinkable[A] - with Cloneable[mutable.Set[A]] - with Parallelizable[A, ParSet[A]] -{ self => - - /** A common implementation of `newBuilder` for all mutable sets - * in terms of `empty`. 
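The `$setNote` above spells out the minimal surface for a concrete mutable set: `contains`, `iterator`, `+=` and `-=`. A standalone class following that shape without extending the removed `SetLike` trait (all names here are illustrative):

```scala
// Sketch: the four operations named in the note, backed by a LinkedHashMap
// so iteration follows insertion order.
import scala.collection.mutable

final class TinySet[A] {
  private val elems = mutable.LinkedHashMap.empty[A, Unit]
  def contains(elem: A): Boolean = elems.contains(elem)
  def iterator: Iterator[A]      = elems.keysIterator
  def +=(elem: A): this.type     = { elems(elem) = (); this }
  def -=(elem: A): this.type     = { elems.remove(elem); this }
  def size: Int                  = elems.size
}

object TinySetDemo {
  def main(args: Array[String]): Unit = {
    val s = new TinySet[String]
    s += "a" += "b" += "a"
    println(s.iterator.toList)   // List(a, b): duplicates collapse
    s -= "a"
    println(s.contains("a"))     // false
  }
}
```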
Overrides the implementation in `collection.SetLike` - * for better efficiency. - */ - override protected[this] def newBuilder: Builder[A, This] = empty - - protected[this] override def parCombiner = ParSet.newCombiner[A] - - /** Converts this $coll to a sequence. - * - * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true. - */ - override def toSeq: collection.Seq[A] = { - // ArrayBuffer for efficiency, preallocated to the right size. - val result = new ArrayBuffer[A](size) - foreach(result += _) - result - } - - /** Adds an element to this $coll. - * - * @param elem the element to be added - * @return `true` if the element was not yet present in the set, `false` otherwise. - */ - def add(elem: A): Boolean = { - val r = contains(elem) - this += elem - !r - } - - /** Removes an element from this set. - * - * @param elem The element to be removed. - * @return `true` if the element was previously present in the set, `false` otherwise. - */ - def remove(elem: A): Boolean = { - val r = contains(elem) - this -= elem - r - } - - /** Updates the presence of a single element in this set. - * - * This method allows one to add or remove an element `elem` - * from this set depending on the value of parameter `included`. - * Typically, one would use the following syntax: - * {{{ - * set(elem) = true // adds element - * set(elem) = false // removes element - * }}} - * - * @param elem the element to be added or removed - * @param included a flag indicating whether element should be included or excluded. - */ - def update(elem: A, included: Boolean) { - if (included) this += elem else this -= elem - } - - // abstract methods from Growable/Shrinkable - - /** Adds a single element to the set. */ - def +=(elem: A): this.type - def -=(elem: A): this.type - - /** Removes all elements from the set for which do not satisfy a predicate. - * @param p the predicate used to test elements. Only elements for - * which `p` returns `true` are retained in the set; all others - * are removed. - */ - def retain(p: A => Boolean): Unit = - for (elem <- this.toList) // scala/bug#7269 toList avoids ConcurrentModificationException - if (!p(elem)) this -= elem - - /** Removes all elements from the set. After this operation is completed, - * the set will be empty. - */ - def clear(): Unit = - for (elem <- this.toList) - this -= elem - - override def clone(): This = empty ++= repr.seq - - /** The result when this set is used as a builder - * @return the set representation itself. - */ - def result: This = repr - - /** Creates a new set consisting of all the elements of this set and `elem`. - * - * $addDuplicates - * - * @param elem the element to add. - * @return a new set consisting of elements of this set and `elem`. - */ - @migration("`+` creates a new set. Use `+=` to add an element to this set and return that set itself.", "2.8.0") - override def + (elem: A): This = clone() += elem - - /** Creates a new set consisting of all the elements of this set and two or more - * specified elements. - * - * $addDuplicates - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new set consisting of all the elements of this set, `elem1`, - * `elem2` and those in `elems`. - */ - @migration("`+` creates a new set. 
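`retain` above iterates over a `toList` snapshot so that removing elements cannot trip over a live iterator (the scala/bug#7269 workaround noted in the source). The same idea expressed against today's `mutable.Set` API:

```scala
// Sketch: snapshot first, then mutate freely while filtering.
import scala.collection.mutable

object RetainSketch {
  def retain[A](set: mutable.Set[A])(p: A => Boolean): Unit =
    for (elem <- set.toList)      // toList snapshot avoids concurrent modification
      if (!p(elem)) set -= elem

  def main(args: Array[String]): Unit = {
    val s = mutable.Set(1, 2, 3, 4, 5)
    retain(s)(_ % 2 == 0)
    println(s.toList.sorted)      // List(2, 4)
  }
}
```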
Use `+=` to add an element to this set and return that set itself.", "2.8.0") - override def + (elem1: A, elem2: A, elems: A*): This = - clone() += elem1 += elem2 ++= elems - - /** Creates a new set consisting of all the elements of this set and those - * provided by the specified traversable object. - * - * $addDuplicates - * - * @param xs the traversable object. - * @return a new set consisting of elements of this set and those in `xs`. - */ - @migration("`++` creates a new set. Use `++=` to add elements to this set and return that set itself.", "2.8.0") - override def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq - - /** Creates a new set consisting of all the elements of this set except `elem`. - * - * @param elem the element to remove. - * @return a new set consisting of all the elements of this set except `elem`. - */ - @migration("`-` creates a new set. Use `-=` to remove an element from this set and return that set itself.", "2.8.0") - override def -(elem: A): This = clone() -= elem - - /** Creates a new set consisting of all the elements of this set except the two - * or more specified elements. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new set consisting of all the elements of this set except - * `elem1`, `elem2` and `elems`. - */ - @migration("`-` creates a new set. Use `-=` to remove an element from this set and return that set itself.", "2.8.0") - override def -(elem1: A, elem2: A, elems: A*): This = - clone() -= elem1 -= elem2 --= elems - - /** Creates a new set consisting of all the elements of this set except those - * provided by the specified traversable object. - * - * @param xs the traversable object. - * @return a new set consisting of all the elements of this set except - * elements from `xs`. - */ - @migration("`--` creates a new set. Use `--=` to remove elements from this set and return that set itself.", "2.8.0") - override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq - - /** Send a message to this scriptable object. - * - * @param cmd the message to send. - * @throws UnsupportedOperationException - * if the message was not understood. - */ - @deprecated("scripting is deprecated", "2.11.0") - def <<(cmd: Message[A]): Unit = cmd match { - case Include(_, x) => this += x - case Remove(_, x) => this -= x - case Reset() => clear() - case s: Script[_] => s.iterator foreach << - case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SetProxy.scala b/tests/scala2-library/src/library/scala/collection/mutable/SetProxy.scala deleted file mode 100644 index 43b6aa57af68..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SetProxy.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** This is a simple wrapper class for [[scala.collection.mutable.Set]]. - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. 
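The `@migration` notes above draw the line between `+`/`-`/`++`/`--`, which clone the set and then mutate the copy, and `+=`/`-=`, which mutate the receiver. A small usage sketch of that distinction against the current `mutable.Set` API, where `clone() += elem` mirrors the removed implementation of `+`:

```scala
// Sketch: non-destructive "add" via clone-then-mutate versus in-place +=.
import scala.collection.mutable

object PlusVsPlusEquals {
  def main(args: Array[String]): Unit = {
    val s = mutable.Set(1, 2)

    val t = s.clone() += 3   // fresh set; the receiver is untouched
    println(s)               // still only 1 and 2
    println(t)               // 1, 2 and 3

    s += 3                   // in-place update of s itself
    println(s)               // now 1, 2 and 3
  }
}
```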
- * - * @author Matthias Zenger - * @version 1.1, 09/05/2004 - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] { - override def repr = this - override def empty = new SetProxy[A] { val self = SetProxy.this.self.empty } - override def + (elem: A) = { self += elem ; this } - override def - (elem: A) = { self -= elem ; this } - - def +=(elem: A) = { self += elem; this } - def -=(elem: A) = { self -= elem; this } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SortedMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/SortedMap.scala deleted file mode 100644 index 806b30e79a52..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SortedMap.scala +++ /dev/null @@ -1,57 +0,0 @@ -package scala -package collection -package mutable - -import generic._ - -/** - * A mutable map whose keys are sorted. - * - * @tparam A the type of the keys contained in this sorted map. - * @tparam B the type of the values associated with the keys. - * - * @author Rui Gonçalves - * @version 2.12 - * @since 2.12 - * - * @define Coll mutable.SortedMap - * @define coll mutable sorted map - */ -trait SortedMap[A, B] - extends Map[A, B] - with collection.SortedMap[A, B] - with MapLike[A, B, SortedMap[A, B]] - with SortedMapLike[A, B, SortedMap[A, B]] { - - override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] = SortedMap.newBuilder[A, B] - - override def empty: SortedMap[A, B] = SortedMap.empty - - override def updated[B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value)) - - override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = clone().asInstanceOf[SortedMap[A, B1]] += kv - - override def +[B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1)*): SortedMap[A, B1] = - clone().asInstanceOf[SortedMap[A, B1]] += elem1 += elem2 ++= elems - - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = - clone().asInstanceOf[SortedMap[A, B1]] ++= xs.seq -} - -/** - * $factoryInfo - * - * @define Coll mutable.SortedMap - * @define coll mutable sorted map - */ -object SortedMap extends MutableSortedMapFactory[SortedMap] { - - def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B] - - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = - new SortedMapCanBuildFrom[A, B] -} - -/** Explicit instantiation of the `SortedMap` trait to reduce class file size in subclasses. */ -abstract class AbstractSortedMap[A, B] extends scala.collection.mutable.AbstractMap[A, B] with SortedMap[A, B] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SortedSet.scala b/tests/scala2-library/src/library/scala/collection/mutable/SortedSet.scala deleted file mode 100644 index 304469916db5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SortedSet.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ - -/** - * Base trait for mutable sorted set. 
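The mutable `SortedMap` above defaults to a `TreeMap`, so iteration always follows the implicit `Ordering` on the keys. A brief usage sketch with the standard-library `mutable.TreeMap`:

```scala
// Sketch: keys come back in Ordering order regardless of insertion order.
import scala.collection.mutable

object SortedMapDemo {
  def main(args: Array[String]): Unit = {
    val m = mutable.TreeMap.empty[Int, String]
    m += (3 -> "c")
    m += (1 -> "a")
    m += (2 -> "b")

    println(m.keys.toList)   // List(1, 2, 3)
    println(m.headOption)    // Some((1,a)): the smallest key
  }
}
```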
- * - * @define Coll `mutable.SortedSet` - * @define coll mutable sorted set - * - * @author Lucien Pereira - * - */ -trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.SortedSetLike[A,SortedSet[A]] - with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] { - - /** Needs to be overridden in subclasses. */ - override def empty: SortedSet[A] = SortedSet.empty[A] - -} - -/** - * A template for mutable sorted set companion objects. - * - * @define Coll `mutable.SortedSet` - * @define coll mutable sorted set - * @define factoryInfo - * This object provides a set of operations needed to create sorted sets of type mutable.SortedSet. - * @define sortedSetCanBuildFromInfo - * Standard `CanBuildFrom` instance for sorted sets. - * - * @author Lucien Pereira - * - */ -object SortedSet extends MutableSortedSetFactory[SortedSet] { - def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A] - - def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A] - - // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific - override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom -} - -/** Explicit instantiation of the `SortedSet` trait to reduce class file size in subclasses. */ -abstract class AbstractSortedSet[A] extends scala.collection.mutable.AbstractSet[A] with SortedSet[A] diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Stack.scala b/tests/scala2-library/src/library/scala/collection/mutable/Stack.scala deleted file mode 100644 index 28d50af1f979..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Stack.scala +++ /dev/null @@ -1,178 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ -import scala.collection.immutable.{List, Nil} -import scala.collection.Iterator -import scala.annotation.migration - -/** Factory object for the `mutable.Stack` class. - * - * $factoryInfo - * @define coll mutable stack - * @define Coll `mutable.Stack` - */ -object Stack extends SeqFactory[Stack] { - class StackBuilder[A] extends Builder[A, Stack[A]] { - val lbuff = new ListBuffer[A] - def +=(elem: A) = { lbuff += elem; this } - def clear() = lbuff.clear() - def result = new Stack(lbuff.result) - } - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Stack[A]] = new StackBuilder[A] - val empty: Stack[Nothing] = new Stack(Nil) -} - -/** A stack implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. - * - * @tparam A type of the elements contained in this stack. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stacks "Scala's Collection Library overview"]] - * section on `Stacks` for more information. 
- * @define Coll `Stack` - * @define coll stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. Use a List assigned to a var instead.", "2.12.0") -class Stack[A] private (var elems: List[A]) -extends AbstractSeq[A] - with Seq[A] - with SeqLike[A, Stack[A]] - with GenericTraversableTemplate[A, Stack] - with Cloneable[Stack[A]] - with Serializable -{ - def this() = this(Nil) - - override def companion = Stack - - /** Checks if the stack is empty. - * - * @return true, iff there is no element on the stack - */ - override def isEmpty: Boolean = elems.isEmpty - - /** The number of elements in the stack */ - override def length = elems.length - - /** Retrieve `n`-th element from stack, where top of stack has index `0`. - * - * This is a linear time operation. - * - * @param index the index of the element to return - * @return the element at the specified index - * @throws IndexOutOfBoundsException if the index is out of bounds - */ - override def apply(index: Int) = elems(index) - - /** Replace element at index `n` with the new element `newelem`. - * - * This is a linear time operation. - * - * @param n the index of the element to replace. - * @param newelem the new element. - * @throws IndexOutOfBoundsException if the index is not valid - */ - def update(n: Int, newelem: A) = - if(n < 0 || n >= length) throw new IndexOutOfBoundsException(n.toString) - else elems = elems.take(n) ++ (newelem :: elems.drop(n+1)) - - /** Push an element on the stack. - * - * @param elem the element to push on the stack. - * @return the stack with the new element on top. - */ - def push(elem: A): this.type = { elems = elem :: elems; this } - - /** Push two or more elements onto the stack. The last element - * of the sequence will be on top of the new stack. - * - * @param elems the element sequence. - * @return the stack with the new elements on top. - */ - def push(elem1: A, elem2: A, elems: A*): this.type = - this.push(elem1).push(elem2).pushAll(elems) - - /** Push all elements in the given traversable object onto the stack. The - * last element in the traversable object will be on top of the new stack. - * - * @param xs the traversable object. - * @return the stack with the new elements on top. - */ - def pushAll(xs: TraversableOnce[A]): this.type = { xs foreach push ; this } - - /** Returns the top element of the stack. This method will not remove - * the element from the stack. An error is signaled if there is no - * element on the stack. - * - * @throws java.util.NoSuchElementException - * @return the top element - */ - def top: A = - elems.head - - /** Removes the top element from the stack. - * - * @throws java.util.NoSuchElementException - * @return the top element - */ - def pop(): A = { - val res = elems.head - elems = elems.tail - res - } - - /** - * Removes all elements from the stack. After this operation completed, - * the stack will be empty. - */ - def clear(): Unit = elems = Nil - - /** Returns an iterator over all elements on the stack. This iterator - * is stable with respect to state changes in the stack object; i.e. - * such changes will not be reflected in the iterator. The iterator - * issues elements in the reversed order they were inserted into the - * stack (LIFO order). - * - * @return an iterator over all stack elements. 
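The deprecation notice above recommends replacing `mutable.Stack` with "a List assigned to a var". A sketch of that replacement, where push is a prepend and pop is `head` plus `tail`:

```scala
// Sketch: a LIFO stack as an immutable List held in a var.
object ListAsStack {
  def main(args: Array[String]): Unit = {
    var stack: List[Int] = Nil

    // push
    stack = 1 :: stack
    stack = 2 :: stack

    // top / pop
    println(stack.head)   // 2 (last in, first out)
    stack = stack.tail
    println(stack)        // List(1)
  }
}
```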
- */ - @migration("`iterator` traverses in FIFO order.", "2.8.0") - override def iterator: Iterator[A] = elems.iterator - - /** Creates a list of all stack elements in LIFO order. - * - * @return the created list. - */ - @migration("`toList` traverses in FIFO order.", "2.8.0") - override def toList: List[A] = elems - - @migration("`foreach` traverses in FIFO order.", "2.8.0") - override def foreach[U](f: A => U): Unit = super.foreach(f) - - /** This method clones the stack. - * - * @return a stack with the same elements. - */ - override def clone(): Stack[A] = new Stack[A](elems) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/StackProxy.scala b/tests/scala2-library/src/library/scala/collection/mutable/StackProxy.scala deleted file mode 100644 index ac52bbba2197..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/StackProxy.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** A stack implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. - * - * @tparam A type of the elements in this stack proxy. - * - * @author Matthias Zenger - * @version 1.0, 10/05/2004 - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait StackProxy[A] extends Stack[A] with Proxy { - - def self: Stack[A] - - /** Access element number `n`. - * - * @return the element at index `n`. - */ - override def apply(n: Int): A = self.apply(n) - - /** Returns the length of this stack. - */ - override def length: Int = self.length - - /** Checks if the stack is empty. - * - * @return true, iff there is no element on the stack - */ - override def isEmpty: Boolean = self.isEmpty - - /** Pushes a single element on top of the stack. - * - * @param elem the element to push onto the stack - */ - def +=(elem: A): this.type = { - self push elem - this - } - - override def pushAll(xs: TraversableOnce[A]): this.type = { self pushAll xs; this } - - override def push(elem1: A, elem2: A, elems: A*): this.type = { - self.push(elem1).push(elem2).pushAll(elems) - this - } - - override def push(elem: A): this.type = { - self.push(elem) - this - } - - /** Returns the top element of the stack. This method will not remove - * the element from the stack. An error is signaled if there is no - * element on the stack. - * - * @return the top element - */ - override def top: A = self.top - - /** Removes the top element from the stack. - */ - override def pop(): A = self.pop() - - /** - * Removes all elements from the stack. After this operation completed, - * the stack will be empty. - */ - override def clear(): Unit = self.clear() - - /** Returns an iterator over all elements on the stack. This iterator - * is stable with respect to state changes in the stack object; i.e. - * such changes will not be reflected in the iterator. The iterator - * issues elements in the order they were inserted into the stack - * (FIFO order). - * - * @return an iterator over all stack elements. - */ - override def iterator: Iterator[A] = self.iterator - - /** Creates a list of all stack elements in FIFO order. - * - * @return the created list. - */ - override def toList: List[A] = self.toList - - /** This method clones the stack. 
- * - * @return a stack with the same elements. - */ - override def clone(): Stack[A] = new StackProxy[A] { - def self = StackProxy.this.self.clone() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/StringBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/StringBuilder.scala deleted file mode 100644 index b5b949837417..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/StringBuilder.scala +++ /dev/null @@ -1,451 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import java.lang.{ StringBuilder => JavaStringBuilder } -import scala.annotation.migration -import immutable.StringLike - -/** A builder for mutable sequence of characters. This class provides an API - * mostly compatible with `java.lang.StringBuilder`, except where there are - * conflicts with the Scala collections API (such as the `reverse` method.) - * - * @author Stephane Micheloud - * @author Martin Odersky - * @version 2.8 - * @since 2.7 - * @define Coll `mutable.IndexedSeq` - * @define coll string builder - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html# "Scala's Collection Library overview"]] - * section on `StringBuilders` for more information. - */ -@SerialVersionUID(0 - 8525408645367278351L) -final class StringBuilder(private val underlying: JavaStringBuilder) - extends AbstractSeq[Char] - with java.lang.CharSequence - with IndexedSeq[Char] - with StringLike[StringBuilder] - with ReusableBuilder[Char, String] - with Serializable { - - override protected[this] def thisCollection: StringBuilder = this - override protected[this] def toCollection(repr: StringBuilder): StringBuilder = repr - - /** Creates a string builder buffer as builder for this class */ - override protected[this] def newBuilder = new GrowingBuilder(new StringBuilder) - - /** Constructs a string builder initialized with string value `initValue` - * and with additional character capacity `initCapacity`. - */ - def this(initCapacity: Int, initValue: String) = - this(new JavaStringBuilder(initValue.length + initCapacity) append initValue) - - /** Constructs a string builder with no characters in it and an - * initial capacity of 16 characters. - */ - def this() = this(16, "") - - /** Constructs a string builder with no characters in it and an - * initial capacity specified by the `capacity` argument. - * - * @param capacity the initial capacity. - * @throws NegativeArraySizeException if capacity < 0. - */ - def this(capacity: Int) = this(capacity, "") - - /** Constructs a string builder with initial characters - * equal to characters of `str`. - */ - def this(str: String) = this(16, str) - - def toArray: Array[Char] = { - val arr = new Array[Char](length) - underlying.getChars(0, length, arr, 0) - arr - } - - override def length: Int = underlying.length() - def length_=(n: Int) { underlying.setLength(n) } - - /** Clears the builder contents. - */ - def clear(): Unit = setLength(0) - - /** Sets the length of the character sequence. If the current sequence - * is shorter than the given length, it is padded with nulls; if it is - * longer, it is truncated. - * - * @param len the new length - * @throws IndexOutOfBoundsException if the argument is negative. 
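The `StringBuilder` described above also acts as a `ReusableBuilder[Char, String]`: `result()` snapshots the current contents and `clear()` resets the buffer for reuse. A short usage sketch of that cycle:

```scala
// Sketch: build a String, clear, and build another one with the same instance.
object StringBuilderDemo {
  def main(args: Array[String]): Unit = {
    val sb = new StringBuilder        // scala.collection.mutable.StringBuilder
    sb ++= "hello"
    sb += ' '
    sb ++= "world"
    println(sb.result())              // hello world

    sb.clear()                        // reset the buffer for reuse
    sb.append(42).append(':').append(true)
    println(sb.result())              // 42:true
  }
}
```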
- */ - def setLength(len: Int) { underlying setLength len } - - /** Returns the current capacity, which is the size of the underlying array. - * A new array will be allocated if the current capacity is exceeded. - * - * @return the capacity - */ - def capacity: Int = underlying.capacity() - - /** Ensure that the capacity is at least the given argument. - * If the argument is greater than the current capacity, new - * storage will be allocated with size equal to the given - * argument or to `(2 * capacity + 2)`, whichever is larger. - * - * @param newCapacity the minimum desired capacity. - */ - def ensureCapacity(newCapacity: Int) { underlying ensureCapacity newCapacity } - - /** Returns the Char at the specified index, counting from 0 as in Arrays. - * - * @param index the index to look up - * @return the Char at the given index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def charAt(index: Int): Char = underlying charAt index - - /** Equivalent to charAt. - */ - override def apply(index: Int): Char = underlying charAt index - - /** Removes the Char at the specified index. The sequence is - * shortened by one. - * - * @param index The index to remove. - * @return This StringBuilder. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def deleteCharAt(index: Int): StringBuilder = { - underlying deleteCharAt index - this - } - - /** Update the sequence at the given index to hold the specified Char. - * - * @param index the index to modify. - * @param ch the new Char. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def setCharAt(index: Int, ch: Char): Unit = underlying.setCharAt(index, ch) - - /** Equivalent to setCharAt. - */ - def update(i: Int, c: Char): Unit = setCharAt(i, c) - - /** Returns a new String made up of a subsequence of this sequence, - * beginning at the given index and extending to the end of the sequence. - * - * target.substring(start) is equivalent to target.drop(start) - * - * @param start The starting index, inclusive. - * @return The new String. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def substring(start: Int): String = substring(start, length) - - /** Returns a new String made up of a subsequence of this sequence, - * beginning at the start index (inclusive) and extending to the - * end index (exclusive). - * - * target.substring(start, end) is equivalent to target.slice(start, end).mkString - * - * @param start The beginning index, inclusive. - * @param end The ending index, exclusive. - * @return The new String. - * @throws StringIndexOutOfBoundsException If either index is out of bounds, - * or if start > end. - */ - def substring(start: Int, end: Int): String = underlying.substring(start, end) - - /** For implementing CharSequence. - */ - def subSequence(start: Int, end: Int): java.lang.CharSequence = - substring(start, end) - - /** Appends the given Char to the end of the sequence. - */ - def +=(x: Char): this.type = { append(x); this } - - /** Optimization. - */ - def ++=(s: String): this.type = { - underlying append s - this - } - - def appendAll(xs: String): StringBuilder = { - underlying append xs - this - } - - /** !!! This should create a new sequence. - */ - def +(x: Char): this.type = { +=(x); this } - - /** Appends the string representation of the given argument, - * which is converted to a String with `String.valueOf`. - * - * @param x an `Any` object. - * @return this StringBuilder. 
- */ - def append(x: Any): StringBuilder = { - underlying append String.valueOf(x) - this - } - - /** Appends the given String to this sequence. - * - * @param s a String. - * @return this StringBuilder. - */ - def append(s: String): StringBuilder = { - underlying append s - this - } - - /** Appends the specified string builder to this sequence. - * - * @param sb - * @return - */ - def append(sb: StringBuilder): StringBuilder = { - underlying append sb - this - } - - /** Appends all the Chars in the given Seq[Char] to this sequence. - * - * @param xs the characters to be appended. - * @return this StringBuilder. - */ - def appendAll(xs: TraversableOnce[Char]): StringBuilder = appendAll(xs.toArray) - - /** Appends all the Chars in the given Array[Char] to this sequence. - * - * @param xs the characters to be appended. - * @return a reference to this object. - */ - def appendAll(xs: Array[Char]): StringBuilder = { - underlying append xs - this - } - - /** Appends a portion of the given Array[Char] to this sequence. - * - * @param xs the Array containing Chars to be appended. - * @param offset the index of the first Char to append. - * @param len the numbers of Chars to append. - * @return this StringBuilder. - */ - def appendAll(xs: Array[Char], offset: Int, len: Int): StringBuilder = { - underlying.append(xs, offset, len) - this - } - - /** Append the String representation of the given primitive type - * to this sequence. The argument is converted to a String with - * String.valueOf. - * - * @param x a primitive value - * @return This StringBuilder. - */ - def append(x: Boolean): StringBuilder = { underlying append x ; this } - def append(x: Byte): StringBuilder = append(x.toInt) - def append(x: Short): StringBuilder = append(x.toInt) - def append(x: Int): StringBuilder = { underlying append x ; this } - def append(x: Long): StringBuilder = { underlying append x ; this } - def append(x: Float): StringBuilder = { underlying append x ; this } - def append(x: Double): StringBuilder = { underlying append x ; this } - def append(x: Char): StringBuilder = { underlying append x ; this } - - /** Remove a subsequence of Chars from this sequence, starting at the - * given start index (inclusive) and extending to the end index (exclusive) - * or to the end of the String, whichever comes first. - * - * @param start The beginning index, inclusive. - * @param end The ending index, exclusive. - * @return This StringBuilder. - * @throws StringIndexOutOfBoundsException if start < 0 || start > end - */ - def delete(start: Int, end: Int): StringBuilder = { - underlying.delete(start, end) - this - } - - /** Replaces a subsequence of Chars with the given String. The semantics - * are as in delete, with the String argument then inserted at index 'start'. - * - * @param start The beginning index, inclusive. - * @param end The ending index, exclusive. - * @param str The String to be inserted at the start index. - * @return This StringBuilder. - * @throws StringIndexOutOfBoundsException if start < 0, start > length, or start > end - */ - def replace(start: Int, end: Int, str: String): StringBuilder = { - underlying.replace(start, end, str) - this - } - - /** Inserts a subarray of the given Array[Char] at the given index - * of this sequence. - * - * @param index index at which to insert the subarray. - * @param str the Array from which Chars will be taken. - * @param offset the index of the first Char to insert. - * @param len the number of Chars from 'str' to insert. - * @return This StringBuilder. 
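The editing methods documented above (`delete`, `replace`, `insert`, and friends) all address index ranges of the underlying character buffer, with the end index exclusive. A brief usage sketch:

```scala
// Sketch: range-based edits on the mutable StringBuilder.
object EditDemo {
  def main(args: Array[String]): Unit = {
    val sb = new StringBuilder("hello world")
    sb.delete(5, 11)             // drop " world"      -> "hello"
    sb.insert(0, "oh, ")         //                    -> "oh, hello"
    sb.replace(4, 9, "there")    // swap "hello"       -> "oh, there"
    println(sb.toString)         // oh, there
  }
}
```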
- * - * @throws StringIndexOutOfBoundsException if index < 0, index > length, - * offset < 0, len < 0, or (offset + len) > str.length. - */ - def insertAll(index: Int, str: Array[Char], offset: Int, len: Int): StringBuilder = { - underlying.insert(index, str, offset, len) - this - } - - /** Inserts the String representation (via String.valueOf) of the given - * argument into this sequence at the given index. - * - * @param index the index at which to insert. - * @param x a value. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. - */ - def insert(index: Int, x: Any): StringBuilder = insert(index, String.valueOf(x)) - - /** Inserts the String into this character sequence. - * - * @param index the index at which to insert. - * @param x a String. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. - */ - def insert(index: Int, x: String): StringBuilder = { - underlying.insert(index, x) - this - } - - /** Inserts the given Seq[Char] into this sequence at the given index. - * - * @param index the index at which to insert. - * @param xs the Seq[Char]. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. - */ - def insertAll(index: Int, xs: TraversableOnce[Char]): StringBuilder = insertAll(index, xs.toArray) - - /** Inserts the given Array[Char] into this sequence at the given index. - * - * @param index the index at which to insert. - * @param xs the Array[Char]. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. - */ - def insertAll(index: Int, xs: Array[Char]): StringBuilder = { - underlying.insert(index, xs) - this - } - - /** Calls String.valueOf on the given primitive value, and inserts the - * String at the given index. - * - * @param index the offset position. - * @param x a primitive value. - * @return this StringBuilder. - */ - def insert(index: Int, x: Boolean): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Byte): StringBuilder = insert(index, x.toInt) - def insert(index: Int, x: Short): StringBuilder = insert(index, x.toInt) - def insert(index: Int, x: Int): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Long): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Float): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Double): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Char): StringBuilder = insert(index, String.valueOf(x)) - - /** Finds the index of the first occurrence of the specified substring. - * - * @param str the target string to search for - * @return the first applicable index where target occurs, or -1 if not found. - */ - def indexOf(str: String): Int = underlying.indexOf(str) - - /** Finds the index of the first occurrence of the specified substring. - * - * @param str the target string to search for - * @param fromIndex the smallest index in the source string to consider - * @return the first applicable index where target occurs, or -1 if not found. - */ - def indexOf(str: String, fromIndex: Int): Int = underlying.indexOf(str, fromIndex) - - /** Finds the index of the last occurrence of the specified substring. - * - * @param str the target string to search for - * @return the last applicable index where target occurs, or -1 if not found. 
- */ - def lastIndexOf(str: String): Int = underlying.lastIndexOf(str) - - /** Finds the index of the last occurrence of the specified substring. - * - * @param str the target string to search for - * @param fromIndex the smallest index in the source string to consider - * @return the last applicable index where target occurs, or -1 if not found. - */ - def lastIndexOf(str: String, fromIndex: Int): Int = underlying.lastIndexOf(str, fromIndex) - - /** Creates a new StringBuilder with the reversed contents of this one. - * If surrogate pairs are present, they are treated as indivisible units: each - * pair will appear in the same order in the updated sequence. - * - * @return the reversed StringBuilder - */ - @migration("`reverse` returns a new instance. Use `reverseContents` to update in place and return that StringBuilder itself.", "2.8.0") - override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying).reverse) - - override def clone(): StringBuilder = new StringBuilder(new JavaStringBuilder(underlying)) - - /** Like reverse, but destructively updates the target StringBuilder. - * - * @return the reversed StringBuilder (same as the target StringBuilder) - */ - def reverseContents(): StringBuilder = { - underlying.reverse() - this - } - - /** Returns a new String representing the data in this sequence. - * - * @note because toString is inherited from AnyRef and used for - * many purposes, it is better practice to call mkString - * to obtain a StringBuilder result. - * @return the current contents of this sequence as a String - */ - override def toString = underlying.toString - - /** Returns a new String representing the data in this sequence. - * - * @return the current contents of this sequence as a String - */ - override def mkString = toString - - /** Returns the result of this Builder (a String). - * - * If this method is called multiple times, each call will result in a snapshot of the buffer at that point in time. - * In particular, a `StringBuilder` can be used to build multiple independent strings by emptying the buffer with `clear` - * after each call to `result`. - * - * @return the string assembled by this StringBuilder - */ - def result(): String = toString -} - -object StringBuilder { - def newBuilder = new StringBuilder -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Subscriber.scala b/tests/scala2-library/src/library/scala/collection/mutable/Subscriber.scala deleted file mode 100644 index c2aa9be72ddb..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Subscriber.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** `Subscriber[A, B]` objects may subscribe to events of type `A` - * published by an object of type `B`. `B` is typically a subtype of - * [[scala.collection.mutable.Publisher]]. 
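`Subscriber` is one half of the publish/subscribe pair; the other half is `scala.collection.mutable.Publisher`, referenced in the doc comment above. A rough sketch of how the two were typically wired together, assuming the 2.12 library these sources mirror; `Added` and `Registry` are made-up names for illustration:

```scala
import scala.collection.mutable

case class Added(value: Int)  // hypothetical event type

// A Publisher[Added] that fires an event for every element added to it.
class Registry extends mutable.Publisher[Added] {
  def add(x: Int): Unit = publish(Added(x))  // `publish` is protected in Publisher
}

object SubscriberDemo {
  def main(args: Array[String]): Unit = {
    val reg = new Registry
    // Subscriber[-Evt, -Pub] is contravariant in both parameters, so a subscriber
    // written against the general Publisher type can be registered here.
    reg.subscribe(new mutable.Subscriber[Added, mutable.Publisher[Added]] {
      def notify(pub: mutable.Publisher[Added], event: Added): Unit =
        println(s"observed $event")
    })
    reg.add(1)  // prints: observed Added(1)
  }
}
```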
- * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.8 - * @since 1 - */ -trait Subscriber[-Evt, -Pub] { - def notify(pub: Pub, event: Evt): Unit -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedBuffer.scala deleted file mode 100644 index 9c27f8b003f9..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedBuffer.scala +++ /dev/null @@ -1,186 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import script._ - -/** This class should be used as a mixin. It synchronizes the `Buffer` - * methods of the class into which it is mixed in. - * - * @tparam A type of the elements contained in this buffer. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 1 - * @define Coll `SynchronizedBuffer` - * @define coll synchronized buffer - */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") -trait SynchronizedBuffer[A] extends Buffer[A] { - - import scala.collection.Traversable - - abstract override def length: Int = synchronized { - super.length - } - - abstract override def iterator: Iterator[A] = synchronized { - super.iterator - } - - abstract override def apply(n: Int): A = synchronized { - super.apply(n) - } - - /** Append a single element to this buffer. - * - * @param elem the element to append. - */ - abstract override def +=(elem: A): this.type = synchronized[this.type] { - super.+=(elem) - } - - /** Appends a number of elements provided by a traversable object via - * its `foreach` method. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - */ - override def ++(xs: GenTraversableOnce[A]): Self = synchronized { - super.++(xs) - } - - /** Appends a number of elements provided by a traversable object - * via its `foreach` method. - * - * @param xs the iterable object. - */ - override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { - super.++=(xs) - } - - /** Appends a sequence of elements to this buffer. - * - * @param elems the elements to append. - */ - override def append(elems: A*): Unit = synchronized { - super.++=(elems) - } - - /** Appends a number of elements provided by a traversable object - * via its `foreach` method. - * - * @param xs the traversable object. - */ - override def appendAll(xs: TraversableOnce[A]): Unit = synchronized { - super.appendAll(xs) - } - - /** Prepend a single element to this buffer and return - * the identity of the buffer. - * - * @param elem the element to append. - */ - abstract override def +=:(elem: A): this.type = synchronized[this.type] { - super.+=:(elem) - } - - /** Prepends a number of elements provided by a traversable object - * via its `foreach` method. The identity of the buffer is returned. - * - * @param xs the traversable object. - */ - override def ++=:(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=:(xs) } - - /** Prepend an element to this list. - * - * @param elems the elements to prepend. 
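The deprecation notice above recommends `java.util.concurrent.ConcurrentLinkedQueue` over the `SynchronizedBuffer` mixin. A rough migration sketch, assuming a 2.12 classpath where `scala.collection.JavaConverters` is available; the object name is illustrative:

```scala
import java.util.concurrent.ConcurrentLinkedQueue
import scala.collection.JavaConverters._

object NoSyncBufferDemo {
  def main(args: Array[String]): Unit = {
    // Thread-safe append/poll without mixing a synchronizing trait into Buffer.
    val queue = new ConcurrentLinkedQueue[String]()
    queue.add("a")                      // roughly `buffer += "a"`
    queue.addAll(Seq("b", "c").asJava)  // roughly `buffer ++= Seq("b", "c")`
    println(queue.poll())               // a  (FIFO: removes the head)
    println(queue.asScala.toList)       // List(b, c)
  }
}
```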
- */ - override def prepend(elems: A*): Unit = prependAll(elems) - - /** Prepends a number of elements provided by a traversable object - * via its `foreach` method. The identity of the buffer is returned. - * - * @param xs the traversable object. - */ - override def prependAll(xs: TraversableOnce[A]): Unit = synchronized { - super.prependAll(xs) - } - - /** Inserts new elements at the index `n`. Opposed to method `update`, - * this method will not replace an element with a one. - * Instead, it will insert the new elements at index `n`. - * - * @param n the index where a new element will be inserted. - * @param elems the new elements to insert. - */ - override def insert(n: Int, elems: A*): Unit = synchronized { - super.insertAll(n, elems) - } - - /** Inserts new elements at the index `n`. Opposed to method `update`, - * this method will not replace an element with a one. - * Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. - * @param xs the traversable object providing all elements to insert. - */ - abstract override def insertAll(n: Int, xs: Traversable[A]): Unit = synchronized { - super.insertAll(n, xs) - } - - /** Replace element at index `n` with the new element `newelem`. - * - * @param n the index of the element to replace. - * @param newelem the new element. - */ - abstract override def update(n: Int, newelem: A): Unit = synchronized { - super.update(n, newelem) - } - - /** Removes the element on a given index position. - * - * @param n the index which refers to the element to delete. - */ - abstract override def remove(n: Int): A = synchronized { - super.remove(n) - } - - /** Clears the buffer contents. - */ - abstract override def clear(): Unit = synchronized { - super.clear() - } - - @deprecated("scripting is deprecated", "2.11.0") - override def <<(cmd: Message[A]): Unit = synchronized { - super.<<(cmd) - } - - /** Return a clone of this buffer. - * - * @return an `ArrayBuffer` with the same elements. - */ - override def clone(): Self = synchronized { - super.clone() - } - - /** The `hashCode` method always yields an error, since it is not - * safe to use buffers as keys in hash tables. - * - * @return never. - */ - override def hashCode(): Int = synchronized { - super.hashCode() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedMap.scala deleted file mode 100644 index 8618798dbd95..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedMap.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import scala.annotation.migration - -/** This class should be used as a mixin. It synchronizes the `Map` - * functions of the class into which it is mixed in. - * - * @tparam A type of the keys contained in this map. - * @tparam B type of the values associated with keys. - * - * @author Matthias Zenger, Martin Odersky - * @version 2.0, 31/12/2006 - * @since 1 - * @define Coll `SynchronizedMap` - * @define coll synchronized map - */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. 
Consider java.util.concurrent.ConcurrentHashMap as an alternative.", "2.11.0") -trait SynchronizedMap[A, B] extends Map[A, B] { - - abstract override def get(key: A): Option[B] = synchronized { super.get(key) } - abstract override def iterator: Iterator[(A, B)] = synchronized { super.iterator } - abstract override def += (kv: (A, B)): this.type = synchronized[this.type] { super.+=(kv) } - abstract override def -= (key: A): this.type = synchronized[this.type] { super.-=(key) } - - override def size: Int = synchronized { super.size } - override def put(key: A, value: B): Option[B] = synchronized { super.put(key, value) } - override def update(key: A, value: B): Unit = synchronized { super.update(key, value) } - override def remove(key: A): Option[B] = synchronized { super.remove(key) } - override def clear(): Unit = synchronized { super.clear() } - override def getOrElseUpdate(key: A, default: => B): B = synchronized { super.getOrElseUpdate(key, default) } - override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) } - override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) } - @migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0") - override def values: scala.collection.Iterable[B] = synchronized { super.values } - override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator } - override def clone(): Self = synchronized { super.clone() } - override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) } - override def apply(key: A): B = synchronized { super.apply(key) } - override def keySet: scala.collection.Set[A] = synchronized { super.keySet } - @migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0") - override def keys: scala.collection.Iterable[A] = synchronized { super.keys } - override def keysIterator: Iterator[A] = synchronized { super.keysIterator } - override def isEmpty: Boolean = synchronized { super.isEmpty } - override def contains(key: A): Boolean = synchronized {super.contains(key) } - override def isDefinedAt(key: A) = synchronized { super.isDefinedAt(key) } - - // @deprecated("see Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) } - // can't override -, -- same type! - // @deprecated override def -(key: A): Self = synchronized { super.-(key) } - - // !!! todo: also add all other methods -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedQueue.scala b/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedQueue.scala deleted file mode 100644 index ee44f07df214..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ /dev/null @@ -1,103 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - - -/** This is a synchronized version of the `Queue[T]` class. It - * implements a data structure that allows one to insert and retrieve - * elements in a first-in-first-out (FIFO) manner. - * - * @tparam A type of elements contained in this synchronized queue. 
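Similarly, the `SynchronizedMap` deprecation notice points at `java.util.concurrent.ConcurrentHashMap`. One common way to use it with a Scala-flavoured API is through `asScala`, which wraps it as a `scala.collection.concurrent.Map`; a small sketch under that assumption:

```scala
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

object NoSyncMapDemo {
  def main(args: Array[String]): Unit = {
    // scala.collection.concurrent.Map[String, Int] backed by a ConcurrentHashMap
    val m = new ConcurrentHashMap[String, Int]().asScala
    m.put("one", 1)
    m += ("two" -> 2)
    m -= "one"
    println(m.get("two"))              // Some(2)
    println(m.putIfAbsent("two", 99))  // Some(2): key already bound, value kept
  }
}
```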
- * - * @author Matthias Zenger - * @version 1.0, 03/05/2004 - * @since 1 - * @define Coll `SynchronizedQueue` - * @define coll synchronized queue - */ -@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") -class SynchronizedQueue[A] extends Queue[A] { - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = synchronized { super.isEmpty } - - /** Inserts a single element at the end of the queue. - * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = synchronized[this.type] { super.+=(elem) } - - /** Adds all elements provided by a `TraversableOnce` object - * at the end of the queue. The elements are prepended in the order they - * are given out by the iterator. - * - * @param xs a traversable object - */ - override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=(xs) } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*): Unit = synchronized { super.++=(elems) } - - /** Returns the first element in the queue, and removes this element - * from the queue. - * - * @return the first element of the queue. - */ - override def dequeue(): A = synchronized { super.dequeue() } - - /** Returns the first element in the queue which satisfies the - * given predicate, and removes this element from the queue. - * - * @param p the predicate used for choosing the first element - * @return the first element of the queue for which p yields true - */ - override def dequeueFirst(p: A => Boolean): Option[A] = synchronized { super.dequeueFirst(p) } - - /** Returns all elements in the queue which satisfy the - * given predicate, and removes those elements from the queue. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. - */ - override def dequeueAll(p: A => Boolean): Seq[A] = synchronized { super.dequeueAll(p) } - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. - */ - override def front: A = synchronized { super.front } - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = synchronized { super.clear() } - - /** Checks if two queues are structurally identical. - * - * @return true, iff both queues contain the same sequence of elements. - */ - override def equals(that: Any): Boolean = synchronized { super.equals(that) } - - /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. 
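The predicate-based `dequeueFirst`/`dequeueAll` operations documented above come from plain `mutable.Queue`; the synchronized subclass only wraps them in `synchronized`. A small sketch of their behaviour on the unsynchronized queue:

```scala
import scala.collection.mutable

object QueueDemo {
  def main(args: Array[String]): Unit = {
    val q = mutable.Queue(1, 2, 3, 4, 5)
    println(q.dequeue())                 // 1: FIFO removal of the head
    println(q.dequeueFirst(_ % 2 == 0))  // Some(2): first matching element, removed
    println(q.dequeueAll(_ > 3))         // all remaining matches (4 and 5), removed
    println(q)                           // Queue(3)
  }
}
```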
- */ - override def toString() = synchronized { super.toString() } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedSet.scala b/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedSet.scala deleted file mode 100644 index 399630eb3c72..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedSet.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package mutable - -import script._ - -/** This class should be used as a mixin. It synchronizes the `Set` - * functions of the class into which it is mixed in. - * - * @tparam A type of the elements contained in this synchronized set. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 1 - * @define Coll `SynchronizedSet` - * @define coll synchronized set - */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap[A,Unit] as an alternative.", "2.11.0") -trait SynchronizedSet[A] extends Set[A] { - abstract override def size: Int = synchronized { - super.size - } - - override def isEmpty: Boolean = synchronized { - super.isEmpty - } - - abstract override def contains(elem: A) = synchronized { - super.contains(elem) - } - - abstract override def +=(elem: A): this.type = synchronized[this.type] { - super.+=(elem) - } - - override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { - super.++=(xs) - } - - abstract override def -=(elem: A): this.type = synchronized[this.type] { - super.-=(elem) - } - - override def --=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { - super.--=(xs) - } - - override def update(elem: A, included: Boolean): Unit = synchronized { - super.update(elem, included) - } - - override def add(elem: A): Boolean = synchronized { - super.add(elem) - } - - override def remove(elem: A): Boolean = synchronized { - super.remove(elem) - } - - override def intersect(that: scala.collection.GenSet[A]) = synchronized { - super.intersect(that) - } - - abstract override def clear(): Unit = synchronized { - super.clear() - } - - override def subsetOf(that: scala.collection.GenSet[A]) = synchronized { - super.subsetOf(that) - } - - override def foreach[U](f: A => U) = synchronized { - super.foreach(f) - } - - override def retain(p: A => Boolean) = synchronized { - super.retain(p) - } - - override def toList: List[A] = synchronized { - super.toList - } - - override def toString = synchronized { - super.toString - } - - @deprecated("scripting is deprecated", "2.11.0") - override def <<(cmd: Message[A]): Unit = synchronized { - super.<<(cmd) - } - - override def clone(): Self = synchronized { - super.clone() - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedStack.scala b/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedStack.scala deleted file mode 100644 index 2954a1f768b6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/SynchronizedStack.scala +++ /dev/null @@ -1,100 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection 
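For `SynchronizedSet`, the deprecation message suggests a `ConcurrentHashMap[A, Unit]`-style replacement. On Java 8+ the same idea is available directly as `ConcurrentHashMap.newKeySet`, which `asScala` turns into a mutable Scala `Set`; a sketch under those assumptions:

```scala
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

object NoSyncSetDemo {
  def main(args: Array[String]): Unit = {
    // A concurrent Set view backed by a ConcurrentHashMap (Java 8+).
    val s = ConcurrentHashMap.newKeySet[String]().asScala
    s += "a"
    s ++= Seq("b", "c")
    s -= "b"
    println(s.contains("a"))  // true
    println(s.toList.sorted)  // List(a, c)
  }
}
```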
-package mutable - - -/** This is a synchronized version of the `Stack[T]` class. It - * implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. - * - * @tparam A type of the elements contained in this stack. - * - * @author Matthias Zenger - * @version 1.0, 03/05/2004 - * @since 1 - * @define Coll `SynchronizedStack` - * @define coll synchronized stack - */ -@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0") -class SynchronizedStack[A] extends Stack[A] { - - /** Checks if the stack is empty. - * - * @return true, iff there is no element on the stack - */ - override def isEmpty: Boolean = synchronized { super.isEmpty } - - /** Pushes a single element on top of the stack. - * - * @param elem the element to push onto the stack - */ - override def push(elem: A): this.type = synchronized[this.type] { super.push(elem) } - - /** Push two or more elements onto the stack. The last element - * of the sequence will be on top of the new stack. - * - * @param elem1 the first element to push. - * @param elem2 the second element to push. - * @param elems the element sequence that will be pushed. - * @return the stack with the new elements on top. - */ - override def push(elem1: A, elem2: A, elems: A*): this.type = synchronized[this.type] { super.push(elem1, elem2, elems: _*) } - - /** Pushes all elements provided by a traversable object - * on top of the stack. The elements are pushed in the order the - * traversable object is traversed. - * - * @param xs a traversable object - */ - override def pushAll(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.pushAll(elems) } - - /** Returns the top element of the stack. This method will not remove - * the element from the stack. An error is signaled if there is no - * element on the stack. - * - * @return the top element - */ - override def top: A = synchronized { super.top } - - /** Removes the top element from the stack. - */ - override def pop(): A = synchronized { super.pop() } - - /** - * Removes all elements from the stack. After this operation completed, - * the stack will be empty. - */ - override def clear(): Unit = synchronized { super.clear() } - - /** Returns an iterator over all elements on the stack. This iterator - * is stable with respect to state changes in the stack object; i.e. - * such changes will not be reflected in the iterator. The iterator - * issues elements in the order they were inserted into the stack - * (FIFO order). - * - * @return an iterator over all stack elements. - */ - override def iterator: Iterator[A] = synchronized { super.iterator } - - /** Creates a list of all stack elements in FIFO order. - * - * @return the created list. - */ - override def toList: List[A] = synchronized { super.toList } - - /** Returns a textual representation of a stack as a string. - * - * @return the string representation of this stack. 
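The deprecation message above names `java.util.concurrent.LinkedBlockingDequeue`; the JDK class it presumably means is `LinkedBlockingDeque`, which can serve as a thread-safe LIFO stack. A minimal sketch:

```scala
import java.util.concurrent.LinkedBlockingDeque

object NoSyncStackDemo {
  def main(args: Array[String]): Unit = {
    val stack = new LinkedBlockingDeque[Int]()
    stack.push(1)          // addFirst
    stack.push(2)
    stack.push(3)
    println(stack.peek())  // 3: top element, not removed
    println(stack.pop())   // 3
    println(stack.pop())   // 2
  }
}
```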
- */ - override def toString = synchronized { super.toString } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Traversable.scala b/tests/scala2-library/src/library/scala/collection/mutable/Traversable.scala deleted file mode 100644 index d7ea376d285d..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Traversable.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import generic._ - -/** A trait for traversable collections that can be mutated. - * $traversableInfo - * @define mutability mutable - */ -trait Traversable[A] extends scala.collection.Traversable[A] -// with GenTraversable[A] - with GenericTraversableTemplate[A, Traversable] - with TraversableLike[A, Traversable[A]] - with Mutable { - override def companion: GenericCompanion[Traversable] = Traversable - override def seq: Traversable[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable traversable collection - * @define Coll `mutable.Traversable` - */ -object Traversable extends TraversableFactory[Traversable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Traversable[A]] = new ArrayBuffer -} - - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/TreeMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/TreeMap.scala deleted file mode 100644 index 14ae7c9c8cc8..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/TreeMap.scala +++ /dev/null @@ -1,188 +0,0 @@ -package scala -package collection -package mutable - -import scala.collection.generic._ -import scala.collection.mutable.{RedBlackTree => RB} - -/** - * $factoryInfo - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -object TreeMap extends MutableSortedMapFactory[TreeMap] { - - def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord) - - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = - new SortedMapCanBuildFrom[A, B] -} - -/** - * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam A the type of the keys contained in this tree map. - * @tparam B the type of the values associated with the keys. - * - * @author Rui Gonçalves - * @version 2.12 - * @since 2.12 - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -@SerialVersionUID(-2558985573956740112L) -sealed class TreeMap[A, B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A]) - extends AbstractSortedMap[A, B] - with SortedMap[A, B] - with MapLike[A, B, TreeMap[A, B]] - with SortedMapLike[A, B, TreeMap[A, B]] - with Serializable { - - /** - * Creates an empty `TreeMap`. - * @param ord the implicit ordering used to compare objects of type `A`. - * @return an empty `TreeMap`. 
- */ - def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) - - override def empty = TreeMap.empty - override protected[this] def newBuilder = TreeMap.newBuilder[A, B] - - /** - * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and - * vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMapView(from, until) - - def -=(key: A): this.type = { RB.delete(tree, key); this } - def +=(kv: (A, B)): this.type = { RB.insert(tree, kv._1, kv._2); this } - - def get(key: A) = RB.get(tree, key) - - def iterator = RB.iterator(tree) - def iteratorFrom(start: A) = RB.iterator(tree, Some(start)) - def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start)) - def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, Some(start)) - - override def size = RB.size(tree) - override def isEmpty = RB.isEmpty(tree) - override def contains(key: A) = RB.contains(tree, key) - - override def head = RB.min(tree).get - override def headOption = RB.min(tree) - override def last = RB.max(tree).get - override def lastOption = RB.max(tree) - - override def keysIterator = RB.keysIterator(tree) - override def valuesIterator = RB.valuesIterator(tree) - - override def foreach[U](f: ((A, B)) => U): Unit = RB.foreach(tree, f) - override def transform(f: (A, B) => B) = { RB.transform(tree, f); this } - override def clear(): Unit = RB.clear(tree) - - override def stringPrefix = "TreeMap" - - /** - * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - @SerialVersionUID(2219159283273389116L) - private[this] final class TreeMapView(from: Option[A], until: Option[A]) extends TreeMap[A, B](tree) { - - /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). 
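The view semantics described above (mutations flow through to the original tree, while out-of-bounds keys stay invisible to the view) can be observed directly with `rangeImpl`. A sketch assuming the 2.12 `mutable.TreeMap` defined in this file:

```scala
import scala.collection.mutable

object TreeMapViewDemo {
  def main(args: Array[String]): Unit = {
    val m = mutable.TreeMap(1 -> "a", 5 -> "b", 9 -> "c")
    val view = m.rangeImpl(Some(3), Some(8))  // keys in [3, 8)
    println(view.toList)                      // List((5,b))

    view += (4 -> "d")                        // mutating the view updates the original
    println(m.contains(4))                    // true

    view += (100 -> "x")                      // outside the bounds: stored in `m`...
    println(m.contains(100))                  // true
    println(view.contains(100))               // ...but not visible through the view: false
  }
}
```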
- */ - private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { - case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) - case (None, _) => newFrom - case _ => from - } - - /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). - */ - private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { - case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) - case (None, _) => newUntil - case _ => until - } - - /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). - */ - private[this] def isInsideViewBounds(key: A): Boolean = { - val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 - val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 - afterFrom && beforeUntil - } - - override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = - new TreeMapView(pickLowerBound(from), pickUpperBound(until)) - - override def get(key: A) = if (isInsideViewBounds(key)) RB.get(tree, key) else None - - override def iterator = RB.iterator(tree, from, until) - override def iteratorFrom(start: A) = RB.iterator(tree, pickLowerBound(Some(start)), until) - override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) - override def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, pickLowerBound(Some(start)), until) - - override def size = iterator.length - override def isEmpty = !iterator.hasNext - override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) - - override def head = headOption.get - override def headOption = { - val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) - (entry, until) match { - case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None - case _ => entry - } - } - - override def last = lastOption.get - override def lastOption = { - val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) - (entry, from) match { - case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None - case _ => entry - } - } - - // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized - // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See - // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
- override def foreach[U](f: ((A, B)) => U): Unit = iterator.foreach(f) - override def transform(f: (A, B) => B) = { - iterator.foreach { case (key, value) => update(key, f(key, value)) } - this - } - - override def valuesIterator: Iterator[B] = RB.valuesIterator(tree, from, until) - override def keysIterator: Iterator[A] = RB.keysIterator(tree, from, until) - - override def clone() = super.clone().rangeImpl(from, until) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/TreeSet.scala b/tests/scala2-library/src/library/scala/collection/mutable/TreeSet.scala deleted file mode 100644 index ada6f145ad42..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/TreeSet.scala +++ /dev/null @@ -1,189 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ -import scala.collection.mutable.{RedBlackTree => RB} - -/** - * @define Coll `mutable.TreeSet` - * @define coll mutable tree set - * @factoryInfo - * Companion object of TreeSet providing factory related utilities. - * - * @author Lucien Pereira - * - */ -object TreeSet extends MutableSortedSetFactory[TreeSet] { - /** - * The empty set of this type - */ - def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]() - - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, TreeSet[A]] = - new SortedSetCanBuildFrom[A] -} - -/** - * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam A the type of the keys contained in this tree set. - * - * @author Rui Gonçalves - * @version 2.12 - * @since 2.10 - * - * @define Coll mutable.TreeSet - * @define coll mutable tree set - */ -// Original API designed in part by Lucien Pereira -@SerialVersionUID(-3642111301929493640L) -sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) - extends AbstractSortedSet[A] - with SortedSet[A] - with SetLike[A, TreeSet[A]] - with SortedSetLike[A, TreeSet[A]] - with Serializable { - - if (ordering eq null) - throw new NullPointerException("ordering must not be null") - - /** - * Creates an empty `TreeSet`. - * @param ord the implicit ordering used to compare objects of type `A`. - * @return an empty `TreeSet`. - */ - def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) - - override def empty = TreeSet.empty - override protected[this] def newBuilder = TreeSet.newBuilder[A] - - /** - * Creates a ranged projection of this set. Any mutations in the ranged projection affect will update the original set - * and vice versa. - * - * Only keys between this projection's key range will ever appear as elements of this set, independently of whether - * the elements are added through the original set or through this view. That means that if one inserts an element in - * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. - * Mutations are always reflected in the original set, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. 
- * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetView(from, until) - - def -=(key: A): this.type = { RB.delete(tree, key); this } - def +=(elem: A): this.type = { RB.insert(tree, elem, null); this } - - def contains(elem: A) = RB.contains(tree, elem) - - def iterator = RB.keysIterator(tree) - def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start)) - override def iteratorFrom(start: A) = RB.keysIterator(tree, Some(start)) - - override def size = RB.size(tree) - override def isEmpty = RB.isEmpty(tree) - - override def head = RB.minKey(tree).get - override def headOption = RB.minKey(tree) - override def last = RB.maxKey(tree).get - override def lastOption = RB.maxKey(tree) - - override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - override def clear(): Unit = RB.clear(tree) - - override def stringPrefix = "TreeSet" - - /** - * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. - * - * Only keys between this projection's key range will ever appear as elements of this set, independently of whether - * the elements are added through the original set or through this view. That means that if one inserts an element in - * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. - * Mutations are always reflected in the original set, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - @SerialVersionUID(7087824939194006086L) - private[this] final class TreeSetView(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { - - /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). - */ - private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { - case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) - case (None, _) => newFrom - case _ => from - } - - /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). - */ - private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { - case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) - case (None, _) => newUntil - case _ => until - } - - /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). 
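`TreeSet` offers the same kind of ranged projection, plus `iteratorFrom`; nesting projections keeps the most constraining bounds, as the `pickLowerBound`/`pickUpperBound` helpers above compute. A sketch assuming the 2.12 `mutable.TreeSet` defined in this file:

```scala
import scala.collection.mutable

object TreeSetViewDemo {
  def main(args: Array[String]): Unit = {
    val s = mutable.TreeSet(1, 4, 7, 10)
    println(s.iteratorFrom(5).toList)         // List(7, 10)

    val view = s.rangeImpl(Some(3), Some(9))  // elements in [3, 9)
    println(view.headOption)                  // Some(4)
    println(view.lastOption)                  // Some(7): 10 is outside the bounds

    // Narrowing an existing view keeps the most constraining bounds: [3, 6).
    val narrower = view.rangeImpl(Some(1), Some(6))
    println(narrower.toList)                  // List(4)
  }
}
```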
- */ - private[this] def isInsideViewBounds(key: A): Boolean = { - val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 - val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 - afterFrom && beforeUntil - } - - override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = - new TreeSetView(pickLowerBound(from), pickUpperBound(until)) - - override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) - - override def iterator = RB.keysIterator(tree, from, until) - override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) - override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) - - override def size = iterator.length - override def isEmpty = !iterator.hasNext - - override def head = headOption.get - override def headOption = { - val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) - (elem, until) match { - case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None - case _ => elem - } - } - - override def last = lastOption.get - override def lastOption = { - val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) - (elem, from) match { - case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None - case _ => elem - } - } - - // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized - // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See - // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. - override def foreach[U](f: A => U): Unit = iterator.foreach(f) - - override def clone() = super.clone().rangeImpl(from, until) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/Undoable.scala b/tests/scala2-library/src/library/scala/collection/mutable/Undoable.scala deleted file mode 100644 index 482d61816530..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/Undoable.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - - -/** Classes that mix in the `Undoable` class provide an operation - * `undo` which can be used to undo the last operation. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 1 - */ -trait Undoable { - /** Undo the last operation. - */ - def undo(): Unit -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/UnrolledBuffer.scala b/tests/scala2-library/src/library/scala/collection/mutable/UnrolledBuffer.scala deleted file mode 100644 index dac1bccc41c5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/UnrolledBuffer.scala +++ /dev/null @@ -1,359 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.mutable - -import scala.collection.AbstractIterator -import scala.collection.Iterator -import scala.collection.generic._ -import scala.annotation.tailrec -import scala.reflect.ClassTag - -/** A buffer that stores elements in an unrolled linked list. 
- * - * Unrolled linked lists store elements in linked fixed size - * arrays. - * - * Unrolled buffers retain locality and low memory overhead - * properties of array buffers, but offer much more efficient - * element addition, since they never reallocate and copy the - * internal array. - * - * However, they provide `O(n/m)` complexity random access, - * where `n` is the number of elements, and `m` the size of - * internal array chunks. - * - * Ideal to use when: - * - elements are added to the buffer and then all of the - * elements are traversed sequentially - * - two unrolled buffers need to be concatenated (see `concat`) - * - * Better than singly linked lists for random access, but - * should still be avoided for such a purpose. - * - * @define coll unrolled buffer - * @define Coll `UnrolledBuffer` - * @author Aleksandar Prokopec - * - */ -@SerialVersionUID(1L) -sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) -extends scala.collection.mutable.AbstractBuffer[T] - with scala.collection.mutable.Buffer[T] - with scala.collection.mutable.BufferLike[T, UnrolledBuffer[T]] - with GenericClassTagTraversableTemplate[T, UnrolledBuffer] - with scala.collection.mutable.Builder[T, UnrolledBuffer[T]] - with Serializable -{ - import UnrolledBuffer.Unrolled - - @transient private var headptr = newUnrolled - @transient private var lastptr = headptr - @transient private var sz = 0 - - private[collection] def headPtr = headptr - private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head - private[collection] def lastPtr = lastptr - private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last - private[collection] def size_=(s: Int) = sz = s - - protected[this] override def newBuilder = new UnrolledBuffer[T] - - protected def newUnrolled = new Unrolled[T](this) - - // The below would allow more flexible behavior without requiring inheritance - // that is risky because all the important internals are private. - // private var myLengthPolicy: Int => Int = x => x - // - // /** Specifies how the array lengths should vary. - // * - // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length - // * policy can be given that changes this scheme to, for instance, an - // * exponential growth. - // * - // * @param nextLength computes the length of the next array from the length of the latest one - // */ - // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } - private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) - - def classTagCompanion = UnrolledBuffer - - /** Concatenates the target unrolled buffer to this unrolled buffer. - * - * The specified buffer `that` is cleared after this operation. This is - * an O(1) operation. 
- * - * @param that the unrolled buffer whose elements are added to this buffer - */ - def concat(that: UnrolledBuffer[T]) = { - // bind the two together - if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr - - // update size - sz += that.sz - - // `that` is no longer usable, so clear it - // here we rely on the fact that `clear` allocates - // new nodes instead of modifying the previous ones - that.clear() - - // return a reference to this - this - } - - def +=(elem: T) = { - lastptr = lastptr.append(elem) - sz += 1 - this - } - - def clear() { - headptr = newUnrolled - lastptr = headptr - sz = 0 - } - - def iterator: Iterator[T] = new AbstractIterator[T] { - var pos: Int = -1 - var node: Unrolled[T] = headptr - scan() - - private def scan() { - pos += 1 - while (pos >= node.size) { - pos = 0 - node = node.next - if (node eq null) return - } - } - def hasNext = node ne null - def next = if (hasNext) { - val r = node.array(pos) - scan() - r - } else Iterator.empty.next() - } - - // this should be faster than the iterator - override def foreach[U](f: T => U) = headptr.foreach(f) - - def result = this - - def length = sz - - def apply(idx: Int) = - if (idx >= 0 && idx < sz) headptr(idx) - else throw new IndexOutOfBoundsException(idx.toString) - - def update(idx: Int, newelem: T) = - if (idx >= 0 && idx < sz) headptr(idx) = newelem - else throw new IndexOutOfBoundsException(idx.toString) - - def remove(idx: Int) = - if (idx >= 0 && idx < sz) { - sz -= 1 - headptr.remove(idx, this) - } else throw new IndexOutOfBoundsException(idx.toString) - - def +=:(elem: T) = { - headptr = headptr prepend elem - sz += 1 - this - } - - def insertAll(idx: Int, elems: scala.collection.Traversable[T]) = - if (idx >= 0 && idx <= sz) { - headptr.insertAll(idx, elems, this) - sz += elems.size - } else throw new IndexOutOfBoundsException(idx.toString) - - private def writeObject(out: java.io.ObjectOutputStream) { - out.defaultWriteObject - out writeInt sz - for (elem <- this) out writeObject elem - } - - private def readObject(in: java.io.ObjectInputStream) { - in.defaultReadObject - - val num = in.readInt - - headPtr = newUnrolled - lastPtr = headPtr - sz = 0 - var i = 0 - while (i < num) { - this += in.readObject.asInstanceOf[T] - i += 1 - } - } - - override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this - - override def stringPrefix = "UnrolledBuffer" -} - - -object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Coll, T, UnrolledBuffer[T]] = - new GenericCanBuildFrom[T] - def newBuilder[T](implicit t: ClassTag[T]): Builder[T, UnrolledBuffer[T]] = new UnrolledBuffer[T] - - val waterline = 50 - val waterlineDelim = 100 // TODO -- fix this name! It's a denominator, not a delimiter. (But it's part of the API so we can't just change it.) - private[collection] val unrolledlength = 32 - - /** Unrolled buffer node. 
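The `concat` contract described above (constant-time linking that empties its argument) is easy to observe. A sketch assuming the 2.12 `UnrolledBuffer` defined in this file; the object name is illustrative:

```scala
import scala.collection.mutable.UnrolledBuffer

object UnrolledBufferDemo {
  def main(args: Array[String]): Unit = {
    val a = UnrolledBuffer(1, 2, 3)
    val b = UnrolledBuffer(4, 5)

    a.concat(b)               // O(1): links b's node chain onto a's last node
    println(a.mkString(", ")) // 1, 2, 3, 4, 5
    println(b.isEmpty)        // true: concat clears its argument

    a += 6                    // appended into the current fixed-size array chunk
    println(a.length)         // 6
  }
}
```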
- */ - class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { - private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) - private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) - - private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) - - // adds and returns itself or the new unrolled if full - @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { - array(size) = elem - size += 1 - this - } else { - next = new Unrolled[T](0, new Array[T](nextlength), null, buff) - next append elem - } - def foreach[U](f: T => U) { - var unrolled = this - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - f(elem) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - } - @tailrec final def apply(idx: Int): T = - if (idx < size) array(idx) else next.apply(idx - size) - @tailrec final def update(idx: Int, newelem: T): Unit = - if (idx < size) array(idx) = newelem else next.update(idx - size, newelem) - @tailrec final def locate(idx: Int): Unrolled[T] = - if (idx < size) this else next.locate(idx - size) - def prepend(elem: T) = if (size < array.length) { - // shift the elements of the array right - // then insert the element - shiftright() - array(0) = elem - size += 1 - this - } else { - // allocate a new node and store element - // then make it point to this - val newhead = new Unrolled[T](buff) - newhead append elem - newhead.next = this - newhead - } - // shifts right assuming enough space - private def shiftright() { - var i = size - 1 - while (i >= 0) { - array(i + 1) = array(i) - i -= 1 - } - } - // returns pointer to new last if changed - @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T = - if (idx < size) { - // remove the element - // then try to merge with the next bucket - val r = array(idx) - shiftleft(idx) - size -= 1 - if (tryMergeWithNext()) buffer.lastPtr = this - r - } else next.remove(idx - size, buffer) - // shifts left elements after `leftb` (overwrites `leftb`) - private def shiftleft(leftb: Int) { - var i = leftb - while (i < (size - 1)) { - array(i) = array(i + 1) - i += 1 - } - nullout(i, i + 1) - } - protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDelim)) { - // copy the next array, then discard the next node - Array.copy(next.array, 0, array, size, next.size) - size = size + next.size - next = next.next - if (next eq null) true else false // checks if last node was thrown out - } else false - - @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = { - if (idx < size) { - // divide this node at the appropriate position and insert all into head - // update new next - val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) - Array.copy(array, idx, newnextnode.array, 0, size - idx) - newnextnode.size = size - idx - newnextnode.next = next - - // update this - nullout(idx, size) - size = idx - next = null - - // insert everything from iterable to this - var curr = this - for (elem <- t) curr = curr append elem - curr.next = newnextnode - - // try to merge the last node of this with the newnextnode and fix tail pointer if needed - if (curr.tryMergeWithNext()) buffer.lastPtr = curr - else if (newnextnode.next eq 
null) buffer.lastPtr = newnextnode - } - else if (idx == size || (next eq null)) { - var curr = this - for (elem <- t) curr = curr append elem - } - else next.insertAll(idx - size, t, buffer) - } - private def nullout(from: Int, until: Int) { - var idx = from - while (idx < until) { - array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! - idx += 1 - } - } - - // assumes this is the last node - // `thathead` and `thatlast` are head and last node - // of the other unrolled list, respectively - def bind(thathead: Unrolled[T]) = { - assert(next eq null) - next = thathead - tryMergeWithNext() - } - - override def toString: String = array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") - } - -} - - -// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: -// Todo -- revisit whether inheritance is the best way to achieve this functionality -private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { - override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz - protected override def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/WeakHashMap.scala b/tests/scala2-library/src/library/scala/collection/mutable/WeakHashMap.scala deleted file mode 100644 index 433d054bfcf5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/WeakHashMap.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -import generic._ -import convert.Wrappers._ - -/** A hash map with references to entries which are weakly reachable. Entries are - * removed from this map when the key is no longer (strongly) referenced. This class wraps - * `java.util.WeakHashMap`. - * - * @tparam A type of keys contained in this map - * @tparam B type of values associated with the keys - * - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#weak_hash_maps "Scala's Collection Library overview"]] - * section on `Weak Hash Maps` for more information. - * - * @define Coll `WeakHashMap` - * @define coll weak hash map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `WeakHashMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[WeakHashMap, (A, B), WeakHashMap[A, B]]` - * is defined in object `WeakHashMap`. Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `WeakHashMap`. 
- * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap) - with JMapWrapperLike[A, B, WeakHashMap[A, B]] { - override def empty = new WeakHashMap[A, B] -} - -/** $factoryInfo - * @define Coll `WeakHashMap` - * @define coll weak hash map - */ -object WeakHashMap extends MutableMapFactory[WeakHashMap] { - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), WeakHashMap[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B]: WeakHashMap[A, B] = new WeakHashMap[A, B] -} - diff --git a/tests/scala2-library/src/library/scala/collection/mutable/WrappedArray.scala b/tests/scala2-library/src/library/scala/collection/mutable/WrappedArray.scala deleted file mode 100644 index 0b5ebe7e9a85..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/WrappedArray.scala +++ /dev/null @@ -1,246 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import scala.reflect.ClassTag -import scala.runtime.BoxedUnit -import scala.collection.generic._ -import scala.collection.parallel.mutable.ParArray -import scala.util.hashing.MurmurHash3 - -import java.util.Arrays - -/** - * A class representing `Array[T]`. - * - * @tparam T type of the elements in this wrapped array. - * - * @author Martin Odersky, Stephane Micheloud - * @version 1.0 - * @since 2.8 - * @define Coll `WrappedArray` - * @define coll wrapped array - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -abstract class WrappedArray[T] -extends AbstractSeq[T] - with IndexedSeq[T] - with ArrayLike[T, WrappedArray[T]] - with CustomParallelizable[T, ParArray[T]] -{ - - override protected[this] def thisCollection: WrappedArray[T] = this - override protected[this] def toCollection(repr: WrappedArray[T]): WrappedArray[T] = repr - - /** The tag of the element type */ - def elemTag: ClassTag[T] - - @deprecated("use elemTag instead", "2.10.0") - def elemManifest: ClassManifest[T] = ClassManifest.fromClass[T](elemTag.runtimeClass.asInstanceOf[Class[T]]) - - /** The length of the array */ - def length: Int - - /** The element at given index */ - def apply(index: Int): T - - /** Update element at given index */ - def update(index: Int, elem: T): Unit - - /** The underlying array */ - def array: Array[T] - - override def par = ParArray.handoff(array) - - private def elementClass: Class[_] = - array.getClass.getComponentType - - override def toArray[U >: T : ClassTag]: Array[U] = { - val thatElementClass = implicitly[ClassTag[U]].runtimeClass - if (elementClass eq thatElementClass) - array.asInstanceOf[Array[U]] - else - super.toArray[U] - } - - override def stringPrefix = "WrappedArray" - - /** Clones this object, including the underlying Array. */ - override def clone(): WrappedArray[T] = WrappedArray make array.clone() - - /** Creates new builder for this collection ==> move to subclasses - */ - override protected[this] def newBuilder: Builder[T, WrappedArray[T]] = - new WrappedArrayBuilder[T](elemTag) - -} - -/** A companion object used to create instances of `WrappedArray`. - */ -object WrappedArray { - // This is reused for all calls to empty. 
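The weak-key behaviour described above only shows up once the garbage collector actually runs, so any demonstration is inherently non-deterministic. A hedged sketch:

```scala
import scala.collection.mutable

object WeakHashMapDemo {
  def main(args: Array[String]): Unit = {
    val cache = mutable.WeakHashMap.empty[Object, String]

    var key = new Object
    cache(key) = "payload"
    println(cache.size)  // 1 while `key` is strongly reachable

    key = null           // drop the only strong reference to the key
    System.gc()          // only a hint; collection timing is not guaranteed
    Thread.sleep(100)
    println(cache.size)  // usually 0 once the weak key has been collected
  }
}
```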
- private val EmptyWrappedArray = new ofRef[AnyRef](new Array[AnyRef](0)) - def empty[T <: AnyRef]: WrappedArray[T] = EmptyWrappedArray.asInstanceOf[WrappedArray[T]] - - // If make is called explicitly we use whatever we're given, even if it's - // empty. This may be unnecessary (if WrappedArray is to honor the collections - // contract all empty ones must be equal, so discriminating based on the reference - // equality of an empty array should not come up) but we may as well be - // conservative since wrapRefArray contributes most of the unnecessary allocations. - def make[T](x: AnyRef): WrappedArray[T] = (x match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[WrappedArray[T]] - - implicit def canBuildFrom[T](implicit m: ClassTag[T]): CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] = - new CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] { - def apply(from: WrappedArray[_]): Builder[T, WrappedArray[T]] = - ArrayBuilder.make[T]()(m) mapResult WrappedArray.make[T] - def apply: Builder[T, WrappedArray[T]] = - ArrayBuilder.make[T]()(m) mapResult WrappedArray.make[T] - } - - def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer - - final class ofRef[T <: AnyRef](val array: Array[T]) extends WrappedArray[T] with Serializable { - lazy val elemTag = ClassTag[T](array.getClass.getComponentType) - def length: Int = array.length - def apply(index: Int): T = array(index).asInstanceOf[T] - def update(index: Int, elem: T) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofRef[_] => Arrays.equals(array.asInstanceOf[Array[AnyRef]], that.array.asInstanceOf[Array[AnyRef]]) - case _ => super.equals(that) - } - } - - final class ofByte(val array: Array[Byte]) extends WrappedArray[Byte] with Serializable { - def elemTag = ClassTag.Byte - def length: Int = array.length - def apply(index: Int): Byte = array(index) - def update(index: Int, elem: Byte) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedBytesHash(array) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - final class ofShort(val array: Array[Short]) extends WrappedArray[Short] with Serializable { - def elemTag = ClassTag.Short - def length: Int = array.length - def apply(index: Int): Short = array(index) - def update(index: Int, elem: Short) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofShort => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - final class ofChar(val array: Array[Char]) extends WrappedArray[Char] with Serializable { - def elemTag = ClassTag.Char - def length: Int = array.length - def apply(index: Int): Char = array(index) - def update(index: Int, elem: Char) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - 
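A small sketch of how `make` above dispatches on the runtime array type, and how `toArray` can avoid copying when the element classes match; it assumes the Scala 2.12 standard library (names are illustrative):

  import scala.collection.mutable.WrappedArray

  object WrappedArraySketch {
    def main(args: Array[String]): Unit = {
      val ints: WrappedArray[Int] = WrappedArray.make(Array(1, 2, 3))
      println(ints.getClass.getSimpleName)   // ofInt -- chosen from the runtime array type
      // When the requested element class matches the underlying one,
      // toArray hands back the wrapped array instead of copying.
      val back: Array[Int] = ints.toArray
      println(back eq ints.array)            // true
    }
  }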
final class ofInt(val array: Array[Int]) extends WrappedArray[Int] with Serializable { - def elemTag = ClassTag.Int - def length: Int = array.length - def apply(index: Int): Int = array(index) - def update(index: Int, elem: Int) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - final class ofLong(val array: Array[Long]) extends WrappedArray[Long] with Serializable { - def elemTag = ClassTag.Long - def length: Int = array.length - def apply(index: Int): Long = array(index) - def update(index: Int, elem: Long) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - final class ofFloat(val array: Array[Float]) extends WrappedArray[Float] with Serializable { - def elemTag = ClassTag.Float - def length: Int = array.length - def apply(index: Int): Float = array(index) - def update(index: Int, elem: Float) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - final class ofDouble(val array: Array[Double]) extends WrappedArray[Double] with Serializable { - def elemTag = ClassTag.Double - def length: Int = array.length - def apply(index: Int): Double = array(index) - def update(index: Int, elem: Double) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - final class ofBoolean(val array: Array[Boolean]) extends WrappedArray[Boolean] with Serializable { - def elemTag = ClassTag.Boolean - def length: Int = array.length - def apply(index: Int): Boolean = array(index) - def update(index: Int, elem: Boolean) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - final class ofUnit(val array: Array[Unit]) extends WrappedArray[Unit] with Serializable { - def elemTag = ClassTag.Unit - def length: Int = array.length - def apply(index: Int): Unit = array(index) - def update(index: Int, elem: Unit) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofUnit => array.length == that.array.length - case _ => super.equals(that) - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/tests/scala2-library/src/library/scala/collection/mutable/WrappedArrayBuilder.scala deleted file mode 100644 index 5bc581145096..000000000000 --- a/tests/scala2-library/src/library/scala/collection/mutable/WrappedArrayBuilder.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package mutable - -import scala.reflect.ClassTag - -/** A builder class for arrays. 
- * - * This builder can be reused. - * - * @tparam A type of elements that can be added to this builder. - * @param tag class tag for objects of type `A`. - * - * @since 2.8 - */ -class WrappedArrayBuilder[A](tag: ClassTag[A]) extends ReusableBuilder[A, WrappedArray[A]] { - - @deprecated("use tag instead", "2.10.0") - val manifest: ClassTag[A] = tag - - private var elems: WrappedArray[A] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): WrappedArray[A] = { - val runtimeClass = tag.runtimeClass - val newelems = runtimeClass match { - case java.lang.Byte.TYPE => new WrappedArray.ofByte(new Array[Byte](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Short.TYPE => new WrappedArray.ofShort(new Array[Short](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Character.TYPE => new WrappedArray.ofChar(new Array[Char](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Integer.TYPE => new WrappedArray.ofInt(new Array[Int](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Long.TYPE => new WrappedArray.ofLong(new Array[Long](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Float.TYPE => new WrappedArray.ofFloat(new Array[Float](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Double.TYPE => new WrappedArray.ofDouble(new Array[Double](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Boolean.TYPE => new WrappedArray.ofBoolean(new Array[Boolean](size)).asInstanceOf[WrappedArray[A]] - case java.lang.Void.TYPE => new WrappedArray.ofUnit(new Array[Unit](size)).asInstanceOf[WrappedArray[A]] - case _ => new WrappedArray.ofRef[A with AnyRef](tag.newArray(size).asInstanceOf[Array[A with AnyRef]]).asInstanceOf[WrappedArray[A]] - } - if (this.size > 0) Array.copy(elems.array, 0, newelems.array, 0, this.size) - newelems - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: A): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - // todo: add ++= -} diff --git a/tests/scala2-library/src/library/scala/collection/package.scala b/tests/scala2-library/src/library/scala/collection/package.scala deleted file mode 100644 index 6df254c0e0df..000000000000 --- a/tests/scala2-library/src/library/scala/collection/package.scala +++ /dev/null @@ -1,122 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** - * Contains the base traits and objects needed to use and extend Scala's collection library. - * - * == Guide == - * - * A detailed guide for using the collections library is available - * at [[http://docs.scala-lang.org/overviews/collections/introduction.html]]. - * Developers looking to extend the collections library can find a description - * of its architecture at - * [[http://docs.scala-lang.org/overviews/core/architecture-of-scala-collections.html]]. 
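A short usage sketch of the builder above, assuming the Scala 2.12 standard library; the values and the explicit `ClassTag.Int` are illustrative:

  import scala.collection.mutable.WrappedArrayBuilder
  import scala.reflect.ClassTag

  object WrappedArrayBuilderSketch {
    def main(args: Array[String]): Unit = {
      val b = new WrappedArrayBuilder[Int](ClassTag.Int)
      b.sizeHint(3)          // pre-allocates the backing array once
      b += 1
      b += 2
      b += 3
      println(b.result())    // WrappedArray(1, 2, 3)
      b.clear()              // the builder is reusable after clear()
    }
  }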
- * - * == Using Collections == - * - * It is convenient to treat all collections as either - * a [[scala.collection.Traversable]] or [[scala.collection.Iterable]], as - * these traits define the vast majority of operations - * on a collection. - * - * Collections can, of course, be treated as specifically as needed, and - * the library is designed to ensure that - * the methods that transform collections will return a collection of the same - * type: {{{ - * scala> val array = Array(1,2,3,4,5,6) - * array: Array[Int] = Array(1, 2, 3, 4, 5, 6) - * - * scala> array map { _.toString } - * res0: Array[String] = Array(1, 2, 3, 4, 5, 6) - * - * scala> val list = List(1,2,3,4,5,6) - * list: List[Int] = List(1, 2, 3, 4, 5, 6) - * - * scala> list map { _.toString } - * res1: List[String] = List(1, 2, 3, 4, 5, 6) - * - * }}} - * - * == Creating Collections == - * - * The most common way to create a collection is to use its companion object as - * a factory. The three most commonly used collections are - * [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and - * [[scala.collection.immutable.Map]]. - * They can be used directly as shown below since their companion objects are - * all available as type aliases in either the [[scala]] package or in - * `scala.Predef`. New collections are created like this: - * {{{ - * scala> val seq = Seq(1,2,3,4,1) - * seq: Seq[Int] = List(1, 2, 3, 4, 1) - * - * scala> val set = Set(1,2,3,4,1) - * set: scala.collection.immutable.Set[Int] = Set(1, 2, 3, 4) - * - * scala> val map = Map(1 -> "one", 2 -> "two", 3 -> "three", 2 -> "too") - * map: scala.collection.immutable.Map[Int,String] = Map(1 -> one, 2 -> too, 3 -> three) - * }}} - * - * It is also typical to prefer the [[scala.collection.immutable]] collections - * over those in [[scala.collection.mutable]]; the types aliased in - * the `scala.Predef` object are the immutable versions. - * - * Also note that the collections library was carefully designed to include several implementations of - * each of the three basic collection types. These implementations have specific performance - * characteristics which are described - * in [[http://docs.scala-lang.org/overviews/collections/performance-characteristics.html the guide]]. - * - * The concrete parallel collections also have specific performance characteristics which are - * described in [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#performance-characteristics the parallel collections guide]] - * - * === Converting to and from Java Collections === - * - * The [[scala.collection.JavaConverters]] object provides a collection - * of decorators that allow converting between Scala and Java collections using `asScala` - * and `asJava` methods. - */ -package object collection { - import scala.collection.generic.CanBuildFrom - - /** Provides a CanBuildFrom instance that builds a specific target collection (`To') - * irrespective of the original collection (`From'). - */ - def breakOut[From, T, To](implicit b: CanBuildFrom[Nothing, T, To]): CanBuildFrom[From, T, To] = - // can't just return b because the argument to apply could be cast to From in b - new CanBuildFrom[From, T, To] { - def apply(from: From) = b.apply() - def apply() = b.apply() - } -} - -package collection { - /** Collection internal utility functions. 
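To illustrate `breakOut` as defined above, a sketch assuming the Scala 2.12 standard library, where the expected type selects the target collection so no intermediate `List[(Int, String)]` is built:

  import scala.collection.breakOut

  object BreakOutSketch {
    def main(args: Array[String]): Unit = {
      val words = List("a", "bb", "ccc")
      // The expected type drives breakOut: map builds the Map directly.
      val byLength: Map[Int, String] = words.map(w => w.length -> w)(breakOut)
      println(byLength)   // Map(1 -> a, 2 -> bb, 3 -> ccc)
    }
  }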
- */ - private[collection] object DebugUtils { - def unsupported(msg: String) = throw new UnsupportedOperationException(msg) - def noSuchElement(msg: String) = throw new NoSuchElementException(msg) - def indexOutOfBounds(index: Int) = throw new IndexOutOfBoundsException(index.toString) - def illegalArgument(msg: String) = throw new IllegalArgumentException(msg) - - def buildString(closure: (Any => Unit) => Unit): String = { - var output = "" - closure(output += _ + "\n") - - output - } - - def arrayString[T](array: Array[T], from: Int, until: Int): String = { - array.slice(from, until) map { - case null => "n/a" - case x => "" + x - } mkString " | " - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/Combiner.scala b/tests/scala2-library/src/library/scala/collection/parallel/Combiner.scala deleted file mode 100644 index abccf5d402c4..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/Combiner.scala +++ /dev/null @@ -1,98 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.Parallel -import scala.collection.mutable.Builder -import scala.collection.generic.Sizing - -/** The base trait for all combiners. - * A combiner incremental collection construction just like - * a regular builder, but also implements an efficient merge operation of two builders - * via `combine` method. Once the collection is constructed, it may be obtained by invoking - * the `result` method. - * - * The complexity of the `combine` method should be less than linear for best - * performance. The `result` method doesn't have to be a constant time operation, - * but may be performed in parallel. - * - * @tparam Elem the type of the elements added to the builder - * @tparam To the type of the collection the builder produces - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { - - @transient - @volatile - var _combinerTaskSupport = defaultTaskSupport - - def combinerTaskSupport = { - val cts = _combinerTaskSupport - if (cts eq null) { - _combinerTaskSupport = defaultTaskSupport - defaultTaskSupport - } else cts - } - - def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts - - /** Combines the contents of the receiver builder and the `other` builder, - * producing a new builder containing both their elements. - * - * This method may combine the two builders by copying them into a larger collection, - * by producing a lazy view that gets evaluated once `result` is invoked, or use - * a merge operation specific to the data structure in question. - * - * Note that both the receiver builder and `other` builder become invalidated - * after the invocation of this method, and should be cleared (see `clear`) - * if they are to be used again. - * - * Also, combining two combiners `c1` and `c2` for which `c1 eq c2` is `true`, that is, - * they are the same objects in memory: - * - * {{{ - * c1.combine(c2) - * }}} - * - * always does nothing and returns `c1`. 
- * - * @tparam N the type of elements contained by the `other` builder - * @tparam NewTo the type of collection produced by the `other` builder - * @param other the other builder - * @return the parallel builder containing both the elements of this and the `other` builder - */ - def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] - - /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared - * across several threads constructing the collection. - * - * By default, this method returns `false`. - */ - def canBeShared: Boolean = false - - /** Constructs the result and sets the appropriate tasksupport object to the resulting collection - * if this is applicable. - */ - def resultWithTaskSupport: To = { - val res = result() - setTaskSupport(res, combinerTaskSupport) - } -} - -/* -private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] { - abstract override def result = { - val res = super.result - res - } -} -*/ diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParIterable.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParIterable.scala deleted file mode 100644 index a5ba8c49adec..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParIterable.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.GenIterable -import scala.collection.generic._ -import scala.collection.parallel.mutable.ParArrayCombiner - -/** A template trait for parallel iterable collections. 
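To make the `combine` contract above concrete, a deliberately naive combiner sketch, assuming the Scala 2.12 parallel collections; the class name and the linear-time merge are illustrative only (real combiners merge in better than linear time, as the scaladoc notes):

  import scala.collection.mutable.ArrayBuffer
  import scala.collection.parallel.Combiner

  // Keeps its elements in an ArrayBuffer and merges by copying.
  class BufferCombiner[T] extends Combiner[T, Vector[T]] {
    private val buf = ArrayBuffer.empty[T]
    def +=(elem: T): this.type = { buf += elem; this }
    def clear(): Unit = buf.clear()
    def size: Int = buf.size
    def result(): Vector[T] = buf.toVector
    def combine[N <: T, NewTo >: Vector[T]](other: Combiner[N, NewTo]): Combiner[N, NewTo] =
      if (other eq this) this   // the contract: combining a combiner with itself is a no-op
      else {
        val that = other.asInstanceOf[BufferCombiner[T]]  // sketch only: assume the same combiner type
        buf ++= that.buf                                  // linear merge; real combiners avoid this copy
        this
      }
  }

Two instances `c1` and `c2` can then be merged with `c1 combine c2`, after which only the returned combiner should be used, as described above.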
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[+T] -extends GenIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - //protected[this] override def newBuilder = ParIterable.newBuilder[T] - - def stringPrefix = "ParIterable" -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] - - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] -} - diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParIterableLike.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParIterableLike.scala deleted file mode 100644 index e66dc7f24749..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParIterableLike.scala +++ /dev/null @@ -1,1500 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.language.{ higherKinds, implicitConversions } - -import scala.collection.mutable.Builder -import scala.collection.mutable.ArrayBuffer -import scala.collection.IterableLike -import scala.collection.Parallel -import scala.collection.Parallelizable -import scala.collection.CustomParallelizable -import scala.collection.generic._ -import scala.collection.GenIterableLike -import scala.collection.GenIterable -import scala.collection.GenTraversableOnce -import scala.collection.GenTraversable -import immutable.HashMapCombiner -import scala.reflect.ClassTag - -import scala.annotation.unchecked.uncheckedVariance - -import scala.collection.parallel.ParallelCollectionImplicits._ - - -/** A template trait for parallel collections of type `ParIterable[T]`. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * @tparam Repr the type of the actual collection containing the elements - * - * @define paralleliterableinfo - * This is a base trait for Scala parallel collections. It defines behaviour - * common to all parallel collections. Concrete parallel collections should - * inherit this trait and `ParIterable` if they want to define specific combiner - * factories. - * - * Parallel operations are implemented with divide and conquer style algorithms that - * parallelize well. The basic idea is to split the collection into smaller parts until - * they are small enough to be operated on sequentially. - * - * All of the parallel operations are implemented as tasks within this trait. Tasks rely - * on the concept of splitters, which extend iterators. Every parallel collection defines: - * - * {{{ - * def splitter: IterableSplitter[T] - * }}} - * - * which returns an instance of `IterableSplitter[T]`, which is a subtype of `Splitter[T]`. - * Splitters have a method `remaining` to check the remaining number of elements, - * and method `split` which is defined by splitters. 
Method `split` divides the splitters - * iterate over into disjunct subsets: - * - * {{{ - * def split: Seq[Splitter] - * }}} - * - * which splits the splitter into a sequence of disjunct subsplitters. This is typically a - * very fast operation which simply creates wrappers around the receiver collection. - * This can be repeated recursively. - * - * Tasks are scheduled for execution through a - * [[scala.collection.parallel.TaskSupport]] object, which can be changed - * through the `tasksupport` setter of the collection. - * - * Method `newCombiner` produces a new combiner. Combiners are an extension of builders. - * They provide a method `combine` which combines two combiners and returns a combiner - * containing elements of both combiners. - * This method can be implemented by aggressively copying all the elements into the new combiner - * or by lazily binding their results. It is recommended to avoid copying all of - * the elements for performance reasons, although that cost might be negligible depending on - * the use case. Standard parallel collection combiners avoid copying when merging results, - * relying either on a two-step lazy construction or specific data-structure properties. - * - * Methods: - * - * {{{ - * def seq: Sequential - * def par: Repr - * }}} - * - * produce the sequential or parallel implementation of the collection, respectively. - * Method `par` just returns a reference to this parallel collection. - * Method `seq` is efficient - it will not copy the elements. Instead, - * it will create a sequential version of the collection using the same underlying data structure. - * Note that this is not the case for sequential collections in general - they may copy the elements - * and produce a different underlying data structure. - * - * The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible - * way to change between different collection types. - * - * Since this trait extends the `GenIterable` trait, methods like `size` must also - * be implemented in concrete collections, while `iterator` forwards to `splitter` by - * default. - * - * Each parallel collection is bound to a specific fork/join pool, on which dormant worker - * threads are kept. The fork/join pool contains other information such as the parallelism - * level, that is, the number of processors used. When a collection is created, it is assigned the - * default fork/join pool found in the `scala.parallel` package object. - * - * Parallel collections are not necessarily ordered in terms of the `foreach` - * operation (see `Traversable`). Parallel sequences have a well defined order for iterators - creating - * an iterator and traversing the elements linearly will always yield the same order. - * However, bulk operations such as `foreach`, `map` or `filter` always occur in undefined orders for all - * parallel collections. - * - * Existing parallel collection implementations provide strict parallel iterators. Strict parallel iterators are aware - * of the number of elements they have yet to traverse. It's also possible to provide non-strict parallel iterators, - * which do not know the number of elements remaining. To do this, the new collection implementation must override - * `isStrictSplitterCollection` to `false`. This will make some operations unavailable. - * - * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `splitter`, - * `newCombiner` and `seq`. 
Having an implicit combiner factory requires extending this trait in addition, as - * well as providing a companion object, as with regular collections. - * - * Method `size` is implemented as a constant time operation for parallel collections, and parallel collection - * operations rely on this assumption. - * - * @author Aleksandar Prokopec - * @since 2.9 - * - * @define sideeffects - * The higher-order functions passed to certain operations may contain side-effects. Since implementations - * of bulk operations may not be sequential, this means that side-effects may not be predictable and may - * produce data-races, deadlocks or invalidation of state if care is not taken. It is up to the programmer - * to either avoid using side-effects or to use some form of synchronization when accessing mutable data. - * - * @define pbfinfo - * An implicit value of class `CanCombineFrom` which determines the - * result class `That` from the current representation type `Repr` and - * and the new element type `B`. This builder factory can provide a parallel - * builder for the resulting collection. - * - * @define abortsignalling - * This method will use `abort` signalling capabilities. This means - * that splitters may send and read `abort` signals. - * - * @define indexsignalling - * This method will use `indexFlag` signalling capabilities. This means - * that splitters may set and read the `indexFlag` state. - * @define Coll `ParIterable` - * @define coll parallel iterable - */ -trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]] -extends GenIterableLike[T, Repr] - with CustomParallelizable[T, Repr] - with Parallel - with HasNewCombiner[T, Repr] -{ -self: ParIterableLike[T, Repr, Sequential] => - - @transient - @volatile - private var _tasksupport = defaultTaskSupport - - protected def initTaskSupport() { - _tasksupport = defaultTaskSupport - } - - /** The task support object which is responsible for scheduling and - * load-balancing tasks to processors. - * - * @see [[scala.collection.parallel.TaskSupport]] - */ - def tasksupport = { - val ts = _tasksupport - if (ts eq null) { - _tasksupport = defaultTaskSupport - defaultTaskSupport - } else ts - } - - /** Changes the task support object which is responsible for scheduling and - * load-balancing tasks to processors. - * - * A task support object can be changed in a parallel collection after it - * has been created, but only during a quiescent period, i.e. while there - * are no concurrent invocations to parallel collection methods. - * - * Here is a way to change the task support of a parallel collection: - * - * {{{ - * import scala.collection.parallel._ - * val pc = mutable.ParArray(1, 2, 3) - * pc.tasksupport = new ForkJoinTaskSupport( - * new java.util.concurrent.ForkJoinPool(2)) - * }}} - * - * @see [[scala.collection.parallel.TaskSupport]] - */ - def tasksupport_=(ts: TaskSupport) = _tasksupport = ts - - def seq: Sequential - - def repr: Repr = this.asInstanceOf[Repr] - - final def isTraversableAgain = true - - def hasDefiniteSize = true - - def isEmpty = size == 0 - - def nonEmpty = size != 0 - - def head = iterator.next() - - def headOption = if (nonEmpty) Some(head) else None - - def tail = drop(1) - - def last = { - var lst = head - for (x <- this.seq) lst = x - lst - } - - def lastOption = if (nonEmpty) Some(last) else None - - def init = take(size - 1) - - /** Creates a new parallel iterator used to traverse the elements of this parallel collection. 
- * This iterator is more specific than the iterator of the returned by `iterator`, and augmented - * with additional accessor and transformer methods. - * - * @return a parallel iterator - */ - protected[parallel] def splitter: IterableSplitter[T] - - /** Creates a new split iterator used to traverse the elements of this collection. - * - * By default, this method is implemented in terms of the protected `splitter` method. - * - * @return a split iterator - */ - def iterator: Splitter[T] = splitter - - override def par: Repr = repr - - /** Denotes whether this parallel collection has strict splitters. - * - * This is true in general, and specific collection instances may choose to - * override this method. Such collections will fail to execute methods - * which rely on splitters being strict, i.e. returning a correct value - * in the `remaining` method. - * - * This method helps ensure that such failures occur on method invocations, - * rather than later on and in unpredictable ways. - */ - def isStrictSplitterCollection = true - - /** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool. - * This method forwards the call to `newCombiner`. - */ - //protected[this] def newBuilder: scala.collection.mutable.Builder[T, Repr] = newCombiner - - /** Optionally reuses an existing combiner for better performance. By default it doesn't - subclasses may override this behaviour. - * The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there - * was no previous phase (in which case this method must return `newc`). - * - * @param oldc The combiner that is the result of the previous task, or `None` if there was no previous task. - * @param newc The new, empty combiner that can be used. - * @return Either `newc` or `oldc`. 
- */ - protected def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]): Combiner[S, That] = newc - - type SSCTask[R, Tp] = StrictSplitterCheckTask[R, Tp] - - /* helper traits - to avoid structural invocations */ - - trait TaskOps[R, Tp] { - def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] - // public method with inaccessible types in parameters - def compose[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3): SeqComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]] - def parallel[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3): ParComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]] - } - - trait BuilderOps[Elem, To] { - trait Otherwise[Cmb] { - def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]): Unit - } - - def ifIs[Cmb](isbody: Cmb => Unit): Otherwise[Cmb] - def isCombiner: Boolean - def asCombiner: Combiner[Elem, To] - } - - trait SignallingOps[PI <: DelegatedSignalling] { - def assign(cntx: Signalling): PI - } - - /* convenience task operations wrapper */ - protected implicit def task2ops[R, Tp](tsk: SSCTask[R, Tp]): TaskOps[R, Tp] = new TaskOps[R, Tp] { - def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) { - def map(r: R): R1 = mapping(r) - } - - def compose[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3) = new SeqComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]](tsk, t2) { - def combineResults(fr: R, sr: R2): R3 = resCombiner(fr, sr) - } - - def parallel[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3) = new ParComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]](tsk, t2) { - def combineResults(fr: R, sr: R2): R3 = resCombiner(fr, sr) - } - } - - protected def wrap[R](body: => R) = new NonDivisible[R] { - def leaf(prevr: Option[R]) = result = body - @volatile var result: R = null.asInstanceOf[R] - } - - /* convenience signalling operations wrapper */ - protected implicit def delegatedSignalling2ops[PI <: DelegatedSignalling](it: PI): SignallingOps[PI] = new SignallingOps[PI] { - def assign(cntx: Signalling): PI = { - it.signalDelegate = cntx - it - } - } - - protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]): BuilderOps[Elem, To] = new BuilderOps[Elem, To] { - def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] { - def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]) { - if (cb.getClass == t.runtimeClass) isbody(cb.asInstanceOf[Cmb]) else notbody - } - } - def isCombiner = cb.isInstanceOf[Combiner[_, _]] - def asCombiner = cb.asInstanceOf[Combiner[Elem, To]] - } - - protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] { - def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass` - def apply() = bf.apply() - } - - protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr] - - def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end) - - def mkString(sep: String): String = seq.mkString("", sep, "") - - def mkString: String = seq.mkString("") - - override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - - def canEqual(other: Any) = true - - /** Reduces the elements of this sequence using the specified associative binary operator. 
- * - * $undefinedorder - * - * Note this method has a different signature than the `reduceLeft` - * and `reduceRight` methods of the trait `Traversable`. - * The result of reducing may only be a supertype of this parallel collection's - * type parameter `T`. - * - * @tparam U A type parameter for the binary operator, a supertype of `T`. - * @param op A binary operator that must be associative. - * @return The result of applying reduce operator `op` between all the elements if the collection is nonempty. - * @throws UnsupportedOperationException - * if this $coll is empty. - */ - def reduce[U >: T](op: (U, U) => U): U = { - tasksupport.executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get }) - } - - /** Optionally reduces the elements of this sequence using the specified associative binary operator. - * - * $undefinedorder - * - * Note this method has a different signature than the `reduceLeftOption` - * and `reduceRightOption` methods of the trait `Traversable`. - * The result of reducing may only be a supertype of this parallel collection's - * type parameter `T`. - * - * @tparam U A type parameter for the binary operator, a supertype of `T`. - * @param op A binary operator that must be associative. - * @return An option value containing result of applying reduce operator `op` between all - * the elements if the collection is nonempty, and `None` otherwise. - */ - def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op)) - - /** Folds the elements of this sequence using the specified associative binary operator. - * The order in which the elements are reduced is unspecified and may be nondeterministic. - * - * Note this method has a different signature than the `foldLeft` - * and `foldRight` methods of the trait `Traversable`. - * The result of folding may only be a supertype of this parallel collection's - * type parameter `T`. - * - * @tparam U a type parameter for the binary operator, a supertype of `T`. - * @param z a neutral element for the fold operation, it may be added to the result - * an arbitrary number of times, not changing the result (e.g. `Nil` for list concatenation, - * 0 for addition, or 1 for multiplication) - * @param op a binary operator that must be associative - * @return the result of applying fold operator `op` between all the elements and `z` - */ - def fold[U >: T](z: U)(op: (U, U) => U): U = { - tasksupport.executeAndWaitResult(new Fold(z, op, splitter)) - } - - /** Aggregates the results of applying an operator to subsequent elements. - * - * This is a more general form of `fold` and `reduce`. It has similar semantics, but does - * not require the result to be a supertype of the element type. It traverses the elements in - * different partitions sequentially, using `seqop` to update the result, and then - * applies `combop` to results from different partitions. The implementation of this - * operation may operate on an arbitrary number of collection partitions, so `combop` - * may be invoked arbitrary number of times. - * - * For example, one might want to process some elements and then produce a `Set`. In this - * case, `seqop` would process an element and append it to the set, while `combop` - * would concatenate two sets from different partitions together. The initial value - * `z` would be an empty set. 
- * - * {{{ - * pc.aggregate(Set[Int]())(_ += process(_), _ ++ _) - * }}} - * - * Another example is calculating geometric mean from a collection of doubles - * (one would typically require big doubles for this). - * - * @tparam S the type of accumulated results - * @param z the initial value for the accumulated result of the partition - this - * will typically be the neutral element for the `seqop` operator (e.g. - * `Nil` for list concatenation or `0` for summation) and may be evaluated - * more than once - * @param seqop an operator used to accumulate results within a partition - * @param combop an associative operator used to combine results from different partitions - */ - def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = { - tasksupport.executeAndWaitResult(new Aggregate(() => z, seqop, combop, splitter)) - } - - def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op) - - def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op) - - def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op) - - def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op) - - def reduceLeftOption[U >: T](op: (U, T) => U): Option[U] = seq.reduceLeftOption(op) - - def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op) - - /** Applies a function `f` to all the elements of $coll in an undefined order. - * - * @tparam U the result type of the function applied to each element, which is always discarded - * @param f function applied to each element - */ - def foreach[U](f: T => U) = { - tasksupport.executeAndWaitResult(new Foreach(f, splitter)) - } - - def count(p: T => Boolean): Int = { - tasksupport.executeAndWaitResult(new Count(p, splitter)) - } - - def sum[U >: T](implicit num: Numeric[U]): U = { - tasksupport.executeAndWaitResult(new Sum[U](num, splitter)) - } - - def product[U >: T](implicit num: Numeric[U]): U = { - tasksupport.executeAndWaitResult(new Product[U](num, splitter)) - } - - def min[U >: T](implicit ord: Ordering[U]): T = { - tasksupport.executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T] - } - - def max[U >: T](implicit ord: Ordering[U]): T = { - tasksupport.executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T] - } - - def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = { - if (isEmpty) throw new UnsupportedOperationException("empty.maxBy") - - reduce((x, y) => if (cmp.gteq(f(x), f(y))) x else y) - } - - def minBy[S](f: T => S)(implicit cmp: Ordering[S]): T = { - if (isEmpty) throw new UnsupportedOperationException("empty.minBy") - - reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y) - } - - def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.map(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.map(f)(bf2seq(bf))*/ - - def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.collect(pf)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - 
tasksupport.executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result }) - } otherwise seq.collect(pf)(bf2seq(bf))*/ - - def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.flatMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.flatMap(f)(bf2seq(bf))*/ - - /** Tests whether a predicate holds for all elements of this $coll. - * - * $abortsignalling - * - * @param p a predicate used to test elements - * @return true if `p` holds for all elements, false otherwise - */ - def forall(@deprecatedName('pred) p: T => Boolean): Boolean = { - tasksupport.executeAndWaitResult(new Forall(p, splitter assign new DefaultSignalling with VolatileAbort)) - } - - /** Tests whether a predicate holds for some element of this $coll. - * - * $abortsignalling - * - * @param p a predicate used to test elements - * @return true if `p` holds for some element, false otherwise - */ - def exists(@deprecatedName('pred) p: T => Boolean): Boolean = { - tasksupport.executeAndWaitResult(new Exists(p, splitter assign new DefaultSignalling with VolatileAbort)) - } - - /** Finds some element in the collection for which the predicate holds, if such - * an element exists. The element may not necessarily be the first such element - * in the iteration order. - * - * If there are multiple elements obeying the predicate, the choice is nondeterministic. - * - * $abortsignalling - * - * @param p predicate used to test the elements - * @return an option value with the element if such an element exists, or `None` otherwise - */ - def find(@deprecatedName('pred) p: T => Boolean): Option[T] = { - tasksupport.executeAndWaitResult(new Find(p, splitter assign new DefaultSignalling with VolatileAbort)) - } - - /** Creates a combiner factory. Each combiner factory instance is used - * once per invocation of a parallel transformer method for a single - * collection. - * - * The default combiner factory creates a new combiner every time it - * is requested, unless the combiner is thread-safe as indicated by its - * `canBeShared` method. In this case, the method returns a factory which - * returns the same combiner each time. This is typically done for - * concurrent parallel collections, the combiners of which allow - * thread safe access. 
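Since `reduce`, `fold`, `aggregate` and the searching operations above all split the work across partitions, a small runnable sketch may help; it assumes the Scala 2.12 parallel collections, and `ParVector` with these values is purely illustrative:

  import scala.collection.parallel.immutable.ParVector

  object ParOpsSketch {
    def main(args: Array[String]): Unit = {
      val xs = ParVector(1, 2, 3, 4, 5, 6)

      // fold needs an associative operator and a neutral element,
      // because partitions are folded independently and then merged.
      println(xs.fold(0)(_ + _))                         // 21

      // aggregate builds a per-partition (sum, count) pair with seqop and
      // merges pairs with combop; the result type need not be a supertype
      // of the element type.
      val (sum, count) = xs.aggregate((0, 0))(
        (acc, x) => (acc._1 + x, acc._2 + 1),
        (a, b)   => (a._1 + b._1, a._2 + b._2))
      println(sum.toDouble / count)                      // 3.5

      // find returns *some* matching element, not necessarily the first
      // in iteration order; exists/forall can abort early via signalling.
      println(xs.find(_ % 2 == 0))                       // Some(2), Some(4) or Some(6)
      println(xs.exists(_ > 5))                          // true
    }
  }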
- */ - protected[this] def combinerFactory = { - val combiner = newCombiner - combiner.combinerTaskSupport = tasksupport - if (combiner.canBeShared) new CombinerFactory[T, Repr] { - val shared = combiner - def apply() = shared - def doesShareCombiners = true - } else new CombinerFactory[T, Repr] { - def apply() = newCombiner - def doesShareCombiners = false - } - } - - protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = { - val combiner = cbf() - combiner.combinerTaskSupport = tasksupport - if (combiner.canBeShared) new CombinerFactory[S, That] { - val shared = combiner - def apply() = shared - def doesShareCombiners = true - } else new CombinerFactory[S, That] { - def apply() = cbf() - def doesShareCombiners = false - } - } - - def withFilter(pred: T => Boolean): Repr = filter(pred) - - def filter(pred: T => Boolean): Repr = { - tasksupport.executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - def filterNot(pred: T => Boolean): Repr = { - tasksupport.executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = { - if (that.isParallel && bf.isParallel) { - // println("case both are parallel") - val other = that.asParIterable - val pbf = bf.asParallel - val cfactory = combinerFactory(() => pbf(repr)) - val copythis = new Copy(cfactory, splitter) - val copythat = wrap { - val othtask = new other.Copy(cfactory, other.splitter) - tasksupport.executeAndWaitResult(othtask) - } - val task = (copythis parallel copythat) { _ combine _ } mapResult { - _.resultWithTaskSupport - } - tasksupport.executeAndWaitResult(task) - } else if (bf(repr).isCombiner) { - // println("case parallel builder, `that` not parallel") - val copythis = new Copy(combinerFactory(() => bf(repr).asCombiner), splitter) - val copythat = wrap { - val cb = bf(repr).asCombiner - for (elem <- that.seq) cb += elem - cb - } - tasksupport.executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.resultWithTaskSupport }) - } else { - // println("case not a parallel builder") - val b = bf(repr) - this.splitter.copy2builder[U, That, Builder[U, That]](b) - for (elem <- that.seq) b += elem - setTaskSupport(b.result(), tasksupport) - } - } - - def partition(pred: T => Boolean): (Repr, Repr) = { - tasksupport.executeAndWaitResult( - new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult { - p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport) - } - ) - } - - def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = { - val r = tasksupport.executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult { - rcb => rcb.groupByKey(() => combinerFactory()) - }) - setTaskSupport(r, tasksupport) - } - - def take(n: Int): Repr = { - val actualn = if (size > n) n else size - if (actualn < MIN_FOR_COPY) take_sequential(actualn) - else tasksupport.executeAndWaitResult(new Take(actualn, combinerFactory, splitter) mapResult { - _.resultWithTaskSupport - }) - } - - private def take_sequential(n: Int) = { - val cb = newCombiner - cb.sizeHint(n) - val it = splitter - var left = n - while (left > 0) { - cb += it.next - left -= 1 - } - cb.resultWithTaskSupport - } - - def drop(n: Int): Repr = { - val actualn = if (size > n) n else size - if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn) - else tasksupport.executeAndWaitResult(new Drop(actualn, 
combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - private def drop_sequential(n: Int) = { - val it = splitter drop n - val cb = newCombiner - cb.sizeHint(size - n) - while (it.hasNext) cb += it.next - cb.resultWithTaskSupport - } - - override def slice(unc_from: Int, unc_until: Int): Repr = { - val from = unc_from min size max 0 - val until = unc_until min size max from - if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until) - else tasksupport.executeAndWaitResult(new Slice(from, until, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - private def slice_sequential(from: Int, until: Int): Repr = { - val cb = newCombiner - var left = until - from - val it = splitter drop from - while (left > 0) { - cb += it.next - left -= 1 - } - cb.resultWithTaskSupport - } - - def splitAt(n: Int): (Repr, Repr) = { - tasksupport.executeAndWaitResult( - new SplitAt(n, combinerFactory, combinerFactory, splitter) mapResult { - p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport) - } - ) - } - - /** Computes a prefix scan of the elements of the collection. - * - * Note: The neutral element `z` may be applied more than once. - * - * @tparam U element type of the resulting collection - * @tparam That type of the resulting collection - * @param z neutral element for the operator `op` - * @param op the associative operator for the scan - * @param bf $bfinfo - * @return a collection containing the prefix scan of the elements in the original collection - * - * @usecase def scan(z: T)(op: (T, T) => T): $Coll[T] - * @inheritdoc - * - * @return a new $coll containing the prefix scan of the elements in this $coll - */ - def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { - if (tasksupport.parallelismLevel > 1) { - if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { - tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult { - cb => cb.resultWithTaskSupport - }) - }) else setTaskSupport((bf(repr) += z).result(), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) - - def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanLeft(z)(op)(bf2seq(bf)), tasksupport) - - def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanRight(z)(op)(bf2seq(bf)), tasksupport) - - /** Takes the longest prefix of elements that satisfy the predicate. - * - * $indexsignalling - * The index flag is initially set to maximum integer value. - * - * @param pred the predicate used to test the elements - * @return the longest prefix of this $coll of elements that satisfy the predicate `pred` - */ - def takeWhile(pred: T => Boolean): Repr = { - val cbf = combinerFactory - if (cbf.doesShareCombiners) { - val parseqspan = toSeq.takeWhile(pred) - tasksupport.executeAndWaitResult(new Copy(combinerFactory, parseqspan.splitter) mapResult { - _.resultWithTaskSupport - }) - } else { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new TakeWhile(0, pred, combinerFactory, splitter assign cntx) mapResult { - _._1.resultWithTaskSupport - }) - } - } - - /** Splits this $coll into a prefix/suffix pair according to a predicate. 
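A brief sketch of the bulk transformations defined above (`partition`, `groupBy`, `take`/`slice`, `scan`), again assuming the Scala 2.12 parallel collections; the `ParVector` values are illustrative:

  import scala.collection.parallel.immutable.ParVector

  object TransformSketch {
    def main(args: Array[String]): Unit = {
      val xs = ParVector(1, 2, 3, 4, 5, 6)

      val (even, odd) = xs.partition(_ % 2 == 0)
      println(even)                  // ParVector(2, 4, 6)
      println(odd)                   // ParVector(1, 3, 5)

      println(xs.groupBy(_ % 3))     // a parallel map, e.g. 0 -> ParVector(3, 6), ...

      println(xs.take(3))            // ParVector(1, 2, 3)
      println(xs.slice(2, 4))        // ParVector(3, 4)

      // scan includes the neutral element, which may be applied more than
      // once internally while the scan tree is built.
      println(xs.scan(0)(_ + _))     // ParVector(0, 1, 3, 6, 10, 15, 21)
    }
  }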
- * - * $indexsignalling - * The index flag is initially set to maximum integer value. - * - * @param pred the predicate used to test the elements - * @return a pair consisting of the longest prefix of the collection for which all - * the elements satisfy `pred`, and the rest of the collection - */ - def span(pred: T => Boolean): (Repr, Repr) = { - val cbf = combinerFactory - if (cbf.doesShareCombiners) { - val (xs, ys) = toSeq.span(pred) - val copyxs = new Copy(combinerFactory, xs.splitter) mapResult { _.resultWithTaskSupport } - val copyys = new Copy(combinerFactory, ys.splitter) mapResult { _.resultWithTaskSupport } - val copyall = (copyxs parallel copyys) { - (xr, yr) => (xr, yr) - } - tasksupport.executeAndWaitResult(copyall) - } else { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { - p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport) - }) - } - } - - /** Drops all elements in the longest prefix of elements that satisfy the predicate, - * and returns a collection composed of the remaining elements. - * - * $indexsignalling - * The index flag is initially set to maximum integer value. - * - * @param pred the predicate used to test the elements - * @return a collection composed of all the elements after the longest prefix of elements - * in this $coll that satisfy the predicate `pred` - */ - def dropWhile(pred: T => Boolean): Repr = { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult( - new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { - _._2.resultWithTaskSupport - } - ) - } - - def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0) - - def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start) - - def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) { - tasksupport.executeAndWaitResult(new CopyToArray(start, len, xs, splitter)) - } - - def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that) - - def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport) - - def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false) - - def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult( - new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport) - - protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = { - tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport }) - } - - protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T 
<:< (K, V)): That = { - tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport }) - } - - @deprecated("use .seq.view instead", "2.11.0") - def view = seq.view - - override def toArray[U >: T: ClassTag]: Array[U] = { - val arr = new Array[U](size) - copyToArray(arr) - arr - } - - override def toList: List[T] = seq.toList - - override def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq - - override def toStream: Stream[T] = seq.toStream - - override def toIterator: Iterator[T] = splitter - - // the methods below are overridden - - override def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers? - - override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] - - override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] - - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) - - override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U]) - - override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V]) - - override def toVector: Vector[T] = to[Vector] - - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) { - toParCollection[T, Col[T]](() => cbf().asCombiner) - } else seq.to(cbf) - - /* tasks */ - - protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] { - def requiresStrictSplitters = false - if (requiresStrictSplitters && !isStrictSplitterCollection) - throw new UnsupportedOperationException("This collection does not provide strict splitters.") - } - - /** Standard accessor task that iterates over the elements of the collection. - * - * @tparam R type of the result of this method (`R` for result). - * @tparam Tp the representation type of the task at hand. 
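Finally, a sketch of the pairing and conversion operations above (`span`, `zipWithIndex`, `toSet`, `seq`/`par`), under the same Scala 2.12 assumption and with illustrative values:

  import scala.collection.parallel.immutable.ParVector

  object ConversionSketch {
    def main(args: Array[String]): Unit = {
      val xs = ParVector(1, 2, 3, 4, 1)

      println(xs.span(_ < 3))        // (ParVector(1, 2),ParVector(3, 4, 1))
      println(xs.zipWithIndex)       // ParVector((1,0), (2,1), (3,2), (4,3), (1,4))

      println(xs.toSet)              // a parallel set with the duplicate 1 collapsed
      println(xs.seq)                // Vector(1, 2, 3, 4, 1): cheap switch to sequential
      println(xs.par eq xs)          // true: par on a parallel collection is the identity
    }
  }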
- */ - protected trait Accessor[R, Tp] - extends StrictSplitterCheckTask[R, Tp] { - protected[this] val pit: IterableSplitter[T] - protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp] - def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel) - def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure - private[parallel] override def signalAbort = pit.abort() - override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")" - } - - protected[this] trait NonDivisibleTask[R, Tp] extends StrictSplitterCheckTask[R, Tp] { - def shouldSplitFurther = false - def split = throw new UnsupportedOperationException("Does not split.") - } - - protected[this] trait NonDivisible[R] extends NonDivisibleTask[R, NonDivisible[R]] - - protected[this] abstract class Composite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (val ft: First, val st: Second) - extends NonDivisibleTask[R, Composite[FR, SR, R, First, Second]] { - def combineResults(fr: FR, sr: SR): R - @volatile var result: R = null.asInstanceOf[R] - private[parallel] override def signalAbort() { - ft.signalAbort() - st.signalAbort() - } - protected def mergeSubtasks() { - ft mergeThrowables st - if (throwable eq null) result = combineResults(ft.result, st.result) - } - override def requiresStrictSplitters = ft.requiresStrictSplitters || st.requiresStrictSplitters - } - - /** Sequentially performs one task after another. */ - protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) - extends Composite[FR, SR, R, First, Second](f, s) { - def leaf(prevr: Option[R]) = { - tasksupport.executeAndWaitResult(ft) : Any - tasksupport.executeAndWaitResult(st) : Any - mergeSubtasks() - } - } - - /** Performs two tasks in parallel, and waits for both to finish. 
*/ - protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) - extends Composite[FR, SR, R, First, Second](f, s) { - def leaf(prevr: Option[R]) = { - val ftfuture: () => Any = tasksupport.execute(ft) - tasksupport.executeAndWaitResult(st) : Any - ftfuture() - mergeSubtasks() - } - } - - protected[this] abstract class ResultMapping[R, Tp, R1](val inner: StrictSplitterCheckTask[R, Tp]) - extends NonDivisibleTask[R1, ResultMapping[R, Tp, R1]] { - @volatile var result: R1 = null.asInstanceOf[R1] - def map(r: R): R1 - def leaf(prevr: Option[R1]) = { - val initialResult = tasksupport.executeAndWaitResult(inner) - result = map(initialResult) - } - private[parallel] override def signalAbort() { - inner.signalAbort() - } - override def requiresStrictSplitters = inner.requiresStrictSplitters - } - - protected trait Transformer[R, Tp] extends Accessor[R, Tp] - - protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Unit, Foreach[S]] { - @volatile var result: Unit = () - def leaf(prevr: Option[Unit]) = pit.foreach(op) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p) - } - - protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Int, Count] { - // val pittxt = pit.toString - @volatile var result: Int = 0 - def leaf(prevr: Option[Int]) = result = pit.count(pred) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Count(pred, p) - override def merge(that: Count) = result = result + that.result - // override def toString = "CountTask(" + pittxt + ")" - } - - protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Reduce[U]] { - @volatile var result: Option[U] = None - def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op)) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p) - override def merge(that: Reduce[U]) = - if (this.result == None) result = that.result - else if (that.result != None) result = Some(op(result.get, that.result.get)) - override def requiresStrictSplitters = true - } - - protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) - extends Accessor[U, Fold[U]] { - @volatile var result: U = null.asInstanceOf[U] - def leaf(prevr: Option[U]) = result = pit.fold(z)(op) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p) - override def merge(that: Fold[U]) = result = op(result, that.result) - } - - protected[this] class Aggregate[S](z: () => S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T]) - extends Accessor[S, Aggregate[S]] { - @volatile var result: S = null.asInstanceOf[S] - def leaf(prevr: Option[S]) = result = pit.foldLeft(z())(seqop) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p) - override def merge(that: Aggregate[S]) = result = combop(result, that.result) - } - - protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[U, Sum[U]] { - @volatile var result: U = null.asInstanceOf[U] - def leaf(prevr: Option[U]) = result = pit.sum(num) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p) - override def merge(that: Sum[U]) = result = num.plus(result, that.result) - 
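
The accessor tasks above (`Count`, `Reduce`, `Fold`, `Aggregate`, `Sum`, ...) all share one shape: `leaf` computes a local result over a single splitter chunk, and `merge` combines two partial results. That is why the public operations need an associative operator, and for `fold` a neutral `z`. A usage sketch under the same Scala 2.12 `.par` assumption:

{{{
val ns = (1 to 100).par

// Fold task: z must be neutral and op associative, because each chunk
// is folded independently and the partial results are merged pairwise.
val sum = ns.fold(0)(_ + _)                         // 5050

// Aggregate task: seqop runs inside a leaf, combop corresponds to merge.
val chars = List("par", "iterable", "like").par
  .aggregate(0)((acc, w) => acc + w.length, _ + _)  // 15

// Count/Reduce/Sum follow the same leaf-and-merge pattern.
val evens = ns.count(_ % 2 == 0)                    // 50
}}}
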
} - - protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[U, Product[U]] { - @volatile var result: U = null.asInstanceOf[U] - def leaf(prevr: Option[U]) = result = pit.product(num) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p) - override def merge(that: Product[U]) = result = num.times(result, that.result) - } - - protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Min[U]] { - @volatile var result: Option[U] = None - def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord)) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p) - override def merge(that: Min[U]) = - if (this.result == None) result = that.result - else if (that.result != None) result = if (ord.lteq(result.get, that.result.get)) result else that.result - override def requiresStrictSplitters = true - } - - protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Max[U]] { - @volatile var result: Option[U] = None - def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord)) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p) - override def merge(that: Max[U]) = - if (this.result == None) result = that.result - else if (that.result != None) result = if (ord.gteq(result.get, that.result.get)) result else that.result - override def requiresStrictSplitters = true - } - - protected[this] class Map[S, That](f: T => S, cbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[S, That], Map[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, cbf, p) - override def merge(that: Map[S, That]) = result = result combine that.result - } - - protected[this] class Collect[S, That] - (pf: PartialFunction[T, S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[S, That], Collect[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Collect(pf, pbf, p) - override def merge(that: Collect[S, That]) = result = result combine that.result - } - - protected[this] class FlatMap[S, That] - (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[S, That], FlatMap[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = new FlatMap(f, pbf, p) - override def merge(that: FlatMap[S, That]) = { - //debuglog("merging " + result + " and " + that.result) - result = result combine that.result - //debuglog("merged into " + result) - } - } - - protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Boolean, Forall] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort() } - protected[this] def 
newSubtask(p: IterableSplitter[T]) = new Forall(pred, p) - override def merge(that: Forall) = result = result && that.result - } - - protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Boolean, Exists] { - @volatile var result: Boolean = false - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort() } - protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p) - override def merge(that: Exists) = result = result || that.result - } - - protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Find[U]] { - @volatile var result: Option[U] = None - def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort() } - protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p) - override def merge(that: Find[U]) = if (this.result == None) result = that.result - } - - protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], Filter[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = { - result = pit.filter2combiner(pred, reuse(prev, cbf())) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new Filter(pred, cbf, p) - override def merge(that: Filter[U, This]) = result = result combine that.result - } - - protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], FilterNot[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = { - result = pit.filterNot2combiner(pred, reuse(prev, cbf())) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new FilterNot(pred, cbf, p) - override def merge(that: FilterNot[U, This]) = result = result combine that.result - } - - protected class Copy[U >: T, That](cfactory: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, That], Copy[U, That]] { - @volatile var result: Combiner[U, That] = null - def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Copy[U, That](cfactory, p) - override def merge(that: Copy[U, That]) = result = result combine that.result - } - - protected[this] class Partition[U >: T, This >: Repr] - (pred: T => Boolean, cbfTrue: CombinerFactory[U, This], cbfFalse: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] { - @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbfTrue()), reuse(prev.map(_._2), cbfFalse())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbfTrue, cbfFalse, p) - override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) - } - - protected[this] class GroupBy[K, U >: T]( - f: U => K, - mcf: () => HashMapCombiner[K, U], - protected[this] val pit: 
IterableSplitter[T] - ) extends Transformer[HashMapCombiner[K, U], GroupBy[K, U]] { - @volatile var result: Result = null - final def leaf(prev: Option[Result]) = { - // note: HashMapCombiner doesn't merge same keys until evaluation - val cb = mcf() - while (pit.hasNext) { - val elem = pit.next() - cb += f(elem) -> elem - } - result = cb - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new GroupBy(f, mcf, p) - override def merge(that: GroupBy[K, U]) = { - // note: this works because we know that a HashMapCombiner doesn't merge same keys until evaluation - // --> we know we're not dropping any mappings - result = (result combine that.result).asInstanceOf[HashMapCombiner[K, U]] - } - } - - protected[this] class Take[U >: T, This >: Repr] - (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], Take[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = { - result = pit.take2combiner(n, reuse(prev, cbf())) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes; if untilp <= n) yield { - if (untilp + p.remaining < n) new Take(p.remaining, cbf, p) - else new Take(n - untilp, cbf, p) - } - } - override def merge(that: Take[U, This]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class Drop[U >: T, This >: Repr] - (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], Drop[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield { - if (withp - p.remaining > n) new Drop(0, cbf, p) - else new Drop(n - withp + p.remaining, cbf, p) - } - } - override def merge(that: Drop[U, This]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class Slice[U >: T, This >: Repr] - (from: Int, until: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], Slice[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield { - val f = (from max untilp) - untilp - val u = (until min (untilp + p.remaining)) - untilp - new Slice(f, u, cbf, p) - } - } - override def merge(that: Slice[U, This]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class SplitAt[U >: T, This >: Repr] - (at: Int, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends 
Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] { - @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter())) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p) - } - override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) - override def requiresStrictSplitters = true - } - - protected[this] class TakeWhile[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] { - @volatile var result: (Combiner[U, This], Boolean) = null - def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) { - result = pit.takeWhile2combiner(pred, reuse(prev.map(_._1), cbf())) - if (!result._2) pit.setIndexFlagIfLesser(pos) - } else result = (reuse(prev.map(_._1), cbf()), false) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p) - } - override def merge(that: TakeWhile[U, This]) = if (result._2) { - result = (result._1 combine that.result._1, that.result._2) - } - override def requiresStrictSplitters = true - } - - protected[this] class Span[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] { - @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) { - // val lst = pit.toList - // val pa = mutable.ParArray(lst: _*) - // val str = "At leaf we will iterate: " + pa.splitter.toList - result = pit.span2combiners(pred, cbfBefore(), cbfAfter()) // do NOT reuse old combiners here, lest ye be surprised - // println("\nAt leaf result is: " + result) - if (result._2.size > 0) pit.setIndexFlagIfLesser(pos) - } else { - result = (reuse(prev.map(_._2), cbfBefore()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbfAfter()))) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p) - } - override def merge(that: Span[U, This]) = result = if (result._2.size == 0) { - (result._1 combine that.result._1, that.result._2) - } else { - (result._1, result._2 combine that.result._1 combine that.result._2) - } - override def requiresStrictSplitters = true - } - - protected[this] class Zip[U >: T, S, That](pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) - extends Transformer[Combiner[(U, S), 
That], Zip[U, S, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.map(_.remaining) - val opits = othpit.psplitWithSignalling(sizes: _*) - (pits zip opits) map { p => new Zip(pbf, p._1, p._2) } - } - override def merge(that: Zip[U, S, That]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class ZipAll[U >: T, S, That] - (len: Int, thiselem: U, thatelem: S, pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) - extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = if (pit.remaining <= len) { - val pits = pit.splitWithSignalling - val sizes = pits.map(_.remaining) - val opits = othpit.psplitWithSignalling(sizes: _*) - ((pits zip opits) zip sizes) map { t => new ZipAll(t._2, thiselem, thatelem, pbf, t._1._1, t._1._2) } - } else { - val opits = othpit.psplitWithSignalling(pit.remaining) - val diff = len - pit.remaining - Seq( - new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed - new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, diff).splitter.asInstanceOf[IterableSplitter[T]], opits(1)) - ) - } - override def merge(that: ZipAll[U, S, That]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[Unit, CopyToArray[U, This]] { - @volatile var result: Unit = () - def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield { - val plen = p.remaining min (len - untilp) - new CopyToArray[U, This](from + untilp, plen, array, p) - } - } - override def requiresStrictSplitters = true - } - - protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, That], ToParCollection[U, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Combiner[U, That]]) { - result = cbf() - while (pit.hasNext) result += pit.next - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParCollection[U, That](cbf, p) - override def merge(that: ToParCollection[U, That]) = result = result combine that.result - } - - protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V)) - extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Combiner[(K, V), That]]) { - result = cbf() - while (pit.hasNext) result += pit.next - } - 
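
The `Zip`/`ZipAll` tasks above split the other sequence with `psplitWithSignalling` so that both sides are partitioned into chunks of matching sizes before being zipped leaf by leaf. From the outside this is the familiar sequence API; a sketch (Scala 2.12 `.par` assumed):

{{{
val a = Vector(1, 2, 3, 4, 5).par
val b = Vector("a", "b", "c").par

val zipped  = a zip b              // pairs (1,"a"), (2,"b"), (3,"c")
val padded  = a.zipAll(b, 0, "-")  // shorter side padded: ..., (4,"-"), (5,"-")
val indexed = b.zipWithIndex       // zipped against ParRange(0, size)

// CopyToArray splits the target index range among the chunks.
val target = new Array[Int](5)
a.copyToArray(target)
}}}
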
protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParMap[K, V, That](cbf, p)(ev) - override def merge(that: ToParMap[K, V, That]) = result = result combine that.result - } - - protected[this] class CreateScanTree[U >: T](from: Int, len: Int, z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) - extends Transformer[ScanTree[U], CreateScanTree[U]] { - @volatile var result: ScanTree[U] = null - def leaf(prev: Option[ScanTree[U]]) = if (pit.remaining > 0) { - val trees = ArrayBuffer[ScanTree[U]]() - var i = from - val until = from + len - val blocksize = scanBlockSize - while (i < until) { - trees += scanBlock(i, scala.math.min(blocksize, pit.remaining)) - i += blocksize - } - - // merge trees - result = mergeTrees(trees, 0, trees.length) - } else result = null // no elements to scan (merge will take care of `null`s) - private def scanBlock(from: Int, len: Int): ScanTree[U] = { - val pitdup = pit.dup - new ScanLeaf(pitdup, op, from, len, None, pit.reduceLeft(len, op)) - } - private def mergeTrees(trees: ArrayBuffer[ScanTree[U]], from: Int, howmany: Int): ScanTree[U] = if (howmany > 1) { - val half = howmany / 2 - ScanNode(mergeTrees(trees, from, half), mergeTrees(trees, from + half, howmany - half)) - } else trees(from) - protected[this] def newSubtask(pit: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield { - new CreateScanTree(untilp, p.remaining, z, op, p) - } - } - override def merge(that: CreateScanTree[U]) = if (this.result != null) { - if (that.result != null) result = ScanNode(result, that.result) - } else result = that.result - override def requiresStrictSplitters = true - } - - protected[this] class FromScanTree[U >: T, That] - (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That]) - extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] { - @volatile var result: Combiner[U, That] = null - def leaf(prev: Option[Combiner[U, That]]) { - val cb = reuse(prev, cbf()) - iterate(tree, cb) - result = cb - } - private def iterate(tree: ScanTree[U], cb: Combiner[U, That]): Unit = tree match { - case ScanNode(left, right) => - iterate(left, cb) - iterate(right, cb) - case ScanLeaf(p, _, _, len, Some(prev), _) => - p.scanToCombiner(len, prev.acc, op, cb) - case ScanLeaf(p, _, _, len, None, _) => - cb += z - p.scanToCombiner(len, z, op, cb) - } - def split = tree match { - case ScanNode(left, right) => Seq( - new FromScanTree(left, z, op, cbf), - new FromScanTree(right, z, op, cbf) - ) - case _ => throw new UnsupportedOperationException("Cannot be split further") - } - def shouldSplitFurther = tree match { - case ScanNode(_, _) => true - case ScanLeaf(_, _, _, _, _, _) => false - } - override def merge(that: FromScanTree[U, That]) = result = result combine that.result - } - - /* scan tree */ - - protected[this] def scanBlockSize = (thresholdFromSize(size, tasksupport.parallelismLevel) / 2) max 1 - - protected[this] trait ScanTree[U >: T] { - def beginsAt: Int - def pushdown(v: U): Unit - def leftmost: ScanLeaf[U] - def rightmost: ScanLeaf[U] - def print(depth: Int = 0): Unit - } - - protected[this] case class ScanNode[U >: T](left: ScanTree[U], right: ScanTree[U]) extends ScanTree[U] { - right.pushdown(left.rightmost.acc) - right.leftmost.prev = Some(left.rightmost) - - val leftmost = left.leftmost - val rightmost = right.rightmost - - def beginsAt = left.beginsAt - def pushdown(v: 
U) { - left.pushdown(v) - right.pushdown(v) - } - def print(depth: Int) { - println((" " * depth) + "ScanNode, begins at " + beginsAt) - left.print(depth + 1) - right.print(depth + 1) - } - } - - protected[this] case class ScanLeaf[U >: T] - (pit: IterableSplitter[U], op: (U, U) => U, from: Int, len: Int, var prev: Option[ScanLeaf[U]], var acc: U) - extends ScanTree[U] { - def beginsAt = from - def pushdown(v: U) = { - acc = op(v, acc) - } - def leftmost = this - def rightmost = this - def print(depth: Int) = println((" " * depth) + this) - } - - /* alias methods */ - - def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op) - - def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op) - - /* debug information */ - - private[parallel] def debugInformation = "Parallel collection: " + this.getClass - - private[parallel] def brokenInvariants = Seq[String]() - - // private val dbbuff = ArrayBuffer[String]() - // def debugBuffer: ArrayBuffer[String] = dbbuff - def debugBuffer: ArrayBuffer[String] = null - - private[parallel] def debugclear() = synchronized { - debugBuffer.clear() - } - - private[parallel] def debuglog(s: String) = synchronized { - debugBuffer += s - } - - import scala.collection.DebugUtils._ - private[parallel] def printDebugBuffer() = println(buildString { - append => - for (s <- debugBuffer) { - append(s) - } - }) -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParMap.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParMap.scala deleted file mode 100644 index 70afe5174bef..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParMap.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.Map -import scala.collection.GenMap -import scala.collection.generic.ParMapFactory -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.generic.CanCombineFrom - -/** A template trait for parallel maps. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, +V] -extends GenMap[K, V] - with GenericParMapTemplate[K, V, ParMap] - with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], Map[K, V]] -{ -self => - - def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - //protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] - - def empty: ParMap[K, V] = new mutable.ParHashMap[K, V] - - override def stringPrefix = "ParMap" - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - def + [U >: V](kv: (K, U)): ParMap[K, U] -} - - - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new mutable.ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = mutable.ParHashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map - * because of variance issues. 
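
Stepping back to the end of the `ParIterableLike` hunk above: `CreateScanTree` and `FromScanTree` implement a two-phase parallel prefix computation. The first pass reduces fixed-size blocks into a `ScanTree` (`ScanNode.pushdown` propagates each block's incoming prefix), and the second pass re-traverses the blocks, writing the scanned values into combiners. Seen through the public API (a sketch, Scala 2.12 `.par` assumed):

{{{
// Parallel scan produces the same elements as the sequential one,
// provided op is associative and z is a neutral element.
val prefixSums = (1 to 8).toVector.par.scan(0)(_ + _)
// elements: 0, 1, 3, 6, 10, 15, 21, 28, 36
}}}
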
- */ - abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] { - override def size = underlying.size - def get(key: A) = underlying.get(key) - def splitter = underlying.splitter - override def default(key: A): B = d(key) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParMapLike.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParMapLike.scala deleted file mode 100644 index a3ac38858754..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParMapLike.scala +++ /dev/null @@ -1,141 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.MapLike -import scala.collection.GenMapLike -import scala.collection.Map - -import scala.annotation.unchecked.uncheckedVariance - -/** A template trait for mutable parallel maps. This trait is to be mixed in - * with concrete parallel maps to override the representation type. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * @define Coll `ParMap` - * @define coll parallel map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMapLike[K, - +V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: Map[K, V] with MapLike[K, V, Sequential]] -extends GenMapLike[K, V, Repr] - with ParIterableLike[(K, V), Repr, Sequential] -{ -self => - - def default(key: K): V = throw new NoSuchElementException("key not found: " + key) - - def empty: Repr - - def apply(key: K) = get(key) match { - case Some(v) => v - case None => default(key) - } - - def getOrElse[U >: V](key: K, default: => U): U = get(key) match { - case Some(v) => v - case None => default - } - - def contains(key: K): Boolean = get(key).isDefined - - def isDefinedAt(key: K): Boolean = contains(key) - - private[this] def keysIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[K] = - new IterableSplitter[K] { - i => - val iter = s - def hasNext = iter.hasNext - def next() = iter.next()._1 - def split = { - val ss = iter.split.map(keysIterator(_)) - ss.foreach { _.signalDelegate = i.signalDelegate } - ss - } - def remaining = iter.remaining - def dup = keysIterator(iter.dup) - } - - def keysIterator: IterableSplitter[K] = keysIterator(splitter) - - private[this] def valuesIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[V] = - new IterableSplitter[V] { - i => - val iter = s - def hasNext = iter.hasNext - def next() = iter.next()._2 - def split = { - val ss = iter.split.map(valuesIterator(_)) - ss.foreach { _.signalDelegate = i.signalDelegate } - ss - } - def remaining = iter.remaining - def dup = valuesIterator(iter.dup) - } - - def valuesIterator: IterableSplitter[V] = valuesIterator(splitter) - - protected class DefaultKeySet extends ParSet[K] { - def contains(key : K) = self.contains(key) - def splitter = keysIterator(self.splitter) - def + (elem: K): ParSet[K] = - (ParSet[K]() ++ this + elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem - def - (elem: K): ParSet[K] = - (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! 
concrete overrides abstract problem - override def size = self.size - override def foreach[U](f: K => U) = for ((k, v) <- self) f(k) - override def seq = self.seq.keySet - } - - protected class DefaultValuesIterable extends ParIterable[V] { - def splitter = valuesIterator(self.splitter) - override def size = self.size - override def foreach[U](f: V => U) = for ((k, v) <- self) f(v) - def seq = self.seq.values - } - - def keySet: ParSet[K] = new DefaultKeySet - - def keys: ParIterable[K] = keySet - - def values: ParIterable[V] = new DefaultValuesIterable - - def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] { - lazy val filtered = self.filter(kv => p(kv._1)) - override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) - def splitter = filtered.splitter - override def contains(key: K) = self.contains(key) && p(key) - def get(key: K) = if (!p(key)) None else self.get(key) - def seq = self.seq.filterKeys(p) - def size = filtered.size - def + [U >: V](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv - def - (key: K): ParMap[K, V] = ParMap[K, V]() ++ this - key - } - - def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] { - override def foreach[U](g: ((K, S)) => U): Unit = for ((k, v) <- self) g((k, f(v))) - def splitter = self.splitter.map(kv => (kv._1, f(kv._2))) - override def size = self.size - override def contains(key: K) = self.contains(key) - def get(key: K) = self.get(key).map(f) - def seq = self.seq.mapValues(f) - def + [U >: S](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv - def - (key: K): ParMap[K, S] = ParMap[K, S]() ++ this - key - } - - // note - should not override toMap (could be mutable) -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParSeq.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParSeq.scala deleted file mode 100644 index 2c883ba8fe1c..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParSeq.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.ParFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.GenSeq -import scala.collection.parallel.mutable.ParArrayCombiner - -/** A template trait for parallel sequences. 
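
`filterKeys`, `mapValues`, `keySet`, and `values` above are all thin wrappers over `self`: they adapt or filter the splitter rather than eagerly building a new map. A small sketch of the observable behaviour, under the same Scala 2.12 `.par` assumption (the `mapValues`/`filterKeys` names are those of this 2.12-era API):

{{{
val pm = Map("a" -> 1, "b" -> 2, "c" -> 3).par

val doubled  = pm.mapValues(_ * 2)      // wrapper ParMap; values mapped on access
val filtered = pm.filterKeys(_ != "b")  // only keys passing the predicate
val ks       = pm.keySet                // DefaultKeySet delegating to self

// apply falls back to default(), getOrElse takes an explicit fallback.
val v = pm.getOrElse("z", 0)            // 0
}}}
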
- * - * $parallelseqinfo - * - * $sideeffects - * - * @tparam T the type of the elements in this parallel sequence - * - * @author Aleksandar Prokopec - */ -trait ParSeq[+T] extends GenSeq[T] - with ParIterable[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], Seq[T]] -{ - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - //protected[this] override def newBuilder = ParSeq.newBuilder[T] - - def apply(i: Int): T - - override def toString = super[ParIterable].toString - - override def stringPrefix = getClass.getSimpleName -} - -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParSeqLike.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParSeqLike.scala deleted file mode 100644 index 2119e6e603d7..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParSeqLike.scala +++ /dev/null @@ -1,479 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator } -import scala.collection.generic.DefaultSignalling -import scala.collection.generic.AtomicIndexFlag -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.VolatileAbort - -import scala.collection.parallel.ParallelCollectionImplicits._ - -/** A template trait for sequences of type `ParSeq[T]`, representing - * parallel sequences with element type `T`. - * - * $parallelseqinfo - * - * @tparam T the type of the elements contained in this collection - * @tparam Repr the type of the actual collection containing the elements - * @tparam Sequential the type of the sequential version of this parallel collection - * - * @define parallelseqinfo - * Parallel sequences inherit the `Seq` trait. Their indexing and length computations - * are defined to be efficient. Like their sequential counterparts - * they always have a defined order of elements. This means they will produce resulting - * parallel sequences in the same way sequential sequences do. However, the order - * in which they perform bulk operations on elements to produce results is not defined and is generally - * nondeterministic. If the higher-order functions given to them produce no sideeffects, - * then this won't be noticeable. - * - * This trait defines a new, more general `split` operation and reimplements the `split` - * operation of `ParallelIterable` trait using the new `split` operation. - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, Sequential]] -extends scala.collection.GenSeqLike[T, Repr] - with ParIterableLike[T, Repr, Sequential] { -self => - - protected[this] type SuperParIterator = IterableSplitter[T] - - /** A more refined version of the iterator found in the `ParallelIterable` trait, - * this iterator can be split into arbitrary subsets of iterators. 
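
As the `$parallelseqinfo` text above says, parallel sequences keep a defined element order: results come out exactly as a sequential sequence would produce them, and only the execution order of the bulk operation is nondeterministic. A quick illustration (Scala 2.12 `.par` assumed):

{{{
val v = (1 to 10).toVector.par

// The order of the result is deterministic ...
val doubled = v.map(_ * 2)                  // 2, 4, ..., 20 in order

// ... but side effects are interleaved across chunks.
v.foreach(n => println(s"processing $n"))   // print order is unspecified
}}}
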
- * - * @return an iterator that can be split into subsets of precise size - */ - protected[parallel] def splitter: SeqSplitter[T] - - override def iterator: PreciseSplitter[T] = splitter - - override def size = length - - /** Used to iterate elements using indices */ - protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] { - private var i = start - - def hasNext = i < end - - def next(): T = if (i < end) { - val x = self(i) - i += 1 - x - } else Iterator.empty.next() - - def head = self(i) - - final def remaining = end - i - - def dup = new Elements(i, end) {} - - def split = psplit(remaining / 2, remaining - remaining / 2) - - def psplit(sizes: Int*) = { - val incr = sizes.scanLeft(0)(_ + _) - for ((from, until) <- incr.init zip incr.tail) yield { - new Elements(start + from, (start + until) min end) {} - } - } - - override def toString = "Elements(" + start + ", " + end + ")" - } - - /* ParallelSeq methods */ - - /** Returns the length of the longest segment of elements starting at - * a given position satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to maximum integer value. - * - * @param p the predicate used to test the elements - * @param from the starting offset for the search - * @return the length of the longest segment of elements starting at `from` and - * satisfying the predicate - */ - def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else { - val realfrom = if (from < 0) 0 else from - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1 - } - - /** Finds the first element satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to maximum integer value. - * - * @param p the predicate used to test the elements - * @param from the starting offset for the search - * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists - */ - def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else { - val realfrom = if (from < 0) 0 else from - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx)) - } - - /** Finds the last element satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to minimum integer value. 
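
`segmentLength` and `indexWhere` above both use an atomic index flag (initialised to `Int.MaxValue`) so that a leaf which has already found an answer at a lower index can tell later leaves to stop searching. A usage sketch:

{{{
val s = Vector(2, 4, 6, 7, 8, 10).par

val evenPrefix = s.segmentLength(_ % 2 == 0, 0)  // 3  (elements 2, 4, 6)
val firstOdd   = s.indexWhere(_ % 2 == 1, 0)     // 3  (the element 7)
}}}
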
- * - * @param p the predicate used to test the elements - * @param end the maximum offset for the search - * @return the index `<= end` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists - */ - def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else { - val until = if (end >= length) length else end + 1 - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MinValue) - tasksupport.executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx)) - } - - def reverse: Repr = { - tasksupport.executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.resultWithTaskSupport }) - } - - def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult( - new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.resultWithTaskSupport } - ) - } else setTaskSupport(seq.reverseMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.reverseMap(f)(bf2seq(bf))*/ - - /** Tests whether this $coll contains the given sequence at a given index. - * - * $abortsignalling - * - * @tparam S the element type of `that` parallel sequence - * @param that the parallel sequence this sequence is being searched for - * @param offset the starting offset for the search - * @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise - */ - def startsWith[S](that: GenSeq[S], offset: Int): Boolean = that ifParSeq { pthat => - if (offset < 0 || offset >= length) offset == length && pthat.length == 0 - else if (pthat.length == 0) true - else if (pthat.length > length - offset) false - else { - val ctx = new DefaultSignalling with VolatileAbort - tasksupport.executeAndWaitResult( - new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter) - ) - } - } otherwise seq.startsWith(that, offset) - - override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat => - val ctx = new DefaultSignalling with VolatileAbort - length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter)) - } otherwise seq.sameElements(that) - - /** Tests whether this $coll ends with the given parallel sequence. 
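
`startsWith`, `sameElements`, and the `endsWith` documented above compare against another parallel sequence with the abortable `SameElements` task, and fall back to the sequential implementation when the argument is not a `ParSeq`. A sketch (Scala 2.12 `.par` assumed):

{{{
val xs = (1 to 10).par

xs.startsWith((1 to 3).par, 0)     // true
xs.startsWith((1 to 3).par, 5)     // false
xs.endsWith((8 to 10).par)         // true
xs.sameElements(xs.toVector)       // true; sequential argument, sequential path
val rev = xs.reverse               // built chunk-wise by the Reverse task
}}}
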
- * - * $abortsignalling - * - * @tparam S the type of the elements of `that` sequence - * @param that the sequence to test - * @return `true` if this $coll has `that` as a suffix, `false` otherwise - */ - def endsWith[S](that: GenSeq[S]): Boolean = that ifParSeq { pthat => - if (that.length == 0) true - else if (that.length > length) false - else { - val ctx = new DefaultSignalling with VolatileAbort - val tlen = that.length - tasksupport.executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter)) - } - } otherwise seq.endsWith(that) - - def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - val realreplaced = replaced min (length - from) - if (patch.isParSeq && bf(repr).isCombiner && (size - realreplaced + patch.size) > MIN_FOR_COPY) { - val that = patch.asParSeq - val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced) - val cfactory = combinerFactory(() => bf(repr).asCombiner) - val copystart = new Copy[U, That](cfactory, pits(0)) - val copymiddle = wrap { - val tsk = new that.Copy[U, That](cfactory, that.splitter) - tasksupport.executeAndWaitResult(tsk) - } - val copyend = new Copy[U, That](cfactory, pits(2)) - tasksupport.executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult { - _.resultWithTaskSupport - }) - } else patch_sequential(from, patch.seq, replaced) - } - - private def patch_sequential[U >: T, That](fromarg: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - val from = 0 max fromarg - val b = bf(repr) - val repl = (r min (length - from)) max 0 - val pits = splitter.psplitWithSignalling(from, repl, length - from - repl) - b ++= pits(0) - b ++= patch - b ++= pits(2) - setTaskSupport(b.result(), tasksupport) - } - - def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult( - new Updated(index, elem, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else setTaskSupport(seq.updated(index, elem)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result }) - } otherwise seq.updated(index, elem)(bf2seq(bf))*/ - - def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - patch(0, mutable.ParArray(elem), 0) - } - - def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - patch(length, mutable.ParArray(elem), 0) - } - - def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) { - patch(length, new immutable.Repetition(elem, len - length), 0) - } else patch(length, Nil, 0) - - override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult( - new PSZip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else super.zip(that)(bf) - - /** Tests whether every element of this $coll relates to the - * corresponding element of another parallel sequence by satisfying a test predicate. 
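
`patch` is the workhorse here: `+:`, `:+`, and `padTo` are all expressed as patches, and the parallel copy path is taken only when the replacement is itself a `ParSeq`, the builder is a combiner, and the result exceeds `MIN_FOR_COPY`. A sketch (Scala 2.12 `.par` assumed):

{{{
val xs = Vector(1, 2, 3, 4, 5).par

val patched = xs.patch(1, Vector(20, 30).par, 2)  // 1, 20, 30, 4, 5
val changed = xs.updated(0, 99)                   // 99, 2, 3, 4, 5
val grown   = (0 +: xs) :+ 6                      // 0, 1, 2, 3, 4, 5, 6
val padded  = xs.padTo(8, 0)                      // 1, 2, 3, 4, 5, 0, 0, 0
}}}
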
- * - * $abortsignalling - * - * @param that the other parallel sequence - * @param p the test predicate, which relates elements from both sequences - * @tparam S the type of the elements of `that` - * @return `true` if both parallel sequences have the same length and - * `p(x, y)` is `true` for all corresponding elements `x` of this $coll - * and `y` of `that`, otherwise `false` - */ - def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat => - val ctx = new DefaultSignalling with VolatileAbort - length == pthat.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter)) - } otherwise seq.corresponds(that)(p) - - def diff[U >: T](that: GenSeq[U]): Repr = sequentially { - _ diff that - } - - /** Computes the multiset intersection between this $coll and another sequence. - * - * @param that the sequence of elements to intersect with. - * @tparam U the element type of `that` parallel sequence - * @return a new collection of type `That` which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - * - * @usecase def intersect(that: Seq[T]): $Coll[T] - * @inheritdoc - * - * $mayNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[U >: T](that: GenSeq[U]) = sequentially { - _ intersect that - } - - /** Builds a new $coll from this $coll without any duplicate elements. - * $willNotTerminateInf - * - * @return A new $coll which contains the first occurrence of every element of this $coll. 
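
`diff`, `intersect`, and `distinct` are deliberately run `sequentially { ... }`: they need global multiset bookkeeping, so the collection is temporarily viewed as its sequential counterpart, the operation runs there, and the result is re-wrapped with the current task support. The multiset semantics described above, as a sketch:

{{{
val xs = Vector(1, 1, 2, 2, 3).par
val ys = Vector(1, 2, 2, 2).par

xs.intersect(ys)  // 1, 2, 2  (keeps the first min(occurrences) of each value)
xs.diff(ys)       // 1, 3     (removes one occurrence per match found in ys)
xs.distinct       // 1, 2, 3
}}}
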
- */ - def distinct: Repr = sequentially { - _.distinct - } - - override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - - override def toSeq = this.asInstanceOf[ParSeq[T]] - - @deprecated("use .seq.view", "2.11.0") - override def view = seq.view - - /* tasks */ - - protected[this] def down(p: IterableSplitter[_]) = p.asInstanceOf[SeqSplitter[T]] - - protected trait PSAccessor[R, Tp] extends super.Accessor[R, Tp] { - protected[this] val pit: SeqSplitter[T] - } - - protected trait PSTransformer[R, Tp] extends PSAccessor[R, Tp] with super.Transformer[R, Tp] - - protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) - extends PSAccessor[(Int, Boolean), SegmentLength] { - @volatile var result: (Int, Boolean) = null - def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) { - val itsize = pit.remaining - val seglen = pit.prefixLength(pred) - result = (seglen, itsize == seglen) - if (!result._2) pit.setIndexFlagIfLesser(from) - } else result = (0, false) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p) - } - override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2) - override def requiresStrictSplitters = true - } - - protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) - extends PSAccessor[Int, IndexWhere] { - @volatile var result: Int = -1 - def leaf(prev: Option[Int]) = if (from < pit.indexFlag) { - val r = pit.indexWhere(pred) - if (r != -1) { - result = from + r - pit.setIndexFlagIfLesser(from) - } - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p) - } - override def merge(that: IndexWhere) = result = if (result == -1) that.result else { - if (that.result != -1) result min that.result else result - } - override def requiresStrictSplitters = true - } - - protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: SeqSplitter[T]) - extends PSAccessor[Int, LastIndexWhere] { - @volatile var result: Int = -1 - def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) { - val r = pit.lastIndexWhere(pred) - if (r != -1) { - result = pos + r - pit.setIndexFlagIfGreater(pos) - } - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p) - } - override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else { - if (that.result != -1) result max that.result else result - } - override def requiresStrictSplitters = true - } - - protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: SeqSplitter[T]) - extends PSTransformer[Combiner[U, This], Reverse[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf())) - protected[this] def newSubtask(p: SuperParIterator) = new Reverse(cbf, down(p)) 
- override def merge(that: Reverse[U, This]) = result = that.result combine result - } - - protected[this] class ReverseMap[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: SeqSplitter[T]) - extends PSTransformer[Combiner[S, That], ReverseMap[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf()) - protected[this] def newSubtask(p: SuperParIterator) = new ReverseMap(f, pbf, down(p)) - override def merge(that: ReverseMap[S, That]) = result = that.result combine result - } - - protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U]) - extends PSAccessor[Boolean, SameElements[U]] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { - result = pit.sameElements(otherpit) - if (!result) pit.abort() - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = pit.remaining / 2 - val sp = pit.remaining - fp - for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op) - } - override def merge(that: SameElements[U]) = result = result && that.result - override def requiresStrictSplitters = true - } - - protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CombinerFactory[U, That], protected[this] val pit: SeqSplitter[T]) - extends PSTransformer[Combiner[U, That], Updated[U, That]] { - @volatile var result: Combiner[U, That] = null - def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf()) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p) - } - override def merge(that: Updated[U, That]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class PSZip[U >: T, S, That](len: Int, cf: CombinerFactory[(U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) - extends PSTransformer[Combiner[(U, S), That], PSZip[U, S, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf()) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = len / 2 - val sp = len - len / 2 - val pits = pit.psplitWithSignalling(fp, sp) - val opits = otherpit.psplitWithSignalling(fp, sp) - Seq( - new PSZip(fp, cf, pits(0), opits(0)), - new PSZip(sp, cf, pits(1), opits(1)) - ) - } - override def merge(that: PSZip[U, S, That]) = result = result combine that.result - } - - protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) - extends PSAccessor[Boolean, Corresponds[S]] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { - result = pit.corresponds(corr)(otherpit) - if (!result) pit.abort() - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = pit.remaining / 2 - val sp = pit.remaining - fp - for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) 
yield new Corresponds(corr, p, op) - } - override def merge(that: Corresponds[S]) = result = result && that.result - override def requiresStrictSplitters = true - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParSet.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParSet.scala deleted file mode 100644 index ba3d23f0e47e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParSet.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel - -import scala.collection.generic._ - -/** A template trait for parallel sets. - * - * $sideeffects - * - * @tparam T the element type of the set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSet[T] - extends GenSet[T] - with GenericParTemplate[T, ParSet] - with ParIterable[T] - with ParSetLike[T, ParSet[T], Set[T]] -{ self => - - override def empty: ParSet[T] = mutable.ParHashSet[T]() - - //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T] - - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - - override def stringPrefix = "ParSet" -} - -object ParSet extends ParSetFactory[ParSet] { - def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/ParSetLike.scala b/tests/scala2-library/src/library/scala/collection/parallel/ParSetLike.scala deleted file mode 100644 index 4feda5ff07fe..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/ParSetLike.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.SetLike -import scala.collection.GenSetLike -import scala.collection.GenSet -import scala.collection.Set - -/** A template trait for parallel sets. This trait is mixed in with concrete - * parallel sets to override the representation type. 
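// A small usage sketch of the ParSet companion shown above (names are illustrative):
// the factory builds a mutable.ParHashSet behind the generic ParSet interface, and the
// set algebra below falls back to the sequential operations via `sequentially`.
import scala.collection.parallel.ParSet
val s = ParSet(1, 2, 3, 4)
s union ParSet(4, 5)   // a parallel set with the elements 1, 2, 3, 4, 5
s diff ParSet(1, 2)    // a parallel set with the elements 3, 4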
- * - * $sideeffects - * - * @tparam T the element type of the set - * @define Coll `ParSet` - * @define coll parallel set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSetLike[T, - +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], - +Sequential <: Set[T] with SetLike[T, Sequential]] -extends GenSetLike[T, Repr] - with ParIterableLike[T, Repr, Sequential] -{ self => - - def empty: Repr - - // note: should not override toSet (could be mutable) - - def union(that: GenSet[T]): Repr = sequentially { - _ union that - } - - def diff(that: GenSet[T]): Repr = sequentially { - _ diff that - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/PreciseSplitter.scala b/tests/scala2-library/src/library/scala/collection/parallel/PreciseSplitter.scala deleted file mode 100644 index 4b22934a29ba..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/PreciseSplitter.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.Seq - -/** A precise splitter (or a precise split iterator) can be split into arbitrary number of splitters - * that traverse disjoint subsets of arbitrary sizes. - * - * Implementors might want to override the parameterless `split` method for efficiency. - * - * @tparam T type of the elements this splitter traverses - * - * @since 2.9 - * @author Aleksandar Prokopec - */ -trait PreciseSplitter[+T] extends Splitter[T] { - - /** Splits the splitter into disjunct views. - * - * This overloaded version of the `split` method is specific to precise splitters. - * It returns a sequence of splitters, each iterating some subset of the - * elements in this splitter. The sizes of the subsplitters in the partition is equal to - * the size in the corresponding argument, as long as there are enough elements in this - * splitter to split it that way. - * - * If there aren't enough elements, a zero element splitter is appended for each additional argument. - * If there are additional elements, an additional splitter is appended at the end to compensate. - * - * For example, say we have a splitter `ps` with 100 elements. Invoking: - * {{{ - * ps.split(50, 25, 25, 10, 5) - * }}} - * will return a sequence of five splitters, last two views being empty. On the other hand, calling: - * {{{ - * ps.split(50, 40) - * }}} - * will return a sequence of three splitters, last of them containing ten elements. - * - * '''Note:''' this method actually invalidates the current splitter. - * - * Unlike the case with `split` found in splitters, views returned by this method can be empty. 
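// A concrete sketch of the sizing rules spelled out above, assuming a ParVector
// (a parallel sequence whose public `iterator` is a precise splitter):
val ps = (0 until 100).toVector.par.iterator   // a PreciseSplitter over 100 elements
val parts = ps.psplit(50, 40)
parts.map(_.size)                              // Seq(50, 40, 10): the ten leftover elements go into an extra splitter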
- * - * @param sizes the sizes used to split this split iterator into iterators that traverse disjunct subsets - * @return a sequence of disjunct subsequence iterators of this parallel iterator - */ - def psplit(sizes: Int*): Seq[PreciseSplitter[T]] - - def split: Seq[PreciseSplitter[T]] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/RemainsIterator.scala b/tests/scala2-library/src/library/scala/collection/parallel/RemainsIterator.scala deleted file mode 100644 index 7d5096a932d3..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/RemainsIterator.scala +++ /dev/null @@ -1,677 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.generic.Signalling -import scala.collection.generic.DelegatedSignalling -import scala.collection.generic.IdleSignalling -import scala.collection.mutable.Builder -import scala.collection.GenTraversableOnce -import scala.collection.parallel.immutable.repetition - -private[collection] trait RemainsIterator[+T] extends Iterator[T] { - /** The number of elements this iterator has yet to iterate. - * This method doesn't change the state of the iterator. - */ - def remaining: Int - - /** For most collections, this is a cheap operation. - * Exceptions can override this method. - */ - def isRemainingCheap = true -} - -/** Augments iterators with additional methods, mostly transformers, - * assuming they iterate an iterable collection. - * - * @tparam T type of the elements iterated. - */ -private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[T] { - - /* accessors */ - - override def count(p: T => Boolean): Int = { - var i = 0 - while (hasNext) if (p(next())) i += 1 - i - } - - override def reduce[U >: T](op: (U, U) => U): U = { - var r: U = next() - while (hasNext) r = op(r, next()) - r - } - - override def fold[U >: T](z: U)(op: (U, U) => U): U = { - var r = z - while (hasNext) r = op(r, next()) - r - } - - override def sum[U >: T](implicit num: Numeric[U]): U = { - var r: U = num.zero - while (hasNext) r = num.plus(r, next()) - r - } - - override def product[U >: T](implicit num: Numeric[U]): U = { - var r: U = num.one - while (hasNext) r = num.times(r, next()) - r - } - - override def min[U >: T](implicit ord: Ordering[U]): T = { - var r = next() - while (hasNext) { - val curr = next() - if (ord.lteq(curr, r)) r = curr - } - r - } - - override def max[U >: T](implicit ord: Ordering[U]): T = { - var r = next() - while (hasNext) { - val curr = next() - if (ord.gteq(curr, r)) r = curr - } - r - } - - override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) { - var i = from - val until = from + len - while (i < until && hasNext) { - array(i) = next() - i += 1 - } - } - - def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = { - var i = howmany - 1 - var u: U = next() - while (i > 0 && hasNext) { - u = op(u, next()) - i -= 1 - } - u - } - - /* transformers to combiners */ - - def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(repr) - if (isRemainingCheap) cb.sizeHint(remaining) - while (hasNext) cb += f(next()) - cb - } - - def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(repr) - val runWith = pf.runWith(cb += _) - while 
(hasNext) { - val curr = next() - runWith(curr) - } - cb - } - - def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(repr) - while (hasNext) { - val traversable = f(next()).seq - if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator - else cb ++= traversable - } - cb - } - - def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = { - if (isRemainingCheap) b.sizeHint(remaining) - while (hasNext) b += next - b - } - - def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { - while (hasNext) { - val curr = next() - if (pred(curr)) cb += curr - } - cb - } - - def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { - while (hasNext) { - val curr = next() - if (!pred(curr)) cb += curr - } - cb - } - - def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { - while (hasNext) { - val curr = next() - if (pred(curr)) btrue += curr - else bfalse += curr - } - (btrue, bfalse) - } - - def take2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = { - cb.sizeHint(n) - var left = n - while (left > 0) { - cb += next - left -= 1 - } - cb - } - - def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = { - drop(n) - if (isRemainingCheap) cb.sizeHint(remaining) - while (hasNext) cb += next - cb - } - - def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = { - drop(from) - var left = scala.math.max(until - from, 0) - cb.sizeHint(left) - while (left > 0) { - cb += next - left -= 1 - } - cb - } - - def splitAt2combiners[U >: T, This](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = { - before.sizeHint(at) - if (isRemainingCheap) after.sizeHint(remaining - at) - var left = at - while (left > 0) { - before += next - left -= 1 - } - while (hasNext) after += next - (before, after) - } - - def takeWhile2combiner[U >: T, This](p: T => Boolean, cb: Combiner[U, This]) = { - var loop = true - while (hasNext && loop) { - val curr = next() - if (p(curr)) cb += curr - else loop = false - } - (cb, loop) - } - - def span2combiners[U >: T, This](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = { - var isBefore = true - while (hasNext && isBefore) { - val curr = next() - if (p(curr)) before += curr - else { - if (isRemainingCheap) after.sizeHint(remaining + 1) - after += curr - isBefore = false - } - } - while (hasNext) after += next - (before, after) - } - - def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, array: Array[A], from: Int) { - var last = z - var i = from - while (hasNext) { - last = op(last, next()) - array(i) = last - i += 1 - } - } - - def scanToCombiner[U >: T, That](startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = { - var curr = startValue - while (hasNext) { - curr = op(curr, next()) - cb += curr - } - cb - } - - def scanToCombiner[U >: T, That](howmany: Int, startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = { - var curr = startValue - var left = howmany - while (left > 0) { - curr = op(curr, next()) - cb += curr - left -= 1 - } - cb - } - - def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining) - while (hasNext && 
otherpit.hasNext) { - cb += ((next(), otherpit.next())) - } - cb - } - - def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining) - while (this.hasNext && that.hasNext) cb += ((this.next(), that.next())) - while (this.hasNext) cb += ((this.next(), thatelem)) - while (that.hasNext) cb += ((thiselem, that.next())) - cb - } - -} - - -private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] { - - /** The exact number of elements this iterator has yet to iterate. - * This method doesn't change the state of the iterator. - */ - def remaining: Int - - /* accessors */ - - def prefixLength(pred: T => Boolean): Int = { - var total = 0 - var loop = true - while (hasNext && loop) { - if (pred(next())) total += 1 - else loop = false - } - total - } - - override def indexWhere(pred: T => Boolean): Int = { - var i = 0 - var loop = true - while (hasNext && loop) { - if (pred(next())) loop = false - else i += 1 - } - if (loop) -1 else i - } - - def lastIndexWhere(pred: T => Boolean): Int = { - var pos = -1 - var i = 0 - while (hasNext) { - if (pred(next())) pos = i - i += 1 - } - pos - } - - def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = { - while (hasNext && that.hasNext) { - if (!corr(next(), that.next())) return false - } - hasNext == that.hasNext - } - - /* transformers */ - - def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { - if (isRemainingCheap) cb.sizeHint(remaining) - var lst = List[T]() - while (hasNext) lst ::= next - while (lst != Nil) { - cb += lst.head - lst = lst.tail - } - cb - } - - def reverseMap2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = cbf(repr) - if (isRemainingCheap) cb.sizeHint(remaining) - var lst = List[S]() - while (hasNext) lst ::= f(next()) - while (lst != Nil) { - cb += lst.head - lst = lst.tail - } - cb - } - - def updated2combiner[U >: T, That](index: Int, elem: U, cb: Combiner[U, That]): Combiner[U, That] = { - //val cb = cbf(repr) - if (isRemainingCheap) cb.sizeHint(remaining) - var j = 0 - while (hasNext) { - if (j == index) { - cb += elem - next() - } else cb += next - j += 1 - } - cb - } - -} - - -/** Parallel iterators allow splitting and provide a `remaining` method to - * obtain the number of elements remaining in the iterator. - * - * @tparam T type of the elements iterated. - */ -trait IterableSplitter[+T] -extends AugmentedIterableIterator[T] - with Splitter[T] - with Signalling - with DelegatedSignalling -{ -self => - - var signalDelegate: Signalling = IdleSignalling - - /** Creates a copy of this iterator. */ - def dup: IterableSplitter[T] - - def split: Seq[IterableSplitter[T]] - - def splitWithSignalling: Seq[IterableSplitter[T]] = { - val pits = split - pits foreach { _.signalDelegate = signalDelegate } - pits - } - - def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel) - - /** The number of elements this iterator has yet to traverse. This method - * doesn't change the state of the iterator. - * - * This method is used to provide size hints to builders and combiners, and - * to approximate positions of iterators within a data structure. 
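// A sketch of the collection-level operations that zip2combiner and zipAll2combiner back,
// assuming two small parallel vectors; the size hints come from `remaining` as noted here:
val a = Vector(1, 2, 3).par
val b = Vector("x", "y").par
a zip b               // ParVector((1,x), (2,y)); sized by the smaller `remaining`
a.zipAll(b, 0, "?")   // ParVector((1,x), (2,y), (3,?)); the shorter side is padded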
- * - * '''Note''': This method may be implemented to return an upper bound on the number of elements - * in the iterator, instead of the exact number of elements to iterate. - * Parallel collections which have such iterators are called non-strict-splitter collections. - * - * In that case, 2 considerations must be taken into account: - * - * 1) classes that inherit `ParIterable` must reimplement methods `take`, `drop`, `slice`, `splitAt`, `copyToArray` - * and all others using this information. - * - * 2) if an iterator provides an upper bound on the number of elements, then after splitting the sum - * of `remaining` values of split iterators must be less than or equal to this upper bound. - */ - def remaining: Int - - protected def buildString(closure: (String => Unit) => Unit): String = { - var output = "" - def appendln(s: String) = output += s + "\n" - closure(appendln) - output - } - - private[parallel] def debugInformation = { - // can be overridden in subclasses - "Parallel iterator: " + this.getClass - } - - /* iterator transformers */ - - class Taken(taken: Int) extends IterableSplitter[T] { - var remaining = taken min self.remaining - def hasNext = remaining > 0 - def next = { remaining -= 1; self.next() } - def dup: IterableSplitter[T] = self.dup.take(taken) - def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) } - protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = { - val sizes = sq.scanLeft(0)(_ + _.remaining) - val shortened = for ((it, (from, until)) <- sq zip (sizes.init zip sizes.tail)) yield - if (until < remaining) it else taker(it, remaining - from) - shortened filter { _.remaining > 0 } - } - } - /** To lower "virtual class" boilerplate tax, implement creation - * in method and override this method in the subclass. - */ - private[collection] def newTaken(until: Int): Taken = new Taken(until) - private[collection] def newSliceInternal[U <: Taken](it: U, from1: Int): U = { - var count = from1 - while (count > 0 && it.hasNext) { - it.next - count -= 1 - } - it - } - /** Drop implemented as simple eager consumption. 
*/ - override def drop(n: Int): IterableSplitter[T] = { - var i = 0 - while (i < n && hasNext) { - next() - i += 1 - } - this - } - override def take(n: Int): IterableSplitter[T] = newTaken(n) - override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1) - - class Mapped[S](f: T => S) extends IterableSplitter[S] { - signalDelegate = self.signalDelegate - def hasNext = self.hasNext - def next = f(self.next()) - def remaining = self.remaining - def dup: IterableSplitter[S] = self.dup map f - def split: Seq[IterableSplitter[S]] = self.split.map { _ map f } - } - - override def map[S](f: T => S): Mapped[S] = new Mapped(f) - - class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] { - signalDelegate = self.signalDelegate - protected var curr: IterableSplitter[U] = self - def hasNext = if (curr.hasNext) true else if (curr eq self) { - curr = that - curr.hasNext - } else false - def next = if (curr eq self) { - hasNext - curr.next() - } else curr.next() - def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining - protected def firstNonEmpty = (curr eq self) && curr.hasNext - def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that) - def split: Seq[IterableSplitter[U]] = if (firstNonEmpty) Seq(curr, that) else curr.split - } - - def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that) - - class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] { - signalDelegate = self.signalDelegate - def hasNext = self.hasNext && that.hasNext - def next = (self.next(), that.next()) - def remaining = self.remaining min that.remaining - def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that) - def split: Seq[IterableSplitter[(T, S)]] = { - val selfs = self.split - val sizes = selfs.map(_.remaining) - val thats = that.psplit(sizes: _*) - (selfs zip thats) map { p => p._1 zipParSeq p._2 } - } - } - - def zipParSeq[S](that: SeqSplitter[S]): Zipped[S] = new Zipped(that) - - class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S) - extends IterableSplitter[(U, S)] { - signalDelegate = self.signalDelegate - def hasNext = self.hasNext || that.hasNext - def next = if (self.hasNext) { - if (that.hasNext) (self.next(), that.next()) - else (self.next(), thatelem) - } else (thiselem, that.next()) - - def remaining = self.remaining max that.remaining - def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem) - def split: Seq[IterableSplitter[(U, S)]] = { - val selfrem = self.remaining - val thatrem = that.remaining - val thisit = if (selfrem < thatrem) self.appendParIterable[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self - val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that - val zipped = thisit zipParSeq thatit - zipped.split - } - } - - def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem) -} - -/** Parallel sequence iterators allow splitting into arbitrary subsets. - * - * @tparam T type of the elements iterated. 
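// For reference, the analogous collection-level slicing on a hypothetical ParVector (a sketch):
val v = (0 until 10).toVector.par
v.take(3)       // ParVector(0, 1, 2)
v.drop(7)       // ParVector(7, 8, 9)
v.slice(2, 5)   // ParVector(2, 3, 4)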
- */ -trait SeqSplitter[+T] -extends IterableSplitter[T] - with AugmentedSeqIterator[T] - with PreciseSplitter[T] -{ -self => - def dup: SeqSplitter[T] - def split: Seq[SeqSplitter[T]] - def psplit(sizes: Int*): Seq[SeqSplitter[T]] - - override def splitWithSignalling: Seq[SeqSplitter[T]] = { - val pits = split - pits foreach { _.signalDelegate = signalDelegate } - pits - } - - def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = { - val pits = psplit(sizes: _*) - pits foreach { _.signalDelegate = signalDelegate } - pits - } - - /** The number of elements this iterator has yet to traverse. This method - * doesn't change the state of the iterator. Unlike the version of this method in the supertrait, - * method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number - * of elements remaining in the iterator. - * - * @return an exact number of elements this iterator has yet to iterate - */ - def remaining: Int - - /* iterator transformers */ - - class RITaken(tk: Int) extends super.Taken(tk) with SeqSplitter[T] { - override def dup = super.dup.asInstanceOf[SeqSplitter[T]] - override def split: Seq[SeqSplitter[T]] = super.split.asInstanceOf[Seq[SeqSplitter[T]]] - def psplit(sizes: Int*): Seq[SeqSplitter[T]] = takeSeq(self.psplit(sizes: _*)) { (p, n) => p.take(n) } - } - override private[collection] def newTaken(until: Int): RITaken = new RITaken(until) - override def take(n: Int): SeqSplitter[T] = newTaken(n) - override def slice(from1: Int, until1: Int): SeqSplitter[T] = newSliceInternal(newTaken(until1), from1) - - class RIMapped[S](f: T => S) extends super.Mapped[S](f) with SeqSplitter[S] { - override def dup = super.dup.asInstanceOf[SeqSplitter[S]] - override def split: Seq[SeqSplitter[S]] = super.split.asInstanceOf[Seq[SeqSplitter[S]]] - def psplit(sizes: Int*): Seq[SeqSplitter[S]] = self.psplit(sizes: _*).map { _ map f } - } - - override def map[S](f: T => S): RIMapped[S] = new RIMapped(f) - - class RIAppended[U >: T, PI <: SeqSplitter[U]](it: PI) extends super.Appended[U, PI](it) with SeqSplitter[U] { - override def dup = super.dup.asInstanceOf[SeqSplitter[U]] - override def split: Seq[SeqSplitter[U]] = super.split.asInstanceOf[Seq[SeqSplitter[U]]] - def psplit(sizes: Int*): Seq[SeqSplitter[U]] = if (firstNonEmpty) { - val selfrem = self.remaining - - // split sizes - var appendMiddle = false - val szcum = sizes.scanLeft(0)(_ + _) - val splitsizes = sizes.zip(szcum.init zip szcum.tail).flatMap { t => - val (sz, (from, until)) = t - if (from < selfrem && until > selfrem) { - appendMiddle = true - Seq(selfrem - from, until - selfrem) - } else Seq(sz) - } - val (selfszfrom, thatszfrom) = splitsizes.zip(szcum.init).span(_._2 < selfrem) - val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 }) - - // split iterators - val selfs = self.psplit(selfsizes: _*) - val thats = that.psplit(thatsizes: _*) - - // appended last in self with first in rest if necessary - if (appendMiddle) selfs.init ++ Seq(selfs.last.appendParSeq[U, SeqSplitter[U]](thats.head)) ++ thats.tail - else selfs ++ thats - } else curr.asInstanceOf[SeqSplitter[U]].psplit(sizes: _*) - } - - def appendParSeq[U >: T, PI <: SeqSplitter[U]](that: PI) = new RIAppended[U, PI](that) - - class RIZipped[S](ti: SeqSplitter[S]) extends super.Zipped[S](ti) with SeqSplitter[(T, S)] { - override def dup = super.dup.asInstanceOf[SeqSplitter[(T, S)]] - override def split: Seq[SeqSplitter[(T, S)]] = super.split.asInstanceOf[Seq[SeqSplitter[(T, S)]]] - def psplit(szs: Int*) = (self.psplit(szs: _*) 
zip that.psplit(szs: _*)) map { p => p._1 zipParSeq p._2 } - } - - override def zipParSeq[S](that: SeqSplitter[S]): RIZipped[S] = new RIZipped(that) - - class RIZippedAll[U >: T, S](ti: SeqSplitter[S], thise: U, thate: S) extends super.ZippedAll[U, S](ti, thise, thate) with SeqSplitter[(U, S)] { - override def dup = super.dup.asInstanceOf[SeqSplitter[(U, S)]] - private def patchem = { - val selfrem = self.remaining - val thatrem = that.remaining - val thisit = if (selfrem < thatrem) self.appendParSeq[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self - val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that - (thisit, thatit) - } - override def split: Seq[SeqSplitter[(U, S)]] = { - val (thisit, thatit) = patchem - val zipped = thisit zipParSeq thatit - zipped.split - } - def psplit(sizes: Int*): Seq[SeqSplitter[(U, S)]] = { - val (thisit, thatit) = patchem - val zipped = thisit zipParSeq thatit - zipped.psplit(sizes: _*) - } - } - - override def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new RIZippedAll[U, R](that, thisElem, thatElem) - - def reverse: SeqSplitter[T] = { - val pa = mutable.ParArray.fromTraversables(self).reverse - new pa.ParArrayIterator { - override def reverse = self - } - } - - class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] { - signalDelegate = self.signalDelegate - private[this] val trio = { - val pits = self.psplit(from, replaced, self.remaining - from - replaced) - (pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2) - } - def hasNext = trio.hasNext - def next = trio.next - def remaining = trio.remaining - def dup = self.dup.patchParSeq(from, patch, replaced) - def split = trio.split - def psplit(sizes: Int*) = trio.psplit(sizes: _*) - } - - def patchParSeq[U >: T](from: Int, patchElems: SeqSplitter[U], replaced: Int) = new Patched(from, patchElems, replaced) - -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/Splitter.scala b/tests/scala2-library/src/library/scala/collection/parallel/Splitter.scala deleted file mode 100644 index 8329f15d88a6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/Splitter.scala +++ /dev/null @@ -1,59 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.{ Seq, Iterator } - -/** A splitter (or a split iterator) can be split into more splitters that traverse over - * disjoint subsets of elements. - * - * @tparam T type of the elements this splitter traverses - * - * @since 2.9 - * @author Aleksandar Prokopec - */ -trait Splitter[+T] extends Iterator[T] { - - /** Splits the iterator into a sequence of disjunct views. - * - * Returns a sequence of split iterators, each iterating over some subset of the - * elements in the collection. These subsets are disjoint and should be approximately - * equal in size. These subsets are not empty, unless the iterator is empty in which - * case this method returns a sequence with a single empty iterator. If the splitter has - * more than two elements, this method will return two or more splitters. 
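// A sketch of this contract on a parallel vector's iterator (which is a Splitter):
val sp = (1 to 8).toVector.par.iterator
sp.split.map(_.toList)   // e.g. List(List(1, 2, 3, 4), List(5, 6, 7, 8)): disjoint, roughly equal parts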
- * - * Implementors are advised to keep this partition relatively small - two splitters are - * already enough when partitioning the collection, although there may be a few more. - * - * '''Note:''' this method actually invalidates the current splitter. - * - * @return a sequence of disjunct iterators of the collection - */ - def split: Seq[Splitter[T]] - /* - * '''Note:''' splitters in this sequence may actually be empty and it can contain a splitter - * which iterates over the same elements as the original splitter AS LONG AS calling `split` - * a finite number of times on the resulting splitters eventually returns a nontrivial partition. - * - * Note that the docs contract above yields implementations which are a subset of implementations - * defined by this fineprint. - * - * The rationale behind this is best given by the following example: - * try splitting an iterator over a linear hash table. - */ -} - -object Splitter { - def empty[T]: Splitter[T] = new Splitter[T] { - def hasNext = false - def next = Iterator.empty.next() - def split = Seq(this) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/TaskSupport.scala b/tests/scala2-library/src/library/scala/collection/parallel/TaskSupport.scala deleted file mode 100644 index 4d633253cebe..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/TaskSupport.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import java.util.concurrent.ThreadPoolExecutor -import java.util.concurrent.ForkJoinPool -import scala.concurrent.ExecutionContext - -/** A trait implementing the scheduling of a parallel collection operation. - * - * Parallel collections are modular in the way operations are scheduled. Each - * parallel collection is parameterized with a task support object which is - * responsible for scheduling and load-balancing tasks to processors. - * - * A task support object can be changed in a parallel collection after it has - * been created, but only during a quiescent period, i.e. while there are no - * concurrent invocations to parallel collection methods. - * - * There are currently a few task support implementations available for - * parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]] - * uses a fork-join pool internally. - * - * The [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the - * default execution context implementation found in scala.concurrent, and it - * reuses the thread pool used in scala.concurrent. - * - * The execution context task support is set to each parallel collection by - * default, so parallel collections reuse the same fork-join pool as the - * future API. - * - * Here is a way to change the task support of a parallel collection: - * - * {{{ - * import scala.collection.parallel._ - * val pc = mutable.ParArray(1, 2, 3) - * pc.tasksupport = new ForkJoinTaskSupport( - * new java.util.concurrent.ForkJoinPool(2)) - * }}} - * - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section - * on the parallel collection's guide for more information. - */ -trait TaskSupport extends Tasks - -/** A task support that uses a fork join pool to schedule tasks. 
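// In the same spirit as the configuration example above, a collection can also be tied to
// an explicit execution context (a sketch with illustrative names):
import scala.collection.parallel._
import scala.concurrent.ExecutionContext
val pc2 = immutable.ParVector(1, 2, 3)
pc2.tasksupport = new ExecutionContextTaskSupport(ExecutionContext.global)
pc2.map(_ * 2)   // scheduled through the global execution context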
- * - * @see [[scala.collection.parallel.TaskSupport]] for more information. - */ -class ForkJoinTaskSupport(val environment: ForkJoinPool = ForkJoinTasks.defaultForkJoinPool) -extends TaskSupport with AdaptiveWorkStealingForkJoinTasks - -/** A task support that uses a thread pool executor to schedule tasks. - * - * @see [[scala.collection.parallel.TaskSupport]] for more information. - */ -@deprecated("use `ForkJoinTaskSupport` instead", "2.11.0") -class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool) -extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks - -/** A task support that uses an execution context to schedule tasks. - * - * It can be used with the default execution context implementation in the - * `scala.concurrent` package. It internally forwards the call to either a - * forkjoin based task support or a thread pool executor one, depending on - * what the execution context uses. - * - * By default, parallel collections are parameterized with this task support - * object, so parallel collections share the same execution context backend - * as the rest of the `scala.concurrent` package. - * - * @see [[scala.collection.parallel.TaskSupport]] for more information. - */ -class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global) -extends TaskSupport with ExecutionContextTasks diff --git a/tests/scala2-library/src/library/scala/collection/parallel/Tasks.scala b/tests/scala2-library/src/library/scala/collection/parallel/Tasks.scala deleted file mode 100644 index 94231bd40f9f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/Tasks.scala +++ /dev/null @@ -1,558 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import java.util.concurrent.ThreadPoolExecutor -import java.util.concurrent.{ForkJoinPool, RecursiveAction, ForkJoinWorkerThread} -import scala.concurrent.ExecutionContext -import scala.util.control.Breaks._ -import scala.annotation.unchecked.uncheckedVariance - -trait Task[R, +Tp] { - type Result = R - - def repr = this.asInstanceOf[Tp] - - /** Body of the task - non-divisible unit of work done by this task. - * Optionally is provided with the result from the previous completed task - * or `None` if there was no previous task (or the previous task is uncompleted or unknown). - */ - def leaf(result: Option[R]) - - /** A result that can be accessed once the task is completed. */ - var result: R - - /** Decides whether or not this task should be split further. */ - def shouldSplitFurther: Boolean - - /** Splits this task into a list of smaller tasks. */ - private[parallel] def split: Seq[Task[R, Tp]] - - /** Read of results of `that` task and merge them into results of this one. 
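// At the user level, a reduction like the following is what gets decomposed into Task
// leaves and merges: each leaf folds one chunk, merge combines the partial results (a sketch):
val total = (1 to 100).toVector.par.fold(0)(_ + _)   // 5050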
*/ - private[parallel] def merge(that: Tp @uncheckedVariance) {} - - // exception handling mechanism - @volatile var throwable: Throwable = null - def forwardThrowable() = if (throwable != null) throw throwable - - // tries to do the leaf computation, storing the possible exception - private[parallel] def tryLeaf(lastres: Option[R]) { - try { - tryBreakable { - leaf(lastres) - result = result // ensure that effects of `leaf` are visible to readers of `result` - } catchBreak { - signalAbort() - } - } catch { - case thr: Throwable => - result = result // ensure that effects of `leaf` are visible - throwable = thr - signalAbort() - } - } - - private[parallel] def tryMerge(t: Tp @uncheckedVariance) { - val that = t.asInstanceOf[Task[R, Tp]] - if (this.throwable == null && that.throwable == null) merge(t) - mergeThrowables(that) - } - - private[parallel] def mergeThrowables(that: Task[_, _]) { - if (this.throwable != null && that.throwable != null) - this.throwable.addSuppressed(that.throwable) - else if (this.throwable == null && that.throwable != null) - this.throwable = that.throwable - } - - // override in concrete task implementations to signal abort to other tasks - private[parallel] def signalAbort() {} -} - - -/** A trait that declares task execution capabilities used - * by parallel collections. - */ -trait Tasks { - - private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]() - - private[parallel] def debuglog(s: String) = synchronized { - debugMessages += s - } - - trait WrappedTask[R, +Tp] { - /** the body of this task - what it executes, how it gets split and how results are merged. */ - val body: Task[R, Tp] - - def split: Seq[WrappedTask[R, Tp]] - /** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */ - def compute() - /** Start task. */ - def start() - /** Wait for task to finish. */ - def sync() - /** Try to cancel the task. - * @return `true` if cancellation is successful. - */ - def tryCancel(): Boolean - /** If the task has been cancelled successfully, those syncing on it may - * automatically be notified, depending on the implementation. If they - * aren't, this release method should be called after processing the - * cancelled task. - * - * This method may be overridden. - */ - def release() {} - } - - /* task control */ - - /** The type of the environment is more specific in the implementations. */ - val environment: AnyRef - - /** Executes a task and returns a future. Forwards an exception if some task threw it. */ - def execute[R, Tp](fjtask: Task[R, Tp]): () => R - - /** Executes a result task, waits for it to finish, then returns its result. Forwards an exception if some task threw it. */ - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R - - /** Retrieves the parallelism level of the task execution environment. */ - def parallelismLevel: Int - -} - - - -/** This trait implements scheduling by employing - * an adaptive work stealing technique. 
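// A sketch of the exception handling wired up by tryLeaf, mergeThrowables and
// forwardThrowable: a failure in any chunk resurfaces on the calling thread,
// with failures from other chunks attached as suppressed exceptions.
try (1 to 100).toVector.par.foreach(i => if (i == 42) sys.error("boom"))
catch { case e: RuntimeException => println(e.getMessage) }   // prints "boom"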
- */ -trait AdaptiveWorkStealingTasks extends Tasks { - - trait AWSWrappedTask[R, Tp] extends super.WrappedTask[R, Tp] { - @volatile var next: AWSWrappedTask[R, Tp] = null - @volatile var shouldWaitFor = true - - def split: Seq[AWSWrappedTask[R, Tp]] - - def compute() = if (body.shouldSplitFurther) { - internal() - release() - } else { - body.tryLeaf(None) - release() - } - - def internal() = { - var last = spawnSubtasks() - - last.body.tryLeaf(None) - last.release() - body.result = last.body.result - body.throwable = last.body.throwable - - while (last.next != null) { - // val lastresult = Option(last.body.result) - last = last.next - if (last.tryCancel()) { - // println("Done with " + beforelast.body + ", next direct is " + last.body) - last.body.tryLeaf(Some(body.result)) - last.release() - } else { - // println("Done with " + beforelast.body + ", next sync is " + last.body) - last.sync() - } - // println("Merging " + body + " with " + last.body) - body.tryMerge(last.body.repr) - } - } - - def spawnSubtasks() = { - var last: AWSWrappedTask[R, Tp] = null - var head: AWSWrappedTask[R, Tp] = this - do { - val subtasks = head.split - head = subtasks.head - for (t <- subtasks.tail.reverse) { - t.next = last - last = t - t.start() - } - } while (head.body.shouldSplitFurther) - head.next = last - head - } - - def printChain() = { - var curr = this - var chain = "chain: " - while (curr != null) { - chain += curr + " ---> " - curr = curr.next - } - println(chain) - } - } - - // specialize ctor - protected def newWrappedTask[R, Tp](b: Task[R, Tp]): AWSWrappedTask[R, Tp] - -} - - -/** An implementation of tasks objects based on the Java thread pooling API. */ -@deprecated("use `ForkJoinTasks` instead", "2.11.0") -trait ThreadPoolTasks extends Tasks { - import java.util.concurrent._ - - trait TPWrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] { - // initially, this is null - // once the task is started, this future is set and used for `sync` - // utb: var future: Future[_] = null - @volatile var owned = false - @volatile var completed = false - - def start() = synchronized { - // debuglog("Starting " + body) - // utb: future = executor.submit(this) - executor.synchronized { - incrTasks() - executor.submit(this) - } - } - def sync() = synchronized { - // debuglog("Syncing on " + body) - // utb: future.get() - executor.synchronized { - val coresize = executor.getCorePoolSize - if (coresize < totaltasks) { - executor.setCorePoolSize(coresize + 1) - //assert(executor.getCorePoolSize == (coresize + 1)) - } - } - while (!completed) this.wait - } - def tryCancel() = synchronized { - // utb: future.cancel(false) - if (!owned) { - // debuglog("Cancelling " + body) - owned = true - true - } else false - } - def run() = { - // utb: compute - var isOkToRun = false - synchronized { - if (!owned) { - owned = true - isOkToRun = true - } - } - if (isOkToRun) { - // debuglog("Running body of " + body) - compute() - } else { - // just skip - // debuglog("skipping body of " + body) - } - } - override def release() = synchronized { - //println("releasing: " + this + ", body: " + this.body) - completed = true - executor.synchronized { - decrTasks() - } - this.notifyAll - } - } - - protected def newWrappedTask[R, Tp](b: Task[R, Tp]): TPWrappedTask[R, Tp] - - val environment: ThreadPoolExecutor - def executor = environment.asInstanceOf[ThreadPoolExecutor] - def queue = executor.getQueue.asInstanceOf[LinkedBlockingQueue[Runnable]] - @volatile var totaltasks = 0 - - private def incrTasks() = synchronized 
{ - totaltasks += 1 - } - - private def decrTasks() = synchronized { - totaltasks -= 1 - } - - def execute[R, Tp](task: Task[R, Tp]): () => R = { - val t = newWrappedTask(task) - - // debuglog("-----------> Executing without wait: " + task) - t.start() - - () => { - t.sync() - t.body.forwardThrowable() - t.body.result - } - } - - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = { - val t = newWrappedTask(task) - - // debuglog("-----------> Executing with wait: " + task) - t.start() - - t.sync() - t.body.forwardThrowable() - t.body.result - } - - def parallelismLevel = ThreadPoolTasks.numCores - -} - -@deprecated("use `ForkJoinTasks` instead", "2.11.0") -object ThreadPoolTasks { - import java.util.concurrent._ - - val numCores = Runtime.getRuntime.availableProcessors - - val tcount = new atomic.AtomicLong(0L) - - val defaultThreadPool = new ThreadPoolExecutor( - numCores, - Int.MaxValue, - 60L, TimeUnit.MILLISECONDS, - new LinkedBlockingQueue[Runnable], - new ThreadFactory { - def newThread(r: Runnable) = { - val t = new Thread(r) - t.setName("pc-thread-" + tcount.incrementAndGet) - t.setDaemon(true) - t - } - }, - new ThreadPoolExecutor.CallerRunsPolicy - ) -} - -object FutureThreadPoolTasks { - import java.util.concurrent._ - - val numCores = Runtime.getRuntime.availableProcessors - - val tcount = new atomic.AtomicLong(0L) - - val defaultThreadPool = Executors.newCachedThreadPool() -} - - - -/** - * A trait describing objects that provide a fork/join pool. - */ -trait HavingForkJoinPool { - def forkJoinPool: ForkJoinPool -} - - -/** An implementation trait for parallel tasks based on the fork/join framework. - * - * @define fjdispatch - * If the current thread is a fork/join worker thread, the task's `fork` method will - * be invoked. Otherwise, the task will be executed on the fork/join pool. - */ -trait ForkJoinTasks extends Tasks with HavingForkJoinPool { - - trait FJWrappedTask[R, +Tp] extends RecursiveAction with super.WrappedTask[R, Tp] { - def start() = fork - def sync() = join - def tryCancel = tryUnfork - } - - // specialize ctor - protected def newWrappedTask[R, Tp](b: Task[R, Tp]): FJWrappedTask[R, Tp] - - /** The fork/join pool of this collection. - */ - def forkJoinPool: ForkJoinPool = environment.asInstanceOf[ForkJoinPool] - val environment: ForkJoinPool - - /** Executes a task and does not wait for it to finish - instead returns a future. - * - * $fjdispatch - */ - def execute[R, Tp](task: Task[R, Tp]): () => R = { - val fjtask = newWrappedTask(task) - - if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) { - fjtask.fork - } else { - forkJoinPool.execute(fjtask) - } - - () => { - fjtask.sync() - fjtask.body.forwardThrowable() - fjtask.body.result - } - } - - /** Executes a task on a fork/join pool and waits for it to finish. - * Returns its result when it does. - * - * $fjdispatch - * - * @return the result of the task - */ - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = { - val fjtask = newWrappedTask(task) - - if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) { - fjtask.fork - } else { - forkJoinPool.execute(fjtask) - } - - fjtask.sync() - // if (fjtask.body.throwable != null) println("throwing: " + fjtask.body.throwable + " at " + fjtask.body) - fjtask.body.forwardThrowable() - fjtask.body.result - } - - def parallelismLevel = forkJoinPool.getParallelism -} - -object ForkJoinTasks { - lazy val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool() -} - -/* Some boilerplate due to no deep mixin composition. 
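// A sketch of pointing a collection at a dedicated pool through the fork/join machinery
// above; parallelismLevel is simply the pool's configured parallelism:
import java.util.concurrent.ForkJoinPool
import scala.collection.parallel.ForkJoinTaskSupport
val fjSupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
fjSupport.parallelismLevel    // 4, i.e. ForkJoinPool.getParallelism
val pv = (1 to 1000).toVector.par
pv.tasksupport = fjSupport
pv.map(_ + 1)                 // executed on the four-thread pool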
Not sure if it can be done differently without them. - */ -trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks { - - class AWSFJWrappedTask[R, Tp](val body: Task[R, Tp]) - extends super[ForkJoinTasks].FJWrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].AWSWrappedTask[R, Tp] { - def split = body.split.map(b => newWrappedTask(b)) - } - - def newWrappedTask[R, Tp](b: Task[R, Tp]) = new AWSFJWrappedTask[R, Tp](b) -} - -@deprecated("use `AdaptiveWorkStealingForkJoinTasks` instead", "2.11.0") -trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks { - - class AWSTPWrappedTask[R, Tp](val body: Task[R, Tp]) - extends super[ThreadPoolTasks].TPWrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].AWSWrappedTask[R, Tp] { - def split = body.split.map(b => newWrappedTask(b)) - } - - def newWrappedTask[R, Tp](b: Task[R, Tp]) = new AWSTPWrappedTask[R, Tp](b) -} - -/** An implementation of the `Tasks` that uses Scala `Future`s to compute - * the work encapsulated in each task. - */ -private[parallel] final class FutureTasks(executor: ExecutionContext) extends Tasks { - import scala.concurrent._ - import scala.util._ - - private val maxdepth = (math.log(parallelismLevel) / math.log(2) + 1).toInt - - val environment: ExecutionContext = executor - - /** Divides this task into a lot of small tasks and executes them asynchronously - * using futures. - * Folds the futures and merges them asynchronously. - */ - private def exec[R, Tp](topLevelTask: Task[R, Tp]): Future[R] = { - implicit val ec = environment - - /** Constructs a tree of futures where tasks can be reasonably split. - */ - def compute(task: Task[R, Tp], depth: Int): Future[Task[R, Tp]] = { - if (task.shouldSplitFurther && depth < maxdepth) { - val subtasks = task.split - val subfutures = for (subtask <- subtasks.iterator) yield compute(subtask, depth + 1) - subfutures.reduceLeft { (firstFuture, nextFuture) => - for { - firstTask <- firstFuture - nextTask <- nextFuture - } yield { - firstTask tryMerge nextTask.repr - firstTask - } - } andThen { - case Success(firstTask) => - task.throwable = firstTask.throwable - task.result = firstTask.result - case Failure(exception) => - task.throwable = exception - } - } else Future { - task.tryLeaf(None) - task - } - } - - compute(topLevelTask, 0) map { t => - t.forwardThrowable() - t.result - } - } - - def execute[R, Tp](task: Task[R, Tp]): () => R = { - val future = exec(task) - val callback = () => { - Await.result(future, scala.concurrent.duration.Duration.Inf) - } - callback - } - - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = { - execute(task)() - } - - def parallelismLevel = Runtime.getRuntime.availableProcessors -} - -/** This tasks implementation uses execution contexts to spawn a parallel computation. - * - * As an optimization, it internally checks whether the execution context is the - * standard implementation based on fork/join pools, and if it is, creates a - * `ForkJoinTaskSupport` that shares the same pool to forward its request to it. - * - * Otherwise, it uses an execution context exclusive `Tasks` implementation to - * divide the tasks into smaller chunks and execute operations on it. - */ -trait ExecutionContextTasks extends Tasks { - def executionContext = environment - - val environment: ExecutionContext - - /** A driver serves as a target for this proxy `Tasks` object. 
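// A worked instance of the maxdepth bound used by FutureTasks above: with a parallelism
// level of 4, (math.log(4) / math.log(2) + 1).toInt evaluates to 3 (up to floating-point
// rounding), so the tree of futures splits a task at most about log2(parallelismLevel) + 1
// levels deep before running the remaining work as leaves.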
- * - * If the execution context has the standard implementation and uses fork/join pools, - * the driver is `ForkJoinTaskSupport` with the same pool, as an optimization. - * Otherwise, the driver will be a Scala `Future`-based implementation. - */ - private val driver: Tasks = executionContext match { - case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match { - case fjp: ForkJoinPool => new ForkJoinTaskSupport(fjp) - case _ => new FutureTasks(environment) - } - case _ => new FutureTasks(environment) - } - - def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task - - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task - - def parallelismLevel = driver.parallelismLevel -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParHashMap.scala deleted file mode 100644 index 06455ba00621..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ /dev/null @@ -1,334 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.immutable - -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.mutable.UnrolledBuffer -import scala.collection.generic.ParMapFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.immutable.{ HashMap, TrieIterator } -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.parallel.Task - -/** Immutable parallel hash map, based on hash tries. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tries for more information. 
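// A small usage sketch of the immutable parallel hash map defined here (illustrative values):
import scala.collection.parallel.immutable.ParHashMap
val m = ParHashMap("a" -> 1, "b" -> 2)
m + ("c" -> 3)                            // a new ParHashMap; the underlying hash trie is shared
m.get("a")                                // Some(1), delegated to the wrapped immutable.HashMap
m.map { case (k, v) => k -> (v * 10) }    // mapped in parallel, rebuilt through HashMapCombiner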
- * - * @define Coll `immutable.ParHashMap` - * @define coll immutable parallel hash map - */ -@SerialVersionUID(1L) -class ParHashMap[K, +V] private[immutable] (private[this] val trie: HashMap[K, V]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParHashMap] - with ParMapLike[K, V, ParHashMap[K, V], HashMap[K, V]] - with Serializable -{ -self => - - def this() = this(HashMap.empty[K, V]) - - override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap - - override def empty: ParHashMap[K, V] = new ParHashMap[K, V] - - protected[this] override def newCombiner = HashMapCombiner[K, V] - - def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) - - override def seq = trie - - def -(k: K) = new ParHashMap(trie - k) - - def +[U >: V](kv: (K, U)) = new ParHashMap(trie + kv) - - def get(k: K) = trie.get(k) - - override def size = trie.size - - protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { - case Some(old) => old - case None => newc - } - - class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int) - extends IterableSplitter[(K, V)] { - var i = 0 - def dup = triter match { - case t: TrieIterator[_] => - dupFromIterator(t.dupIterator) - case _ => - val buff = triter.toBuffer - triter = buff.iterator - dupFromIterator(buff.iterator) - } - private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = { - val phit = new ParHashMapIterator(it, sz) - phit.i = i - phit - } - def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match { - case t: TrieIterator[_] => - val previousRemaining = remaining - val ((fst, fstlength), snd) = t.split - val sndlength = previousRemaining - fstlength - Seq( - new ParHashMapIterator(fst, fstlength), - new ParHashMapIterator(snd, sndlength) - ) - case _ => - // iterator of the collision map case - val buff = triter.toBuffer - val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) } - } - def next(): (K, V) = { - i += 1 - val r = triter.next() - r - } - def hasNext: Boolean = { - i < sz - } - def remaining = sz - i - override def toString = "HashTrieIterator(" + sz + ")" - } - - /* debug */ - - private[parallel] def printDebugInfo() { - println("Parallel hash trie") - println("Top level inner trie type: " + trie.getClass) - trie match { - case hm: HashMap.HashMap1[k, v] => - println("single node type") - println("key stored: " + hm.getKey) - println("hash of key: " + hm.getHash) - println("computed hash of " + hm.getKey + ": " + hm.computeHashFor(hm.getKey)) - println("trie.get(key): " + hm.get(hm.getKey)) - case _ => - println("other kind of node") - } - } -} - -/** $factoryInfo - * @define Coll `immutable.ParHashMap` - * @define coll immutable parallel hash map - */ -object ParHashMap extends ParMapFactory[ParHashMap] { - def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = HashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = { - new CanCombineFromMap[K, V] - } - - def fromTrie[K, V](t: HashMap[K, V]) = new ParHashMap(t) - - var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0) -} - -private[parallel] abstract class HashMapCombiner[K, V] -extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) { -//self: 
EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] => - import HashMapCombiner._ - val emptyTrie = HashMap.empty[K, V] - - def +=(elem: (K, V)) = { - sz += 1 - val hc = emptyTrie.computeHash(elem._1) - val pos = hc & 0x1f - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[(K, V)] - } - // add to bucket - buckets(pos) += elem - this - } - - def result = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashMap[K, V]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashMap[K, V] - else if (sz == 1) new ParHashMap[K, V](root(0)) - else { - val trie = new HashMap.HashTrieMap(bitmap, root, sz) - new ParHashMap[K, V](trie) - } - } - - def groupByKey[Repr](cbf: () => Combiner[V, Repr]): ParHashMap[K, Repr] = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashMap[K, AnyRef]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashMap[K, Repr] - else if (sz == 1) new ParHashMap[K, Repr](root(0).asInstanceOf[HashMap[K, Repr]]) - else { - val trie = new HashMap.HashTrieMap(bitmap, root.asInstanceOf[Array[HashMap[K, Repr]]], sz) - new ParHashMap[K, Repr](trie) - } - } - - override def toString = { - "HashTrieCombiner(sz: " + size + ")" - //"HashTrieCombiner(buckets:\n\t" + buckets.filter(_ != null).mkString("\n\t") + ")\n" - } - - /* tasks */ - - class CreateTrie(bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, V]], offset: Int, howmany: Int) - extends Task[Unit, CreateTrie] { - @volatile var result = () - def leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createTrie(bucks(i)) - i += 1 - } - result = result - } - private def createTrie(elems: Unrolled[(K, V)]): HashMap[K, V] = { - var trie = new HashMap[K, V] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val kv = chunkarr(i) - val hc = trie.computeHash(kv._1) - trie = trie.updated0(kv._1, hc, rootbits, kv._2, kv, null) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - trie - } - def split = { - val fp = howmany / 2 - List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } - - class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int) - extends Task[Unit, CreateGroupedTrie[Repr]] { - @volatile var result = () - def leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createGroupedTrie(bucks(i)).asInstanceOf[HashMap[K, AnyRef]] - i += 1 - } - result = result - } - private def createGroupedTrie(elems: Unrolled[(K, V)]): HashMap[K, Repr] = { - var trie = new HashMap[K, Combiner[V, Repr]] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = 
unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val kv = chunkarr(i) - val hc = trie.computeHash(kv._1) - - // check to see if already present - val cmb: Combiner[V, Repr] = trie.get0(kv._1, hc, rootbits) match { - case Some(cmb) => cmb - case None => - val cmb: Combiner[V, Repr] = cbf() - trie = trie.updated0[Combiner[V, Repr]](kv._1, hc, rootbits, cmb, null, null) - cmb - } - cmb += kv._2 - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]] - } - private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match { - case hm1: HashMap.HashMap1[_, _] => - val evaledvalue = hm1.value.result - new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null) - case hmc: HashMap.HashMapCollision1[_, _] => - val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) } - new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs) - case htm: HashMap.HashTrieMap[k, v] => - var i = 0 - while (i < htm.elems.length) { - htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]] - i += 1 - } - htm.asInstanceOf[HashMap[K, Repr]] - case empty => empty.asInstanceOf[HashMap[K, Repr]] - } - def split = { - val fp = howmany / 2 - List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } -} - -private[parallel] object HashMapCombiner { - def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] - - private[immutable] val rootbits = 5 - private[immutable] val rootsize = 1 << 5 -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParHashSet.scala deleted file mode 100644 index 3a1ec7fff82f..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ /dev/null @@ -1,223 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.immutable - - - -import scala.collection.parallel.ParSetLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.mutable.UnrolledBuffer -import scala.collection.generic.ParSetFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.GenericCompanion -import scala.collection.immutable.{ HashSet, TrieIterator } -import scala.collection.parallel.Task - - - -/** Immutable parallel hash set, based on hash tries. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the set - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tries for more information. 
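A minimal usage sketch of the hash-trie-backed parallel set described above, assuming a Scala 2.12 standard library (or the scala-parallel-collections module on 2.13+); `ParHashSetSketch` is an illustrative name, not part of these sources:

import scala.collection.parallel.immutable.ParHashSet

object ParHashSetSketch {
  def main(args: Array[String]): Unit = {
    // .par on an immutable HashSet wraps the existing trie; no elements are copied.
    val ps: ParHashSet[Int] = scala.collection.immutable.HashSet(1 to 1000: _*).par
    // Bulk operations are partitioned over the trie by the splitter.
    val evens = ps.filter(_ % 2 == 0)
    println(evens.size)                                                        // 500
    println(evens.seq.isInstanceOf[scala.collection.immutable.HashSet[_]])     // true: seq returns the trie
  }
}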
- * - * @define Coll `immutable.ParHashSet` - * @define coll immutable parallel hash set - */ -@SerialVersionUID(1L) -class ParHashSet[T] private[immutable] (private[this] val trie: HashSet[T]) -extends ParSet[T] - with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], HashSet[T]] - with Serializable -{ -self => - - def this() = this(HashSet.empty[T]) - - override def companion: GenericCompanion[ParHashSet] with GenericParCompanion[ParHashSet] = ParHashSet - - override def empty: ParHashSet[T] = new ParHashSet[T] - - def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) - - override def seq = trie - - def -(e: T) = new ParHashSet(trie - e) - - def +(e: T) = new ParHashSet(trie + e) - - def contains(e: T): Boolean = trie.contains(e) - - override def size = trie.size - - protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { - case Some(old) => old - case None => newc - } - - class ParHashSetIterator(var triter: Iterator[T], val sz: Int) - extends IterableSplitter[T] { - var i = 0 - def dup = triter match { - case t: TrieIterator[_] => - dupFromIterator(t.dupIterator) - case _ => - val buff = triter.toBuffer - triter = buff.iterator - dupFromIterator(buff.iterator) - } - private def dupFromIterator(it: Iterator[T]) = { - val phit = new ParHashSetIterator(it, sz) - phit.i = i - phit - } - def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match { - case t: TrieIterator[_] => - val previousRemaining = remaining - val ((fst, fstlength), snd) = t.split - val sndlength = previousRemaining - fstlength - Seq( - new ParHashSetIterator(fst, fstlength), - new ParHashSetIterator(snd, sndlength) - ) - case _ => - // iterator of the collision map case - val buff = triter.toBuffer - val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) } - } - def next(): T = { - i += 1 - triter.next() - } - def hasNext: Boolean = { - i < sz - } - def remaining = sz - i - } - -} - - -/** $factoryInfo - * @define Coll `immutable.ParHashSet` - * @define coll immutable parallel hash set - */ -object ParHashSet extends ParSetFactory[ParHashSet] { - def newCombiner[T]: Combiner[T, ParHashSet[T]] = HashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = - new GenericCanCombineFrom[T] - - def fromTrie[T](t: HashSet[T]) = new ParHashSet(t) -} - - -private[immutable] abstract class HashSetCombiner[T] -extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) { -//self: EnvironmentPassingCombiner[T, ParHashSet[T]] => - import HashSetCombiner._ - val emptyTrie = HashSet.empty[T] - - def +=(elem: T) = { - sz += 1 - val hc = emptyTrie.computeHash(elem) - val pos = hc & 0x1f - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[Any] - } - // add to bucket - buckets(pos) += elem - this - } - - def result = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashSet[T]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashSet[T] - else if (sz == 1) new ParHashSet[T](root(0)) - else { - val trie = new HashSet.HashTrieSet(bitmap, root, sz) - new 
ParHashSet[T](trie) - } - } - - /* tasks */ - - class CreateTrie(bucks: Array[Unrolled[Any]], root: Array[HashSet[T]], offset: Int, howmany: Int) - extends Task[Unit, CreateTrie] { - var result = () - def leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createTrie(bucks(i)) - i += 1 - } - } - private def createTrie(elems: Unrolled[Any]): HashSet[T] = { - var trie = new HashSet[T] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val v = chunkarr(i).asInstanceOf[T] - val hc = trie.computeHash(v) - trie = trie.updated0(v, hc, rootbits) // internal API, private[collection] - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - trie - } - def split = { - val fp = howmany / 2 - List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } -} - - -object HashSetCombiner { - def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {} - - private[immutable] val rootbits = 5 - private[immutable] val rootsize = 1 << 5 -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParIterable.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParIterable.scala deleted file mode 100644 index 417622facced..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParIterable.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic._ -import scala.collection.parallel.ParIterableLike -import scala.collection.parallel.Combiner - -/** A template trait for immutable parallel iterable collections. 
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[+T] -extends scala.collection.GenIterable[T] - with scala.collection.parallel.ParIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]] - with Immutable -{ - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - // if `immutable.ParIterableLike` is introduced, please move these 4 methods there - override def toIterable: ParIterable[T] = this - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = - new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T] - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParMap.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParMap.scala deleted file mode 100644 index 65bb2e12c545..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParMap.scala +++ /dev/null @@ -1,92 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic.ParMapFactory -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner - -/** A template trait for immutable parallel maps. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, +V] -extends scala.collection/*.immutable*/.GenMap[K, V] - with GenericParMapTemplate[K, V, ParMap] - with parallel.ParMap[K, V] - with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]] -{ -self => - - override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - override def empty: ParMap[K, V] = new ParHashMap[K, V] - - override def stringPrefix = "ParMap" - - override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]] - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - def + [U >: V](kv: (K, U)): ParMap[K, U] - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d) - - /** The same map with a given default value. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
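A small sketch of the default-value behaviour spelled out above, assuming the Scala 2.12 parallel collections (object name illustrative): `apply` consults the default, `get` does not, and transformers drop the wrapper.

import scala.collection.parallel.immutable.ParMap

object WithDefaultSketch {
  def main(args: Array[String]): Unit = {
    val m = ParMap("a" -> 1, "b" -> 2).withDefaultValue(0)
    println(m("missing"))       // 0: apply falls back to the default value
    println(m.get("missing"))   // None: get is not affected by the default
    val doubled = m.map { case (k, v) => (k, v * 2) }
    // `doubled` is a plain ParMap again; doubled("missing") would throw NoSuchElementException.
    println(doubled.get("a"))   // Some(2)
  }
}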
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d) - -} - - - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = HashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V) - extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] { - override def empty = new WithDefault(underlying.empty, d) - override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) - override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def withDefault[U >: V](d: K => U): ParMap[K, U] = new WithDefault[K, U](underlying, d) - override def withDefaultValue[U >: V](d: U): ParMap[K, U] = new WithDefault[K, U](underlying, x => d) - override def seq = underlying.seq.withDefault(d) - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParRange.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParRange.scala deleted file mode 100644 index de2b53a6c0c0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParRange.scala +++ /dev/null @@ -1,118 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.immutable - -import scala.collection.immutable.Range -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import scala.collection.Iterator - -/** Parallel ranges. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @param range the sequential range this parallel range was obtained from - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_range Scala's Parallel Collections Library overview]] - * section on `ParRange` for more information. 
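A short sketch of how a ParRange is normally obtained, assuming Scala 2.12 (object name illustrative): `.par` on a Range wraps it without copying, and splitting reuses Range.take/drop as in the splitter below.

object ParRangeSketch {
  def main(args: Array[String]): Unit = {
    val pr = (1 to 100).par      // a ParRange wrapping the original Range
    println(pr.sum)              // 5050, reduced over splitter chunks
    println(pr.map(_ * 2).max)   // 200
  }
}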
- * - * @define Coll `immutable.ParRange` - * @define coll immutable parallel range - */ -@SerialVersionUID(1L) -class ParRange(val range: Range) -extends ParSeq[Int] - with Serializable -{ -self => - - override def seq = range - - @inline final def length = range.length - - @inline final def apply(idx: Int) = range.apply(idx) - - def splitter = new ParRangeIterator - - class ParRangeIterator(range: Range = self.range) - extends SeqSplitter[Int] { - override def toString = "ParRangeIterator(over: " + range + ")" - private var ind = 0 - private val len = range.length - - final def remaining = len - ind - - final def hasNext = ind < len - - final def next = if (hasNext) { - val r = range.apply(ind) - ind += 1 - r - } else Iterator.empty.next() - - private def rangeleft = range.drop(ind) - - def dup = new ParRangeIterator(rangeleft) - - def split = { - val rleft = rangeleft - val elemleft = rleft.length - if (elemleft < 2) Seq(new ParRangeIterator(rleft)) - else Seq( - new ParRangeIterator(rleft.take(elemleft / 2)), - new ParRangeIterator(rleft.drop(elemleft / 2)) - ) - } - - def psplit(sizes: Int*) = { - var rleft = rangeleft - for (sz <- sizes) yield { - val fronttaken = rleft.take(sz) - rleft = rleft.drop(sz) - new ParRangeIterator(fronttaken) - } - } - - /* accessors */ - - override def foreach[U](f: Int => U): Unit = { - rangeleft.foreach(f.asInstanceOf[Int => Unit]) - ind = len - } - - override def reduce[U >: Int](op: (U, U) => U): U = { - val r = rangeleft.reduceLeft(op) - ind = len - r - } - - /* transformers */ - - override def map2combiner[S, That](f: Int => S, cb: Combiner[S, That]): Combiner[S, That] = { - while (hasNext) { - cb += f(next) - } - cb - } - } - - override def toString = s"Par$range" -} - -object ParRange { - def apply(start: Int, end: Int, step: Int, inclusive: Boolean) = new ParRange( - if (inclusive) new Range.Inclusive(start, end, step) - else new Range(start, end, step) - ) -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParSeq.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParSeq.scala deleted file mode 100644 index f0502fbbcbdc..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParSeq.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner - -/** An immutable variant of `ParSeq`. 
- * - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -trait ParSeq[+T] -extends scala.collection/*.immutable*/.GenSeq[T] - with scala.collection.parallel.ParSeq[T] - with ParIterable[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]] -{ - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - override def toSeq: ParSeq[T] = this -} - -/** $factoryInfo - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParSet.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParSet.scala deleted file mode 100644 index 7837d6f264eb..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParSet.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic._ -import scala.collection.parallel.ParSetLike -import scala.collection.parallel.Combiner - -/** An immutable variant of `ParSet`. - * - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -trait ParSet[T] -extends scala.collection/*.immutable*/.GenSet[T] - with GenericParTemplate[T, ParSet] - with parallel.ParSet[T] - with ParIterable[T] - with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]] -{ -self => - override def empty: ParSet[T] = ParHashSet[T]() - - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - - override def stringPrefix = "ParSet" - - // ok, because this could only violate `apply` and we can live with that - override def toSet[U >: T]: ParSet[U] = this.asInstanceOf[ParSet[U]] -} - -/** $factoryInfo - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -object ParSet extends ParSetFactory[ParSet] { - def newCombiner[T]: Combiner[T, ParSet[T]] = HashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParVector.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParVector.scala deleted file mode 100644 index c2c1d042e1d9..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/ParVector.scala +++ /dev/null @@ -1,128 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory} -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import mutable.ArrayBuffer -import immutable.Vector -import 
immutable.VectorBuilder -import immutable.VectorIterator - -/** Immutable parallel vectors, based on vectors. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the vector - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_vector Scala's Parallel Collections Library overview]] - * section on `ParVector` for more information. - * - * @define Coll `immutable.ParVector` - * @define coll immutable parallel vector - */ -class ParVector[+T](private[this] val vector: Vector[T]) -extends ParSeq[T] - with GenericParTemplate[T, ParVector] - with ParSeqLike[T, ParVector[T], Vector[T]] - with Serializable -{ - override def companion = ParVector - - def this() = this(Vector()) - - def apply(idx: Int) = vector.apply(idx) - - def length = vector.length - - def splitter: SeqSplitter[T] = { - val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) - vector.initIterator(pit) - pit - } - - override def seq: Vector[T] = vector - - override def toVector: Vector[T] = vector - - class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] { - def remaining: Int = remainingElementCount - def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter - def split: Seq[ParVectorIterator] = { - val rem = remaining - if (rem >= 2) psplit(rem / 2, rem - rem / 2) - else Seq(this) - } - def psplit(sizes: Int*): Seq[ParVectorIterator] = { - var remvector = remainingVector - val splitted = new ArrayBuffer[Vector[T]] - for (sz <- sizes) { - splitted += remvector.take(sz) - remvector = remvector.drop(sz) - } - splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator]) - } - } -} - -/** $factoryInfo - * @define Coll `immutable.ParVector` - * @define coll immutable parallel vector - */ -object ParVector extends ParFactory[ParVector] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] = - new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T] - - def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]] -} - -private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] { -//self: EnvironmentPassingCombiner[T, ParVector[T]] => - var sz = 0 - val vectors = new ArrayBuffer[VectorBuilder[T]] += new VectorBuilder[T] - - def size: Int = sz - - def +=(elem: T): this.type = { - vectors.last += elem - sz += 1 - this - } - - def clear() = { - vectors.clear() - vectors += new VectorBuilder[T] - sz = 0 - } - - def result: ParVector[T] = { - val rvb = new VectorBuilder[T] - for (vb <- vectors) { - rvb ++= vb.result - } - new ParVector(rvb.result) - } - - def combine[U <: T, NewTo >: ParVector[T]](other: Combiner[U, NewTo]) = if (other eq this) this else { - val that = other.asInstanceOf[LazyParVectorCombiner[T]] - sz += that.sz - vectors ++= that.vectors - this - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/immutable/package.scala b/tests/scala2-library/src/library/scala/collection/parallel/immutable/package.scala deleted file mode 100644 index 3cafdba5f702..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/immutable/package.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ 
** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -package immutable { - /** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method. - * - * @tparam T type of the elements - * @param elem the element in the repetition - * @param length the length of the collection - */ - private[parallel] class Repetition[T](elem: T, val length: Int) extends ParSeq[T] { - self => - - def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException("" + idx) - override def seq: collection.immutable.Seq[T] = new collection.AbstractSeq[T] with collection.immutable.Seq[T] { - override def length: Int = self.length - override def apply(idx: Int): T = self.apply(idx) - override def iterator: Iterator[T] = Iterator.continually(elem).take(length) - override def par: ParSeq[T] = self - } - def update(idx: Int, elem: T) = throw new UnsupportedOperationException - - class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] { - def remaining = until - i - def hasNext = i < until - def next = { i += 1; elem } - def dup = new ParIterator(i, until, elem) - def psplit(sizes: Int*) = { - val incr = sizes.scanLeft(0)(_ + _) - for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) - } - def split = psplit(remaining / 2, remaining - remaining / 2) - } - - def splitter = new ParIterator - } -} - -package object immutable { - /* package level methods */ - def repetition[T](elem: T, len: Int) = new Repetition(elem, len) -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/LazyCombiner.scala deleted file mode 100644 index f77f654fca0a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/LazyCombiner.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic.Growable -import scala.collection.generic.Sizing -import scala.collection.mutable.ArrayBuffer -import scala.collection.parallel.Combiner - -/** Implements combining contents of two combiners - * by postponing the operation until `result` method is called. It chains - * the leaf results together instead of evaluating the actual collection. 
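A self-contained toy sketch of the chaining strategy just described; `ChunkBuffer` is a made-up stand-in, not the real `Combiner` hierarchy: `combine` only links buffers together, and element copying is deferred until `result`.

import scala.collection.mutable.ArrayBuffer

final class ChunkBuffer[T] {
  // one growable buffer per leaf; combining concatenates the list of buffers only
  private val chunks = ArrayBuffer(ArrayBuffer.empty[T])
  def +=(elem: T): this.type = { chunks.last += elem; this }
  def combine(that: ChunkBuffer[T]): this.type = { chunks ++= that.chunks; this }
  // the actual collection is materialised exactly once, at the end
  def result: Vector[T] = chunks.flatten.toVector
}

object ChunkBufferDemo {
  def main(args: Array[String]): Unit = {
    val left = new ChunkBuffer[Int];  (1 to 3).foreach(left += _)
    val right = new ChunkBuffer[Int]; (4 to 6).foreach(right += _)
    println(left.combine(right).result)   // Vector(1, 2, 3, 4, 5, 6)
  }
}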
- * - * @tparam Elem the type of the elements in the combiner - * @tparam To the type of the collection the combiner produces - * @tparam Buff the type of the buffers that contain leaf results and this combiner chains together - */ -trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To] { -//self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] => - lazy val chain: ArrayBuffer[Buff] - val lastbuff = chain.last - def +=(elem: Elem) = { lastbuff += elem; this } - def result: To = allocateAndCopy - def clear() = { chain.clear() } - def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) { - if (other.isInstanceOf[LazyCombiner[_, _, _]]) { - val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]] - newLazyCombiner(chain ++= that.chain) - } else throw new UnsupportedOperationException("Cannot combine with combiner of different type.") - } else this - def size = chain.foldLeft(0)(_ + _.size) - - /** Method that allocates the data structure and copies elements into it using - * `size` and `chain` members. - */ - def allocateAndCopy: To - def newLazyCombiner(buffchain: ArrayBuffer[Buff]): LazyCombiner[Elem, To, Buff] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParArray.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParArray.scala deleted file mode 100644 index 46f803cddc29..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParArray.scala +++ /dev/null @@ -1,719 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection.parallel.mutable - - - -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Task -import scala.collection.parallel.CHECK_RATE -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.Builder -import scala.collection.GenTraversableOnce -import scala.reflect.ClassTag - -/** Parallel sequence holding elements in a linear array. - * - * `ParArray` is a parallel sequence with a predefined size. The size of the array - * cannot be changed after it's been created. - * - * `ParArray` internally keeps an array containing the elements. This means that - * bulk operations based on traversal ensure fast access to elements. `ParArray` uses lazy builders that - * create the internal data array only after the size of the array is known. In the meantime, they keep - * the result set fragmented. The fragments - * are copied into the resulting data array in parallel using fast array copy operations once all the combiners - * are populated in parallel. - * - * @tparam T type of the elements in the array - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_array Scala's Parallel Collections Library overview]] - * section on `ParArray` for more information. 
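A brief usage sketch for the parallel array described above, assuming Scala 2.12 (object name illustrative): `handoff` wraps an existing object array, and bulk transformers fill a pre-allocated target array in parallel.

import scala.collection.parallel.mutable.ParArray

object ParArraySketch {
  def main(args: Array[String]): Unit = {
    val pa: ParArray[String] = ParArray.handoff(Array("par", "array", "demo"))
    val lengths = pa.map(_.length)   // target array allocated once, filled by parallel tasks
    println(lengths.max)             // 5
    println(pa.mkString(" "))        // par array demo
  }
}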
- * - * @define Coll `ParArray` - * @define coll parallel array - * - */ -@SerialVersionUID(1L) -class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T]) -extends ParSeq[T] - with GenericParTemplate[T, ParArray] - with ParSeqLike[T, ParArray[T], ArraySeq[T]] - with Serializable -{ -self => - - @transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]] - - override def companion: GenericCompanion[ParArray] with GenericParCompanion[ParArray] = ParArray - - def this(sz: Int) = this { - require(sz >= 0) - new ArraySeq[T](sz) - } - - def apply(i: Int) = array(i).asInstanceOf[T] - - def update(i: Int, elem: T) = array(i) = elem - - def length = arrayseq.length - - override def seq = arrayseq - - protected[parallel] def splitter: ParArrayIterator = { - val pit = new ParArrayIterator - pit - } - - class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array) - extends SeqSplitter[T] { - def hasNext = i < until - - def next = { - val elem = arr(i) - i += 1 - elem.asInstanceOf[T] - } - - def remaining = until - i - - def dup = new ParArrayIterator(i, until, arr) - - def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = { - var traversed = i - val total = sizesIncomplete.reduceLeft(_ + _) - val left = remaining - val sizes = if (total >= left) sizesIncomplete else sizesIncomplete :+ (left - total) - for (sz <- sizes) yield if (traversed < until) { - val start = traversed - val end = (traversed + sz) min until - traversed = end - new ParArrayIterator(start, end, arr) - } else { - new ParArrayIterator(traversed, traversed, arr) - } - } - - override def split: Seq[ParArrayIterator] = { - val left = remaining - if (left >= 2) { - val splitpoint = left / 2 - val sq = Seq( - new ParArrayIterator(i, i + splitpoint, arr), - new ParArrayIterator(i + splitpoint, until, arr)) - i = until - sq - } else { - Seq(this) - } - } - - override def toString = "ParArrayIterator(" + i + ", " + until + ")" - - /* overrides for efficiency */ - - /* accessors */ - - override def foreach[U](f: T => U) = { - foreach_quick(f, arr, until, i) - i = until - } - - private def foreach_quick[U](f: T => U, a: Array[Any], ntil: Int, from: Int) = { - var j = from - while (j < ntil) { - f(a(j).asInstanceOf[T]) - j += 1 - } - } - - override def count(p: T => Boolean) = { - val c = count_quick(p, arr, until, i) - i = until - c - } - - private def count_quick(p: T => Boolean, a: Array[Any], ntil: Int, from: Int) = { - var cnt = 0 - var j = from - while (j < ntil) { - if (p(a(j).asInstanceOf[T])) cnt += 1 - j += 1 - } - cnt - } - - override def foldLeft[S](z: S)(op: (S, T) => S): S = { - val r = foldLeft_quick(arr, until, op, z) - i = until - r - } - - private def foldLeft_quick[S](a: Array[Any], ntil: Int, op: (S, T) => S, z: S): S = { - var j = i - var sum = z - while (j < ntil) { - sum = op(sum, a(j).asInstanceOf[T]) - j += 1 - } - sum - } - - override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op) - - override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop) - - override def sum[U >: T](implicit num: Numeric[U]): U = { - val s = sum_quick(num, arr, until, i, num.zero) - i = until - s - } - - private def sum_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, zero: U): U = { - var j = from - var sum = zero - while (j < ntil) { - sum = num.plus(sum, a(j).asInstanceOf[T]) - j += 1 - } - sum - } - - override def product[U >: T](implicit num: Numeric[U]): U = { - val p = product_quick(num, arr, until, 
i, num.one) - i = until - p - } - - private def product_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, one: U): U = { - var j = from - var prod = one - while (j < ntil) { - prod = num.times(prod, a(j).asInstanceOf[T]) - j += 1 - } - prod - } - - override def forall(p: T => Boolean): Boolean = { - if (isAborted) return false - - var all = true - while (i < until) { - val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE - - all = forall_quick(p, array, nextuntil, i) - if (all) i = nextuntil - else { - i = until - abort() - } - - if (isAborted) return false - } - all - } - - // it's faster to use a separate small method - private def forall_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { - var j = start - while (j < nextuntil) { - if (p(a(j).asInstanceOf[T])) j += 1 - else return false - } - true - } - - override def exists(p: T => Boolean): Boolean = { - if (isAborted) return true - - var some = false - while (i < until) { - val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE - - some = exists_quick(p, array, nextuntil, i) - if (some) { - i = until - abort() - } else i = nextuntil - - if (isAborted) return true - } - some - } - - // faster to use separate small method - private def exists_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { - var j = start - while (j < nextuntil) { - if (p(a(j).asInstanceOf[T])) return true - else j += 1 - } - false - } - - override def find(p: T => Boolean): Option[T] = { - if (isAborted) return None - - var r: Option[T] = None - while (i < until) { - val nextuntil = if ((i + CHECK_RATE) < until) (i + CHECK_RATE) else until - - r = find_quick(p, array, nextuntil, i) - - if (r != None) { - i = until - abort() - } else i = nextuntil - - if (isAborted) return r - } - r - } - - private def find_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Option[T] = { - var j = start - while (j < nextuntil) { - val elem = a(j).asInstanceOf[T] - if (p(elem)) return Some(elem) - else j += 1 - } - None - } - - override def drop(n: Int): ParArrayIterator = { - i += n - this - } - - override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) { - val totallen = (self.length - i) min len min (array.length - from) - Array.copy(arr, i, array, from, totallen) - i += totallen - } - - override def prefixLength(pred: T => Boolean): Int = { - val r = prefixLength_quick(pred, arr, until, i) - i += r + 1 - r - } - - private def prefixLength_quick(pred: T => Boolean, a: Array[Any], ntil: Int, startpos: Int): Int = { - var j = startpos - var endpos = ntil - while (j < endpos) { - if (pred(a(j).asInstanceOf[T])) j += 1 - else endpos = j - } - endpos - startpos - } - - override def indexWhere(pred: T => Boolean): Int = { - val r = indexWhere_quick(pred, arr, until, i) - val ret = if (r != -1) r - i else r - i = until - ret - } - - private def indexWhere_quick(pred: T => Boolean, a: Array[Any], ntil: Int, from: Int): Int = { - var j = from - var pos = -1 - while (j < ntil) { - if (pred(a(j).asInstanceOf[T])) { - pos = j - j = ntil - } else j += 1 - } - pos - } - - override def lastIndexWhere(pred: T => Boolean): Int = { - val r = lastIndexWhere_quick(pred, arr, i, until) - val ret = if (r != -1) r - i else r - i = until - ret - } - - private def lastIndexWhere_quick(pred: T => Boolean, a: Array[Any], from: Int, ntil: Int): Int = { - var pos = -1 - var j = ntil - 1 - while (j >= from) { - if (pred(a(j).asInstanceOf[T])) { - pos = j - j = -1 - } else j -= 
1 - } - pos - } - - override def sameElements(that: Iterator[_]): Boolean = { - var same = true - while (i < until && that.hasNext) { - if (arr(i) != that.next) { - i = until - same = false - } - i += 1 - } - same - } - - /* transformers */ - - override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = cbf(self.repr) - cb.sizeHint(remaining) - map2combiner_quick(f, arr, cb, until, i) - i = until - cb - } - - private def map2combiner_quick[S, That](f: T => S, a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) { - var j = from - while (j < ntil) { - cb += f(a(j).asInstanceOf[T]) - j += 1 - } - } - - override def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(self.repr) - collect2combiner_quick(pf, arr, cb, until, i) - i = until - cb - } - - private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) { - var j = from - val runWith = pf.runWith(b => cb += b) - while (j < ntil) { - val curr = a(j).asInstanceOf[T] - runWith(curr) - j += 1 - } - } - - override def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(self.repr) - while (i < until) { - val traversable = f(arr(i).asInstanceOf[T]) - if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator - else cb ++= traversable.seq - i += 1 - } - cb - } - - override def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { - filter2combiner_quick(pred, cb, arr, until, i) - i = until - cb - } - - private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { - var j = i - while(j < ntil) { - val curr = a(j).asInstanceOf[T] - if (pred(curr)) cb += curr - j += 1 - } - } - - override def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { - filterNot2combiner_quick(pred, cb, arr, until, i) - i = until - cb - } - - private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { - var j = i - while(j < ntil) { - val curr = a(j).asInstanceOf[T] - if (!pred(curr)) cb += curr - j += 1 - } - } - - override def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](cb: Bld): Bld = { - cb.sizeHint(remaining) - cb.ifIs[ResizableParArrayCombiner[T]] { - pac => - // with res. combiner: - val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] - Array.copy(arr, i, targetarr, pac.lastbuff.size, until - i) - pac.lastbuff.setInternalSize(remaining) - } otherwise { - cb.ifIs[UnrolledParArrayCombiner[T]] { - pac => - // with unr. 
combiner: - val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] - Array.copy(arr, i, targetarr, 0, until - i) - pac.buff.size = pac.buff.size + until - i - pac.buff.lastPtr.size = until - i - } otherwise { - copy2builder_quick(cb, arr, until, i) - i = until - } - } - cb - } - - private def copy2builder_quick[U >: T, Coll](b: Builder[U, Coll], a: Array[Any], ntil: Int, from: Int) { - var j = from - while (j < ntil) { - b += a(j).asInstanceOf[T] - j += 1 - } - } - - override def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { - partition2combiners_quick(pred, btrue, bfalse, arr, until, i) - i = until - (btrue, bfalse) - } - - private def partition2combiners_quick[U >: T, This](p: T => Boolean, btrue: Builder[U, This], bfalse: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { - var j = from - while (j < ntil) { - val curr = a(j).asInstanceOf[T] - if (p(curr)) btrue += curr else bfalse += curr - j += 1 - } - } - - override def take2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { - cb.sizeHint(n) - val ntil = i + n - val a = arr - while (i < ntil) { - cb += a(i).asInstanceOf[T] - i += 1 - } - cb - } - - override def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { - drop(n) - cb.sizeHint(remaining) - while (i < until) { - cb += arr(i).asInstanceOf[T] - i += 1 - } - cb - } - - override def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { - cb.ifIs[ResizableParArrayCombiner[T]] { - pac => - // with res. combiner: - val sz = remaining - pac.sizeHint(sz) - val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] - reverse2combiner_quick(targetarr, arr, 0, i, until) - pac.lastbuff.setInternalSize(sz) - } otherwise { - cb.ifIs[UnrolledParArrayCombiner[T]] { - pac => - // with unr. 
combiner: - val sz = remaining - pac.sizeHint(sz) - val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] - reverse2combiner_quick(targetarr, arr, 0, i, until) - pac.buff.size = pac.buff.size + sz - pac.buff.lastPtr.size = sz - } otherwise super.reverse2combiner(cb) - } - cb - } - - private def reverse2combiner_quick(targ: Array[Any], a: Array[Any], targfrom: Int, srcfrom: Int, srcuntil: Int) { - var j = srcfrom - var k = targfrom + srcuntil - srcfrom - 1 - while (j < srcuntil) { - targ(k) = a(j) - j += 1 - k -= 1 - } - } - - override def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, destarr: Array[A], from: Int) { - scanToArray_quick[U](array, destarr.asInstanceOf[Array[Any]], op, z, i, until, from) - i = until - } - - protected def scanToArray_quick[U](srcarr: Array[Any], destarr: Array[Any], op: (U, U) => U, z: U, srcfrom: Int, srcntil: Int, destfrom: Int) { - var last = z - var j = srcfrom - var k = destfrom - while (j < srcntil) { - last = op(last, srcarr(j).asInstanceOf[U]) - destarr(k) = last - j += 1 - k += 1 - } - } - - } - - /* operations */ - - private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]] - - override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) { - // reserve an array - val targarrseq = new ArraySeq[S](length) - val targetarr = targarrseq.array.asInstanceOf[Array[Any]] - - // fill it in parallel - tasksupport.executeAndWaitResult(new PAMap[S](f, targetarr, 0, length)) - - // wrap it into a parallel array - (new ParArray[S](targarrseq)).asInstanceOf[That] - } else super.map(f)(bf) - - override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That = - if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) { - // reserve an array - val targarrseq = new ArraySeq[U](length + 1) - val targetarr = targarrseq.array.asInstanceOf[Array[Any]] - targetarr(0) = z - - // do a parallel prefix scan - if (length > 0) tasksupport.executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult { - tree => tasksupport.executeAndWaitResult(new ScanToArray(tree, z, op, targetarr)) - }) - - // wrap the array into a parallel array - (new ParArray[U](targarrseq)).asInstanceOf[That] - } else super.scan(z)(op)(cbf) - - /* tasks */ - - class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any]) - extends Task[Unit, ScanToArray[U]] { - var result = () - - def leaf(prev: Option[Unit]) = iterate(tree) - private def iterate(tree: ScanTree[U]): Unit = tree match { - case ScanNode(left, right) => - iterate(left) - iterate(right) - case ScanLeaf(_, _, from, len, Some(prev), _) => - scanLeaf(array, targetarr, from, len, prev.acc) - case ScanLeaf(_, _, from, len, None, _) => - scanLeaf(array, targetarr, from, len, z) - } - private def scanLeaf(srcarr: Array[Any], targetarr: Array[Any], from: Int, len: Int, startval: U) { - var i = from - val until = from + len - var curr = startval - val operation = op - while (i < until) { - curr = operation(curr, srcarr(i).asInstanceOf[U]) - i += 1 - targetarr(i) = curr - } - } - def split = tree match { - case ScanNode(left, right) => Seq( - new ScanToArray(left, z, op, targetarr), - new ScanToArray(right, z, op, targetarr) - ) - case _ => sys.error("Can only split scan tree internal nodes.") - } - def shouldSplitFurther = tree match { - case ScanNode(_, _) => true - case _ => false - } - } - - class PAMap[S](f: T => S, targetarr: Array[Any], 
offset: Int, howmany: Int) extends Task[Unit, PAMap[S]] { - var result = () - - def leaf(prev: Option[Unit]) = { - val tarr = targetarr - val sarr = array - var i = offset - val until = offset + howmany - while (i < until) { - tarr(i) = f(sarr(i).asInstanceOf[T]) - i += 1 - } - } - def split = { - val fp = howmany / 2 - List(new PAMap(f, targetarr, offset, fp), new PAMap(f, targetarr, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel) - } - - /* serialization */ - - private def writeObject(out: java.io.ObjectOutputStream) { - out.defaultWriteObject - } - - private def readObject(in: java.io.ObjectInputStream) { - in.defaultReadObject - - // get raw array from arrayseq - array = arrayseq.array.asInstanceOf[Array[Any]] - } - -} - - -/** $factoryInfo - * @define Coll `mutable.ParArray` - * @define coll parallel array - */ -object ParArray extends ParFactory[ParArray] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParArray[T]] = new GenericCanCombineFrom[T] - def newBuilder[T]: Combiner[T, ParArray[T]] = newCombiner - def newCombiner[T]: Combiner[T, ParArray[T]] = ParArrayCombiner[T] - - /** Creates a new parallel array by wrapping the specified array. - */ - def handoff[T](arr: Array[T]): ParArray[T] = wrapOrRebuild(arr, arr.length) - - /** Creates a new parallel array by wrapping a part of the specified array. - */ - def handoff[T](arr: Array[T], sz: Int): ParArray[T] = wrapOrRebuild(arr, sz) - - private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match { - case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz)) - case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz)) - } - - def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = { - val newarr = new Array[T](arr.length) - Array.copy(arr, 0, newarr, 0, arr.length) - handoff(newarr) - } - - def fromTraversables[T](xss: GenTraversableOnce[T]*) = { - val cb = ParArrayCombiner[T]() - for (xs <- xss) { - cb ++= xs.seq - } - cb.result - } - -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala deleted file mode 100644 index 62165ae0d2d4..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala +++ /dev/null @@ -1,103 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.mutable - -import scala.collection.parallel.IterableSplitter - -/** Parallel flat hash table. - * - * @tparam T type of the elements in the $coll. 
- * @define coll table - * @define Coll `ParFlatHashTable` - * - * @author Aleksandar Prokopec - */ -trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { - - override def alwaysInitSizeMap = true - - abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int) - extends IterableSplitter[T] with SizeMapUtils { - import scala.collection.DebugUtils._ - - private[this] var traversed = 0 - private[this] val itertable = table - - if (hasNext) scan() - - private[this] def scan() { - while (itertable(idx) eq null) { - idx += 1 - } - } - - def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T] - - def remaining = totalsize - traversed - def hasNext = traversed < totalsize - def next() = if (hasNext) { - val r = entryToElem(itertable(idx)) - traversed += 1 - idx += 1 - if (hasNext) scan() - r - } else Iterator.empty.next() - def dup = newIterator(idx, until, totalsize) - def split = if (remaining > 1) { - val divpt = (until + idx) / 2 - - val fstidx = idx - val fstuntil = divpt - val fsttotal = calcNumElems(idx, divpt, itertable.length, sizeMapBucketSize) - val fstit = newIterator(fstidx, fstuntil, fsttotal) - - val sndidx = divpt - val snduntil = until - val sndtotal = remaining - fsttotal - val sndit = newIterator(sndidx, snduntil, sndtotal) - - Seq(fstit, sndit) - } else Seq(this) - - override def debugInformation = buildString { - append => - append("Parallel flat hash table iterator") - append("---------------------------------") - append("Traversed/total: " + traversed + " / " + totalsize) - append("Table idx/until: " + idx + " / " + until) - append("Table length: " + itertable.length) - append("Table: ") - append(arrayString(itertable, 0, itertable.length)) - append("Sizemap: ") - append(arrayString(sizemap, 0, sizemap.length)) - } - - protected def countElems(from: Int, until: Int) = { - var count = 0 - var i = from - while (i < until) { - if (itertable(i) ne null) count += 1 - i += 1 - } - count - } - - protected def countBucketSizes(frombucket: Int, untilbucket: Int) = { - var count = 0 - var i = frombucket - while (i < untilbucket) { - count += sizemap(i) - i += 1 - } - count - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashMap.scala deleted file mode 100644 index bb3737f18e96..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ /dev/null @@ -1,302 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel -package mutable - -import scala.collection.generic._ -import scala.collection.mutable.DefaultEntry -import scala.collection.mutable.HashEntry -import scala.collection.mutable.HashTable -import scala.collection.mutable.UnrolledBuffer -import scala.collection.parallel.Task - -/** A parallel hash map. - * - * `ParHashMap` is a parallel map which internally keeps elements within a hash table. - * It uses chaining to resolve collisions. 
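A minimal sketch of how this chained-hash-table map is used, assuming Scala 2.12 (object name illustrative); single-element updates go through the entry operations defined below, while bulk building goes through the combiner.

import scala.collection.parallel.mutable.ParHashMap

object MutableParHashMapSketch {
  def main(args: Array[String]): Unit = {
    val m = ParHashMap[String, Int]()
    m.put("a", 1)
    m += ("b" -> 2)
    println(m.get("a"))       // Some(1)
    println(m.size)           // 2
    m -= "a"
    println(m.contains("a"))  // false
  }
}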
- * - * @tparam K type of the keys in the parallel hash map - * @tparam V type of the values in the parallel hash map - * - * @define Coll `ParHashMap` - * @define coll parallel hash map - * - * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tables for more information. - */ -@SerialVersionUID(1L) -class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParHashMap] - with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]] - with ParHashTable[K, DefaultEntry[K, V]] - with Serializable -{ -self => - initWithContents(contents) - - type Entry = scala.collection.mutable.DefaultEntry[K, V] - - def this() = this(null) - - override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap - - override def empty: ParHashMap[K, V] = new ParHashMap[K, V] - - protected[this] override def newCombiner = ParHashMapCombiner[K, V] - - override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents) - - def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) - - override def size = tableSize - - override def clear() = clearTable() - - def get(key: K): Option[V] = { - val e = findEntry(key) - if (e eq null) None - else Some(e.value) - } - - def put(key: K, value: V): Option[V] = { - val e = findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } - } - - def update(key: K, value: V): Unit = put(key, value) - - def remove(key: K): Option[V] = { - val e = removeEntry(key) - if (e ne null) Some(e.value) - else None - } - - def += (kv: (K, V)): this.type = { - val e = findOrAddEntry(kv._1, kv._2) - if (e ne null) e.value = kv._2 - this - } - - def -=(key: K): this.type = { removeEntry(key); this } - - override def stringPrefix = "ParHashMap" - - class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V]) - extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) { - def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value) - - def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) = - new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) - } - - protected def createNewEntry[V1](key: K, value: V1): Entry = { - new Entry(key, value.asInstanceOf[V]) - } - - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) - } - - private def readObject(in: java.io.ObjectInputStream) { - init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject())) - } - - private[parallel] override def brokenInvariants = { - // bucket by bucket, count elements - val buckets = for (i <- 0 until (table.length / sizeMapBucketSize)) yield checkBucket(i) - - // check if each element is in the position corresponding to its key - val elems = for (i <- 0 until table.length) yield checkEntry(i) - - buckets.flatMap(x => x) ++ elems.flatMap(x => x) - } - - private def checkBucket(i: Int) = { - def count(e: HashEntry[K, DefaultEntry[K, V]]): Int = if (e eq null) 0 else 1 + count(e.next) - val expected = sizemap(i) - val found = ((i * sizeMapBucketSize) until ((i + 1) * sizeMapBucketSize)).foldLeft(0) { - (acc, c) => acc + count(table(c)) 
- } - if (found != expected) List("Found " + found + " elements, while sizemap showed " + expected) - else Nil - } - - private def checkEntry(i: Int) = { - def check(e: HashEntry[K, DefaultEntry[K, V]]): List[String] = if (e eq null) Nil else - if (index(elemHashCode(e.key)) == i) check(e.next) - else ("Element " + e.key + " at " + i + " with " + elemHashCode(e.key) + " maps to " + index(elemHashCode(e.key))) :: check(e.next) - check(table(i)) - } -} - -/** $factoryInfo - * @define Coll `mutable.ParHashMap` - * @define coll parallel hash map - */ -object ParHashMap extends ParMapFactory[ParHashMap] { - var iters = 0 - - def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = ParHashMapCombiner.apply[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V] -} - -private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int) -extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks) - with scala.collection.mutable.HashTable.HashUtils[K] -{ - private val nonmasklen = ParHashMapCombiner.nonmasklength - private val seedvalue = 27 - - def +=(elem: (K, V)) = { - sz += 1 - val hc = improve(elemHashCode(elem._1), seedvalue) - val pos = (hc >>> nonmasklen) - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[DefaultEntry[K, V]]() - } - // add to bucket - buckets(pos) += new DefaultEntry(elem._1, elem._2) - this - } - - def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024 - // construct table - val table = new AddingHashTable(size, tableLoadFactor, seedvalue) - val bucks = buckets.map(b => if (b ne null) b.headPtr else null) - val insertcount = combinerTaskSupport.executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length)) - table.setSize(insertcount) - // TODO compare insertcount and size to see if compression is needed - val c = table.hashTableContents - new ParHashMap(c) - } else { - // construct a normal table and fill it sequentially - // TODO parallelize by keeping separate sizemaps and merging them - object table extends HashTable[K, DefaultEntry[K, V]] { - type Entry = DefaultEntry[K, V] - def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) } - def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry] - sizeMapInit(table.length) - } - var i = 0 - while (i < ParHashMapCombiner.numblocks) { - if (buckets(i) ne null) { - for (elem <- buckets(i)) table.insertEntry(elem) - } - i += 1 - } - new ParHashMap(table.hashTableContents) - } - - /* classes */ - - /** A hash table which will never resize itself. Knowing the number of elements in advance, - * it allocates the table of the required size when created. - * - * Entries are added using the `insertEntry` method. This method checks whether the element - * exists and updates the size map. It returns false if the key was already in the table, - * and true if the key was successfully inserted. It does not update the number of elements - * in the table. 
- */ - private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] { - import HashTable._ - _loadFactor = lf - table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems))) - tableSize = 0 - seedvalue = _seedvalue - threshold = newThreshold(_loadFactor, table.length) - sizeMapInit(table.length) - def setSize(sz: Int) = tableSize = sz - def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = { - var h = index(elemHashCode(e.key)) - val olde = table(h).asInstanceOf[DefaultEntry[K, V]] - - // check if key already exists - var ce = olde - while (ce ne null) { - if (ce.key == e.key) { - h = -1 - ce = null - } else ce = ce.next - } - - // if key does not already exist - if (h != -1) { - e.next = olde - table(h) = e - nnSizeMapAdd(h) - true - } else false - } - protected def createNewEntry[X](key: K, x: X) = ??? - } - - /* tasks */ - - import UnrolledBuffer.Unrolled - - class FillBlocks(buckets: Array[Unrolled[DefaultEntry[K, V]]], table: AddingHashTable, offset: Int, howmany: Int) - extends Task[Int, FillBlocks] { - var result = Int.MinValue - def leaf(prev: Option[Int]) = { - var i = offset - val until = offset + howmany - result = 0 - while (i < until) { - result += fillBlock(i, buckets(i)) - i += 1 - } - } - private def fillBlock(block: Int, elems: Unrolled[DefaultEntry[K, V]]) = { - var insertcount = 0 - var unrolled = elems - var i = 0 - val t = table - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - if (t.insertEntry(elem)) insertcount += 1 - i += 1 - } - i = 0 - unrolled = unrolled.next - } - insertcount - } - def split = { - val fp = howmany / 2 - List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) - } - override def merge(that: FillBlocks) { - this.result += that.result - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) - } -} - -private[parallel] object ParHashMapCombiner { - private[mutable] val discriminantbits = 5 - private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) - private[mutable] val nonmasklength = 32 - discriminantbits - - def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashSet.scala deleted file mode 100644 index 1e3d57e0e532..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ /dev/null @@ -1,329 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.mutable - - - -import scala.collection.generic._ -import scala.collection.mutable.FlatHashTable -import scala.collection.parallel.Combiner -import scala.collection.mutable.UnrolledBuffer -import scala.collection.parallel.Task - - - -/** A parallel hash set. - * - * `ParHashSet` is a parallel set which internally keeps elements within a hash table. 
- * It uses linear probing to resolve collisions. - * - * @tparam T type of the elements in the $coll. - * - * @define Coll `ParHashSet` - * @define coll parallel hash set - * - * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tables for more information. - */ -@SerialVersionUID(1L) -class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T]) -extends ParSet[T] - with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]] - with ParFlatHashTable[T] - with Serializable -{ - initWithContents(contents) - // println("----> new par hash set!") - // java.lang.Thread.dumpStack - // println(debugInformation) - - def this() = this(null) - - override def companion = ParHashSet - - override def empty = new ParHashSet - - override def iterator = splitter - - override def size = tableSize - - def clear() = clearTable() - - override def seq = new scala.collection.mutable.HashSet(hashTableContents) - - def +=(elem: T) = { - addElem(elem) - this - } - - def -=(elem: T) = { - removeElem(elem) - this - } - - override def stringPrefix = "ParHashSet" - - def contains(elem: T) = containsElem(elem) - - def splitter = new ParHashSetIterator(0, table.length, size) - - class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int) - extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) { - def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) - } - - private def writeObject(s: java.io.ObjectOutputStream) { - serializeTo(s) - } - - private def readObject(in: java.io.ObjectInputStream) { - init(in, x => ()) - } - - import scala.collection.DebugUtils._ - override def debugInformation = buildString { - append => - append("Parallel flat hash table set") - append("No. 
elems: " + tableSize) - append("Table length: " + table.length) - append("Table: ") - append(arrayString(table, 0, table.length)) - append("Sizemap: ") - append(arrayString(sizemap, 0, sizemap.length)) - } - -} - - -/** $factoryInfo - * @define Coll `mutable.ParHashSet` - * @define coll parallel hash set - */ -object ParHashSet extends ParSetFactory[ParHashSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = new GenericCanCombineFrom[T] - - override def newBuilder[T]: Combiner[T, ParHashSet[T]] = newCombiner - - override def newCombiner[T]: Combiner[T, ParHashSet[T]] = ParHashSetCombiner.apply[T] -} - - -private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int) -extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks) -with scala.collection.mutable.FlatHashTable.HashUtils[T] { -//self: EnvironmentPassingCombiner[T, ParHashSet[T]] => - private val nonmasklen = ParHashSetCombiner.nonmasklength - private val seedvalue = 27 - - def +=(elem: T) = { - val entry = elemToEntry(elem) - sz += 1 - val hc = improve(entry.hashCode, seedvalue) - val pos = hc >>> nonmasklen - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[AnyRef] - } - // add to bucket - buckets(pos) += entry - this - } - - def result: ParHashSet[T] = { - val contents = if (size >= ParHashSetCombiner.numblocks * sizeMapBucketSize) parPopulate else seqPopulate - new ParHashSet(contents) - } - - private def parPopulate: FlatHashTable.Contents[T] = { - // construct it in parallel - val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue) - val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length)) - var leftinserts = 0 - for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry) - table.setSize(leftinserts + inserted) - table.hashTableContents - } - - private def seqPopulate: FlatHashTable.Contents[T] = { - // construct it sequentially - // TODO parallelize by keeping separate size maps and merging them - val tbl = new FlatHashTable[T] { - sizeMapInit(table.length) - seedvalue = ParHashSetCombiner.this.seedvalue - for { - buffer <- buckets - if buffer ne null - entry <- buffer - } addEntry(entry) - } - tbl.hashTableContents - } - - /* classes */ - - /** A flat hash table which doesn't resize itself. It accepts the number of elements - * it has to take and allocates the underlying hash table in advance. - * Elements can only be added to it. The final size has to be adjusted manually. - * It is internal to `ParHashSet` combiners. - */ - class AddingFlatHashTable(numelems: Int, lf: Int, inseedvalue: Int) extends FlatHashTable[T] { - _loadFactor = lf - table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor))) - tableSize = 0 - threshold = FlatHashTable.newThreshold(_loadFactor, table.length) - seedvalue = inseedvalue - sizeMapInit(table.length) - - override def toString = "AFHT(%s)".format(table.length) - - def tableLength = table.length - - def setSize(sz: Int) = tableSize = sz - - /** - * The elements are added using the `insertElem` method. 
This method accepts three - * arguments: - * - * @param insertAt where to add the element (set to -1 to use its hashcode) - * @param comesBefore the position before which the element should be added to - * @param newEntry the element to be added - * - * If the element is to be inserted at the position corresponding to its hash code, - * the table will try to add the element in such a position if possible. Collisions are resolved - * using linear hashing, so the element may actually have to be added to a position - * that follows the specified one. In the case that the first unoccupied position - * comes after `comesBefore`, the element is not added and the method simply returns -1, - * indicating that it couldn't add the element in a position that comes before the - * specified one. - * If the element is already present in the hash table, it is not added, and this method - * returns 0. If the element is added, it returns 1. - */ - def insertEntry(insertAt: Int, comesBefore: Int, newEntry : AnyRef): Int = { - var h = insertAt - if (h == -1) h = index(newEntry.hashCode) - var curEntry = table(h) - while (null != curEntry) { - if (curEntry == newEntry) return 0 - h = h + 1 // we *do not* do `(h + 1) % table.length` here, because we'll never overflow!! - if (h >= comesBefore) return -1 - curEntry = table(h) - } - table(h) = newEntry - - // this is incorrect since we set size afterwards anyway and a counter - // like this would not even work: - // - // tableSize = tableSize + 1 - // - // furthermore, it completely bogs down the parallel - // execution when there are multiple workers - - nnSizeMapAdd(h) - 1 - } - } - - /* tasks */ - - class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int) - extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] { - var result = (Int.MinValue, new UnrolledBuffer[AnyRef]) - - def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) { - var i = offset - var totalinserts = 0 - var leftover = new UnrolledBuffer[AnyRef]() - while (i < (offset + howmany)) { - val (inserted, intonextblock) = fillBlock(i, buckets(i), leftover) - totalinserts += inserted - leftover = intonextblock - i += 1 - } - result = (totalinserts, leftover) - } - private val blocksize = table.tableLength >> ParHashSetCombiner.discriminantbits - private def blockStart(block: Int) = block * blocksize - private def nextBlockStart(block: Int) = (block + 1) * blocksize - private def fillBlock(block: Int, elems: UnrolledBuffer[AnyRef], leftovers: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { - val beforePos = nextBlockStart(block) - - // store the elems - val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[AnyRef]()) - - // store the leftovers - val (leftoversIn, leftoversLeft) = insertAll(blockStart(block), beforePos, leftovers) - - // return the no. 
of stored elements tupled with leftovers - (elemsIn + leftoversIn, elemsLeft concat leftoversLeft) - } - private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { - val leftovers = new UnrolledBuffer[AnyRef] - var inserted = 0 - - var unrolled = elems.headPtr - var i = 0 - val t = table - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val entry = chunkarr(i) - val res = t.insertEntry(atPos, beforePos, entry) - if (res >= 0) inserted += res - else leftovers += entry - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - // slower: - // var it = elems.iterator - // while (it.hasNext) { - // val elem = it.next - // val res = table.insertEntry(atPos, beforePos, elem.asInstanceOf[T]) - // if (res >= 0) inserted += res - // else leftovers += elem - // } - - (inserted, leftovers) - } - def split = { - val fp = howmany / 2 - List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) - } - override def merge(that: FillBlocks) { - // take the leftovers from the left task, store them into the block of the right task - val atPos = blockStart(that.offset) - val beforePos = blockStart(that.offset + that.howmany) - val (inserted, remainingLeftovers) = insertAll(atPos, beforePos, this.result._2) - - // anything left after trying the store the left leftovers is added to the right task leftovers - // and a new leftovers set is produced in this way - // the total number of successfully inserted elements is adjusted accordingly - result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) - } - -} - - -private[parallel] object ParHashSetCombiner { - private[mutable] val discriminantbits = 5 - private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) - private[mutable] val nonmasklength = 32 - discriminantbits - - def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]] -} - diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashTable.scala deleted file mode 100644 index 423b891d4871..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParHashTable.scala +++ /dev/null @@ -1,143 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.mutable - -import scala.collection.mutable.HashEntry -import scala.collection.parallel.IterableSplitter - -/** Provides functionality for hash tables with linked list buckets, - * enriching the data structure by fulfilling certain requirements - * for their parallel construction and iteration. - */ -trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] { - - override def alwaysInitSizeMap = true - - /** A parallel iterator returning all the entries. 
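For reference, a standalone sketch of the bounded linear-probing insert described above for the flat-table set combiner: it returns 1 when the element is stored, 0 when it is already present, and -1 when the first free slot would fall beyond the block boundary, in which case the caller keeps the element as a leftover for the next block. This is a simplified illustration, not the deleted class.

object LinearProbingSketch {
  // Probes linearly from `start`, never storing at or beyond `bound`.
  def boundedInsert(table: Array[AnyRef], start: Int, bound: Int, elem: AnyRef): Int = {
    var h = start
    while (table(h) != null) {
      if (table(h) == elem) return 0  // duplicate: nothing stored
      h += 1
      if (h >= bound) return -1       // would cross the block boundary: leftover
    }
    table(h) = elem
    1                                 // stored
  }

  def main(args: Array[String]): Unit = {
    val table = new Array[AnyRef](8)
    println(boundedInsert(table, 2, 6, "x")) // 1: slot 2 was free
    println(boundedInsert(table, 2, 6, "x")) // 0: already stored
    println(boundedInsert(table, 2, 6, "y")) // 1: probes on to slot 3
  }
}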
- */ - abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]] - (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry) - extends IterableSplitter[T] with SizeMapUtils { - private val itertable = table - private var traversed = 0 - scan() - - def entry2item(e: Entry): T - def newIterator(idxFrom: Int, idxUntil: Int, totalSize: Int, es: Entry): IterRepr - - def hasNext = { - es ne null - } - - def next(): T = { - val res = es - es = es.next - scan() - traversed += 1 - entry2item(res) - } - - def scan() { - while (es == null && idx < until) { - es = itertable(idx).asInstanceOf[Entry] - idx = idx + 1 - } - } - - def remaining = totalsize - traversed - - private[parallel] override def debugInformation = { - buildString { - append => - append("/--------------------\\") - append("Parallel hash table entry iterator") - append("total hash table elements: " + tableSize) - append("pos: " + idx) - append("until: " + until) - append("traversed: " + traversed) - append("totalsize: " + totalsize) - append("current entry: " + es) - append("underlying from " + idx + " until " + until) - append(itertable.slice(idx, until).map(x => if (x != null) x.toString else "n/a").mkString(" | ")) - append("\\--------------------/") - } - } - - def dup = newIterator(idx, until, totalsize, es) - - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { - if (until > idx) { - // there is at least one more slot for the next iterator - // divide the rest of the table - val divsz = (until - idx) / 2 - - // second iterator params - val sidx = idx + divsz + 1 // + 1 preserves iteration invariant - val suntil = until - val ses = itertable(sidx - 1).asInstanceOf[Entry] // sidx - 1 ensures counting from the right spot - val stotal = calcNumElems(sidx - 1, suntil, table.length, sizeMapBucketSize) - - // first iterator params - val fidx = idx - val funtil = idx + divsz - val fes = es - val ftotal = totalsize - stotal - - Seq( - newIterator(fidx, funtil, ftotal, fes), - newIterator(sidx, suntil, stotal, ses) - ) - } else { - // otherwise, this is the last entry in the table - all what remains is the chain - // so split the rest of the chain - val arr = convertToArrayBuffer(es) - val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) - arrpit.split - } - } else Seq(this.asInstanceOf[IterRepr]) - - private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = { - val buff = mutable.ArrayBuffer[Entry]() - var curr = chainhead - while (curr ne null) { - buff += curr - curr = curr.next - } - // println("converted " + remaining + " element iterator into buffer: " + buff) - buff map { e => entry2item(e) } - } - - protected def countElems(from: Int, until: Int) = { - var c = 0 - var idx = from - var es: Entry = null - while (idx < until) { - es = itertable(idx).asInstanceOf[Entry] - while (es ne null) { - c += 1 - es = es.next - } - idx += 1 - } - c - } - - protected def countBucketSizes(fromBucket: Int, untilBucket: Int) = { - var c = 0 - var idx = fromBucket - while (idx < untilBucket) { - c += sizemap(idx) - idx += 1 - } - c - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParIterable.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParIterable.scala deleted file mode 100644 index 4659149106ea..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParIterable.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / 
___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.{ ParIterableLike, Combiner } - -/** A template trait for mutable parallel iterable collections. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[T] extends scala.collection.GenIterable[T] - with scala.collection.parallel.ParIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] - with Mutable { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - //protected[this] override def newBuilder = ParIterable.newBuilder[T] - - // if `mutable.ParIterableLike` is introduced, please move these methods there - override def toIterable: ParIterable[T] = this - - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) - - def seq: scala.collection.mutable.Iterable[T] -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParMap.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParMap.scala deleted file mode 100644 index 8110f9dc0a0a..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParMap.scala +++ /dev/null @@ -1,89 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.Combiner - -/** A template trait for mutable parallel maps. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, V] -extends GenMap[K, V] - with parallel.ParMap[K, V] - with ParIterable[(K, V)] - with GenericParMapTemplate[K, V, ParMap] - with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]] -{ - - protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] - - override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - override def empty: ParMap[K, V] = new ParHashMap[K, V] - - def seq: scala.collection.mutable.Map[K, V] - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
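The withDefault contract described above matches the standard library's map wrappers, which are still available; a small sketch with a plain immutable Map shows the documented behaviour (apply falls back to the default, get and contains do not, and transformer methods drop it):

object WithDefaultSketch {
  def main(args: Array[String]): Unit = {
    val m = Map("a" -> 1).withDefaultValue(0)

    println(m("missing"))          // 0: apply falls back to the default
    println(m.get("missing"))      // None: get is not affected by the default
    println(m.contains("missing")) // false: neither is contains

    // Transformer methods build a fresh map and do not preserve the default:
    val doubled = m.map { case (k, v) => (k, v * 2) }
    println(doubled.isDefinedAt("missing")) // false; doubled("missing") would throw
  }
}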
- * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d) - - /** The same map with a given default value. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d) -} - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = ParHashMapCombiner.apply[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - class WithDefault[K, V](underlying: ParMap[K, V], d: K => V) - extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] { - override def += (kv: (K, V)) = {underlying += kv; this} - def -= (key: K) = {underlying -= key; this} - override def empty = new WithDefault(underlying.empty, d) - override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) - override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def seq = underlying.seq.withDefault(d) - def clear() = underlying.clear() - def put(key: K, value: V): Option[V] = underlying.put(key, value) - - /** If these methods aren't overridden to thread through the underlying map, - * successive calls to withDefault* have no effect. - */ - override def withDefault(d: K => V): ParMap[K, V] = new WithDefault[K, V](underlying, d) - override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d) - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParMapLike.scala deleted file mode 100644 index 9ca5b8e91c56..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParMapLike.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel -package mutable - -import scala.collection.generic._ -import scala.collection.mutable.Cloneable -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable - -/** A template trait for mutable parallel maps. This trait is to be mixed in - * with concrete parallel maps to override the representation type. 
- * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * @define Coll `ParMap` - * @define coll parallel map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMapLike[K, - V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]] -extends scala.collection.GenMapLike[K, V, Repr] - with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential] - with Growable[(K, V)] - with Shrinkable[K] - with Cloneable[Repr] -{ - // note: should not override toMap - - def put(key: K, value: V): Option[V] - - def +=(kv: (K, V)): this.type - - def -=(key: K): this.type - - def +[U >: V](kv: (K, U)): ParMap[K, U] = this.clone().asInstanceOf[ParMap[K, U]] += kv - - def -(key: K) = this.clone() -= key - - def clear(): Unit -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSeq.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSeq.scala deleted file mode 100644 index 35be2669f863..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSeq.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner - -/** A mutable variant of `ParSeq`. 
- * - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T] - with ParIterable[T] - with scala.collection.parallel.ParSeq[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] { -self => - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - //protected[this] override def newBuilder = ParSeq.newBuilder[T] - - def update(i: Int, elem: T): Unit - - def seq: scala.collection.mutable.Seq[T] - - override def toSeq: ParSeq[T] = this -} - - -/** $factoryInfo - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSet.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSet.scala deleted file mode 100644 index 4e2d3e0e4cd5..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSet.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.Combiner - -/** A mutable variant of `ParSet`. - * - * @author Aleksandar Prokopec - */ -trait ParSet[T] -extends scala.collection/*.mutable*/.GenSet[T] - with ParIterable[T] - with scala.collection.parallel.ParSet[T] - with GenericParTemplate[T, ParSet] - with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]] -{ -self => - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - override def empty: ParSet[T] = ParHashSet() - def seq: scala.collection.mutable.Set[T] -} - - -/** $factoryInfo - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -object ParSet extends ParSetFactory[ParSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] - - override def newBuilder[T]: Combiner[T, ParSet[T]] = ParHashSet.newBuilder - - override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSetLike.scala deleted file mode 100644 index 08aa3b024bcb..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParSetLike.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package parallel.mutable - -import scala.collection.mutable.Cloneable -import scala.collection.GenSetLike -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable - -/** A template trait for mutable parallel sets. 
This trait is mixed in with concrete - * parallel sets to override the representation type. - * - * $sideeffects - * - * @tparam T the element type of the set - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSetLike[T, - +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], - +Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]] -extends GenSetLike[T, Repr] - with scala.collection.parallel.ParIterableLike[T, Repr, Sequential] - with scala.collection.parallel.ParSetLike[T, Repr, Sequential] - with Growable[T] - with Shrinkable[T] - with Cloneable[Repr] -{ -self => - override def empty: Repr - - def +=(elem: T): this.type - - def -=(elem: T): this.type - - def +(elem: T) = this.clone() += elem - - def -(elem: T) = this.clone() -= elem - - // note: should not override toSet -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParTrieMap.scala deleted file mode 100644 index 4e5dbbef79e7..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ParTrieMap.scala +++ /dev/null @@ -1,167 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.parallel.Task -import scala.collection.concurrent.BasicNode -import scala.collection.concurrent.TNode -import scala.collection.concurrent.LNode -import scala.collection.concurrent.CNode -import scala.collection.concurrent.SNode -import scala.collection.concurrent.INode -import scala.collection.concurrent.TrieMap -import scala.collection.concurrent.TrieMapIterator - -/** Parallel TrieMap collection. - * - * It has its bulk operations parallelized, but uses the snapshot operation - * to create the splitter. This means that parallel bulk operations can be - * called concurrently with the modifications. - * - * @author Aleksandar Prokopec - * @since 2.10 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]] - * section on `ParTrieMap` for more information. 
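The snapshot-based splitting described above relies on scala.collection.concurrent.TrieMap, which remains in the standard library; a brief sketch of why traversal can proceed while the map is mutated:

import scala.collection.concurrent.TrieMap

object TrieMapSnapshotSketch {
  def main(args: Array[String]): Unit = {
    val m = TrieMap("a" -> 1, "b" -> 2)

    // readOnlySnapshot is cheap and is unaffected by later updates to `m`,
    // which is what lets bulk operations run concurrently with modifications.
    val snap = m.readOnlySnapshot()

    m.put("c", 3)
    m.remove("a")

    println(snap.toMap == Map("a" -> 1, "b" -> 2)) // true: snapshot unchanged
    println(m.contains("a"))                       // false: the live map moved on
  }
}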
- */ -final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParTrieMap] - with ParMapLike[K, V, ParTrieMap[K, V], TrieMap[K, V]] - with ParTrieMapCombiner[K, V] - with Serializable -{ - def this() = this(new TrieMap) - - override def mapCompanion: GenericParMapCompanion[ParTrieMap] = ParTrieMap - - override def empty: ParTrieMap[K, V] = ParTrieMap.empty - - protected[this] override def newCombiner = ParTrieMap.newCombiner - - override def seq = ctrie - - def splitter = new ParTrieMapSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[TrieMap[K, V]], true) - - override def clear() = ctrie.clear() - - def result = this - - def get(key: K): Option[V] = ctrie.get(key) - - def put(key: K, value: V): Option[V] = ctrie.put(key, value) - - def update(key: K, value: V): Unit = ctrie.update(key, value) - - def remove(key: K): Option[V] = ctrie.remove(key) - - def +=(kv: (K, V)): this.type = { - ctrie.+=(kv) - this - } - - def -=(key: K): this.type = { - ctrie.-=(key) - this - } - - override def size = { - val in = ctrie.readRoot() - val r = in.gcasRead(ctrie) - r match { - case tn: TNode[_, _] => tn.cachedSize(ctrie) - case ln: LNode[_, _] => ln.cachedSize(ctrie) - case cn: CNode[_, _] => - tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array)) - cn.cachedSize(ctrie) - } - } - - override def stringPrefix = "ParTrieMap" - - /* tasks */ - - /** Computes TrieMap size in parallel. */ - class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] { - var result = -1 - def leaf(prev: Option[Int]) = { - var sz = 0 - var i = offset - val until = offset + howmany - while (i < until) { - array(i) match { - case sn: SNode[_, _] => sz += 1 - case in: INode[K, V] => sz += in.cachedSize(ctrie) - } - i += 1 - } - result = sz - } - def split = { - val fp = howmany / 2 - Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array)) - } - def shouldSplitFurther = howmany > 1 - override def merge(that: Size) = result = result + that.result - } -} - -private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean) -extends TrieMapIterator[K, V](lev, ct, mustInit) - with IterableSplitter[(K, V)] -{ - // only evaluated if `remaining` is invoked (which is not used by most tasks) - lazy val totalsize = ct.par.size - var iterated = 0 - - protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): ParTrieMapSplitter[K, V] = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit) - - override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = { - val maxsplits = 3 + Integer.highestOneBit(parallelismLevel) - level < maxsplits - } - - def dup = { - val it = newIterator(0, ct, _mustInit = false) - dupTo(it) - it.iterated = this.iterated - it - } - - override def next() = { - iterated += 1 - super.next() - } - - def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] - - override def isRemainingCheap = false - - def remaining: Int = totalsize - iterated -} - -/** Only used within the `ParTrieMap`. 
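The Size task above follows the leaf/split/merge shape used by all the tasks in these files; a sequential standalone sketch of that shape (the real code hands the two halves to a task support so they can run in parallel):

object SplitMergeSketch {
  // Counts non-negative elements in array(offset until offset + howmany),
  // splitting the range in half while it is larger than `threshold`.
  def count(array: Array[Int], offset: Int, howmany: Int, threshold: Int): Int =
    if (howmany <= threshold) {
      var i = offset; var c = 0
      while (i < offset + howmany) { if (array(i) >= 0) c += 1; i += 1 }
      c // the "leaf" computation
    } else {
      val fp = howmany / 2 // the "split"
      // the two halves could run in parallel; the "merge" is just a sum
      count(array, offset, fp, threshold) + count(array, offset + fp, howmany - fp, threshold)
    }

  def main(args: Array[String]): Unit = {
    val a = Array(3, -1, 4, -1, 5, 9, -2, 6)
    println(count(a, 0, a.length, 2)) // 5
  }
}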
*/ -private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] { - - def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = - if (this eq other) this - else throw new UnsupportedOperationException("This shouldn't have been called in the first place.") - - override def canBeShared = true -} - -object ParTrieMap extends ParMapFactory[ParTrieMap] { - def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V] - def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V] -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala deleted file mode 100644 index 3ad0754cf975..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.mutable - -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.ArrayBuffer -import scala.collection.parallel.Task - -/** An array combiner that uses a chain of arraybuffers to store elements. */ -trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] { - - override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz) - - // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden. 
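The resizable combiner above keeps a chain of array buffers so that combining two combiners only concatenates their chains and the single element copy happens when the result is built; a toy sketch of that idea with illustrative names:

import scala.collection.mutable.ArrayBuffer

object ChainCombinerSketch {
  // Elements go into the last buffer of a chain; `combine` concatenates the
  // chains; `result` pays for one copy at the end.
  final class ChainCombiner[T] {
    val chain: ArrayBuffer[ArrayBuffer[T]] = ArrayBuffer(ArrayBuffer[T]())

    def +=(elem: T): this.type = { chain.last += elem; this }

    def combine(that: ChainCombiner[T]): ChainCombiner[T] = {
      chain ++= that.chain // proportional to the number of buffers, not elements
      this
    }

    def result: Vector[T] = chain.flatten.toVector // single copy at the end
  }

  def main(args: Array[String]): Unit = {
    val left = new ChainCombiner[Int] += 1 += 2
    val right = new ChainCombiner[Int] += 3
    println(left.combine(right).result) // Vector(1, 2, 3)
  }
}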
- final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) - - def allocateAndCopy = if (chain.size > 1) { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] - - combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size)) - - new ParArray(arrayseq) - } else { // optimisation if there is only 1 array - new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size)) - } - - override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain - - /* tasks */ - - class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] { - var result = () - def leaf(prev: Option[Unit]) = if (howmany > 0) { - var totalleft = howmany - val (stbuff, stind) = findStart(offset) - var buffind = stbuff - var ind = stind - var arrayIndex = offset - while (totalleft > 0) { - val currbuff = chain(buffind) - val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind - val until = ind + chunksize - - copyChunk(currbuff.internalArray, ind, array, arrayIndex, until) - arrayIndex += chunksize - ind += chunksize - - totalleft -= chunksize - buffind += 1 - ind = 0 - } - } - private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) { - Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart) - } - private def findStart(pos: Int) = { - var left = pos - var buffind = 0 - while (left >= chain(buffind).size) { - left -= chain(buffind).size - buffind += 1 - } - (buffind, left) - } - def split = { - val fp = howmany / 2 - List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) - } -} - -object ResizableParArrayCombiner { - def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = { - new ResizableParArrayCombiner[T] { // was: with EnvironmentPassingCombiner[T, ParArray[T]] - lazy val chain = c - } - } - def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T]) -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala deleted file mode 100644 index e71e61f2f122..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ /dev/null @@ -1,101 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel.mutable - -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.DoublingUnrolledBuffer -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.parallel.Combiner -import scala.collection.parallel.Task - -/** An array combiner that uses doubling unrolled buffers to store elements. */ -trait UnrolledParArrayCombiner[T] -extends Combiner[T, ParArray[T]] { -//self: EnvironmentPassingCombiner[T, ParArray[T]] => - // because size is doubling, random access is O(logn)! 
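The comment above notes that doubling chunk sizes make random access O(log n); a tiny sketch of locating an index when chunk k holds 2^k elements (an illustration of the general idea, not the deleted buffer class):

object DoublingChunksSketch {
  // With chunk sizes 1, 2, 4, 8, ... an index is found by walking at most
  // about log2(n) chunks, hence O(log n) random access.
  def locate(index: Int): (Int, Int) = {
    var chunk = 0
    var chunkSize = 1
    var i = index
    while (i >= chunkSize) {
      i -= chunkSize
      chunk += 1
      chunkSize *= 2
    }
    (chunk, i) // (which chunk, offset inside that chunk)
  }

  def main(args: Array[String]): Unit = {
    println(locate(0))  // (0,0): first chunk of size 1
    println(locate(5))  // (2,2): chunks of size 1 and 2 skipped
    println(locate(10)) // (3,3): 1 + 2 + 4 elements skipped
  }
}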
- val buff = new DoublingUnrolledBuffer[Any] - - def +=(elem: T) = { - buff += elem - this - } - - def result = { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] - - combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size)) - - new ParArray(arrayseq) - } - - def clear() { - buff.clear() - } - - override def sizeHint(sz: Int) = { - buff.lastPtr.next = new Unrolled(0, new Array[Any](sz), null, buff) - buff.lastPtr = buff.lastPtr.next - } - - def combine[N <: T, NewTo >: ParArray[T]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = other match { - case that if that eq this => this // just return this - case that: UnrolledParArrayCombiner[t] => - buff concat that.buff - this - case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.") - } - - def size = buff.size - - /* tasks */ - - class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int) - extends Task[Unit, CopyUnrolledToArray] { - var result = () - - def leaf(prev: Option[Unit]) = if (howmany > 0) { - var totalleft = howmany - val (startnode, startpos) = findStart(offset) - var curr = startnode - var pos = startpos - var arroffset = offset - while (totalleft > 0) { - val lefthere = scala.math.min(totalleft, curr.size - pos) - Array.copy(curr.array, pos, array, arroffset, lefthere) - // println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr) - totalleft -= lefthere - arroffset += lefthere - pos = 0 - curr = curr.next - } - } - private def findStart(pos: Int) = { - var left = pos - var node = buff.headPtr - while ((left - node.size) >= 0) { - left -= node.size - node = node.next - } - (node, left) - } - def split = { - val fp = howmany / 2 - List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) - override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")" - } -} - -object UnrolledParArrayCombiner { - def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]] -} - diff --git a/tests/scala2-library/src/library/scala/collection/parallel/mutable/package.scala b/tests/scala2-library/src/library/scala/collection/parallel/mutable/package.scala deleted file mode 100644 index 81121d93983e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/mutable/package.scala +++ /dev/null @@ -1,77 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection.parallel - -import scala.collection.mutable.ArrayBuffer -import scala.collection.mutable.ArraySeq -import scala.collection.generic.Sizing - -package object mutable { - /* aliases */ - type ParArrayCombiner[T] = ResizableParArrayCombiner[T] - val ParArrayCombiner = ResizableParArrayCombiner -} - -package mutable { - /* classes and traits */ - private[mutable] trait SizeMapUtils { - - protected def calcNumElems(from: Int, until: Int, tableLength: Int, sizeMapBucketSize: Int) = { - // find the first bucket - val fbindex = from / sizeMapBucketSize - - // find the last bucket - val lbindex = until / 
sizeMapBucketSize - // note to self: FYI if you define lbindex as from / sizeMapBucketSize, the first branch - // below always triggers and tests pass, so you spend a great day benchmarking and profiling - - if (fbindex == lbindex) { - // if first and last are the same, just count between `from` and `until` - // return this count - countElems(from, until) - } else { - // otherwise count in first, then count in last - val fbuntil = ((fbindex + 1) * sizeMapBucketSize) min tableLength - val fbcount = countElems(from, fbuntil) - val lbstart = lbindex * sizeMapBucketSize - val lbcount = countElems(lbstart, until) - - // and finally count the elements in all the buckets between first and last using a sizemap - val inbetween = countBucketSizes(fbindex + 1, lbindex) - - // return the sum - fbcount + inbetween + lbcount - } - } - - protected def countElems(from: Int, until: Int): Int - - protected def countBucketSizes(fromBucket: Int, untilBucket: Int): Int - } - - /* hack-arounds */ - private[mutable] class ExposedArrayBuffer[T] extends ArrayBuffer[T] with Sizing { - def internalArray = array - def setInternalSize(s: Int) = size0 = s - override def sizeHint(len: Int) = { - if (len > size && len >= 1) { - val newarray = new Array[AnyRef](len) - Array.copy(array, 0, newarray, 0, size0) - array = newarray - } - } - } - - private[mutable] class ExposedArraySeq[T](arr: Array[AnyRef], sz: Int) extends ArraySeq[T](sz) { - override val array = arr - override val length = sz - override def stringPrefix = "ArraySeq" - } -} diff --git a/tests/scala2-library/src/library/scala/collection/parallel/package.scala b/tests/scala2-library/src/library/scala/collection/parallel/package.scala deleted file mode 100644 index 2fe3bc9c07b6..000000000000 --- a/tests/scala2-library/src/library/scala/collection/parallel/package.scala +++ /dev/null @@ -1,246 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection - -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.mutable.ParArray -import scala.collection.mutable.UnrolledBuffer -import scala.annotation.unchecked.uncheckedVariance -import scala.language.implicitConversions - -/** Package object for parallel collections. - */ -package object parallel { - /* constants */ - val MIN_FOR_COPY = 512 - val CHECK_RATE = 512 - val SQRT2 = math.sqrt(2) - val availableProcessors = java.lang.Runtime.getRuntime.availableProcessors - - /* functions */ - - /** Computes threshold from the size of the collection and the parallelism level. - */ - def thresholdFromSize(sz: Int, parallelismLevel: Int) = { - val p = parallelismLevel - if (p > 1) 1 + sz / (8 * p) - else sz - } - - val defaultTaskSupport: TaskSupport = new ExecutionContextTaskSupport - - def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = { - c match { - case pc: ParIterableLike[_, _, _] => pc.tasksupport = t - case _ => // do nothing - } - c - } - - /** Adds toParArray method to collection classes. 
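The thresholdFromSize function above decides how small a chunk of work must get before it stops being split; a worked sketch using the same formula as the deleted package object:

object ThresholdSketch {
  // Aim for roughly 8 chunks of work per worker: splitting stops
  // once a chunk is no larger than 1 + sz / (8 * p).
  def thresholdFromSize(sz: Int, parallelismLevel: Int): Int =
    if (parallelismLevel > 1) 1 + sz / (8 * parallelismLevel) else sz

  def main(args: Array[String]): Unit = {
    println(thresholdFromSize(100000, 8)) // 1563: stop splitting below ~1.5k elements
    println(thresholdFromSize(100000, 1)) // 100000: a single worker never splits
  }
}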
*/ - implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.GenTraversableOnce[T]) { - def toParArray = { - val t = asGto(c) - if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]] - else { - val it = t.toIterator - val cb = mutable.ParArrayCombiner[T]() - while (it.hasNext) cb += it.next - cb.result - } - } - } -} - - -package parallel { - /** Implicit conversions used in the implementation of parallel collections. */ - private[collection] object ParallelCollectionImplicits { - implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]): FactoryOps[From, Elem, To] = new FactoryOps[From, Elem, To] { - def isParallel = bf.isInstanceOf[Parallel] - def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]] - def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new Otherwise[R] { - def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody - } - } - implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]): TraversableOps[T] = new TraversableOps[T] { - def isParallel = t.isInstanceOf[Parallel] - def isParIterable = t.isInstanceOf[ParIterable[_]] - def asParIterable = t.asInstanceOf[ParIterable[T]] - def isParSeq = t.isInstanceOf[ParSeq[_]] - def asParSeq = t.asInstanceOf[ParSeq[T]] - def ifParSeq[R](isbody: ParSeq[T] => R) = new Otherwise[R] { - def otherwise(notbody: => R) = if (isParallel) isbody(asParSeq) else notbody - } - } - implicit def throwable2ops(self: Throwable): ThrowableOps = new ThrowableOps { - def alongWith(that: Throwable) = (self, that) match { - case (self: CompositeThrowable, that: CompositeThrowable) => new CompositeThrowable(self.throwables ++ that.throwables) - case (self: CompositeThrowable, _) => new CompositeThrowable(self.throwables + that) - case (_, that: CompositeThrowable) => new CompositeThrowable(that.throwables + self) - case _ => new CompositeThrowable(Set(self, that)) - } - } - } - - trait FactoryOps[From, Elem, To] { - trait Otherwise[R] { - def otherwise(notbody: => R): R - } - - def isParallel: Boolean - def asParallel: CanCombineFrom[From, Elem, To] - def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R): Otherwise[R] - } - - trait TraversableOps[T] { - trait Otherwise[R] { - def otherwise(notbody: => R): R - } - - def isParallel: Boolean - def isParIterable: Boolean - def asParIterable: ParIterable[T] - def isParSeq: Boolean - def asParSeq: ParSeq[T] - def ifParSeq[R](isbody: ParSeq[T] => R): Otherwise[R] - } - - @deprecated("this trait will be removed", "2.11.0") - trait ThrowableOps { - @deprecated("this method will be removed", "2.11.0") - def alongWith(that: Throwable): Throwable - } - - /* classes */ - - trait CombinerFactory[U, Repr] { - /** Provides a combiner used to construct a collection. */ - def apply(): Combiner[U, Repr] - /** The call to the `apply` method can create a new combiner each time. - * If it does, this method returns `false`. - * The same combiner factory may be used each time (typically, this is - * the case for concurrent collections, which are thread safe). - * If so, the method returns `true`. - */ - def doesShareCombiners: Boolean - } - - /** Composite throwable - thrown when multiple exceptions are thrown at the same time. 
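The FactoryOps/TraversableOps helpers above expose an "ifParallel { ... } otherwise { ... }" style built from an implicit ops class and a small Otherwise trait; a standalone sketch of that shape with an ordinary runtime check and illustrative names only:

object IfOtherwiseSketch {
  trait Otherwise[R] { def otherwise(notbody: => R): R }

  // Mirrors the shape of ifParSeq: run `isbody` when the value has the
  // requested runtime type, otherwise fall back to the second block.
  implicit class SeqOps[T](private val xs: Seq[T]) {
    def ifIndexed[R](isbody: IndexedSeq[T] => R): Otherwise[R] = new Otherwise[R] {
      def otherwise(notbody: => R): R =
        if (xs.isInstanceOf[IndexedSeq[_]]) isbody(xs.asInstanceOf[IndexedSeq[T]])
        else notbody
    }
  }

  def main(args: Array[String]): Unit = {
    val msg1 = Vector(1, 2, 3).ifIndexed(v => s"indexed, apply(1) = ${v(1)}") otherwise "not indexed"
    val msg2 = List(1, 2, 3).ifIndexed(_ => "indexed") otherwise "not indexed"
    println(msg1) // indexed, apply(1) = 2
    println(msg2) // not indexed
  }
}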
*/ - @deprecated("this class will be removed.", "2.11.0") - final case class CompositeThrowable(throwables: Set[Throwable]) extends Exception( - "Multiple exceptions thrown during a parallel computation: " + - throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n") - ) - - - /** A helper iterator for iterating very small array buffers. - * Automatically forwards the signal delegate when splitting. - */ - private[parallel] class BufferSplitter[T] - (private val buffer: scala.collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: scala.collection.generic.Signalling) - extends IterableSplitter[T] { - signalDelegate = _sigdel - def hasNext = index < until - def next = { - val r = buffer(index) - index += 1 - r - } - def remaining = until - index - def dup = new BufferSplitter(buffer, index, until, signalDelegate) - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { - val divsz = (until - index) / 2 - Seq( - new BufferSplitter(buffer, index, index + divsz, signalDelegate), - new BufferSplitter(buffer, index + divsz, until, signalDelegate) - ) - } else Seq(this) - private[parallel] override def debugInformation = { - buildString { - append => - append("---------------") - append("Buffer iterator") - append("buffer: " + buffer) - append("index: " + index) - append("until: " + until) - append("---------------") - } - } - } - - /** A helper combiner which contains an array of buckets. Buckets themselves - * are unrolled linked lists. Some parallel collections are constructed by - * sorting their result set according to some criteria. - * - * A reference `buckets` to buckets is maintained. Total size of all buckets - * is kept in `sz` and maintained whenever 2 bucket combiners are combined. - * - * Clients decide how to maintain these by implementing `+=` and `result`. - * Populating and using the buckets is up to the client. While populating them, - * the client should update `sz` accordingly. Note that a bucket is by default - * set to `null` to save space - the client should initialize it. - * Note that in general the type of the elements contained in the buckets `Buck` - * doesn't have to correspond to combiner element type `Elem`. - * - * This class simply gives an efficient `combine` for free - it chains - * the buckets together. Since the `combine` contract states that the receiver (`this`) - * becomes invalidated, `combine` reuses the receiver and returns it. - * - * Methods `beforeCombine` and `afterCombine` are called before and after - * combining the buckets, respectively, given that the argument to `combine` - * is not `this` (as required by the `combine` contract). - * They can be overridden in subclasses to provide custom behaviour by modifying - * the receiver (which will be the return value). 
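The BucketCombiner contract described above says that combine simply folds the argument's buckets into the receiver's and returns the receiver, with the argument considered invalidated; a toy sketch with plain lists per bucket (the deleted code links unrolled buffers instead, so its concatenation is cheaper):

object BucketMergeSketch {
  final class Buckets(val bucketCount: Int) {
    val buckets: Array[List[Int]] = Array.fill(bucketCount)(Nil)
    var size: Int = 0

    def add(bucket: Int, elem: Int): Unit = {
      buckets(bucket) = elem :: buckets(bucket)
      size += 1
    }

    // Appends the other combiner's buckets bucket by bucket, updates the
    // total size, and returns the receiver.
    def combine(that: Buckets): Buckets = {
      var i = 0
      while (i < bucketCount) {
        buckets(i) = buckets(i) ::: that.buckets(i)
        i += 1
      }
      size += that.size
      this
    }
  }

  def main(args: Array[String]): Unit = {
    val a = new Buckets(2); a.add(0, 1); a.add(1, 2)
    val b = new Buckets(2); b.add(0, 3)
    val merged = a.combine(b)
    println(merged.size)           // 3
    println(merged.buckets.toList) // List(List(1, 3), List(2))
  }
}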
- */ - private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]] - (private val bucketnumber: Int) - extends Combiner[Elem, To] { - //self: EnvironmentPassingCombiner[Elem, To] => - protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber) - protected var sz: Int = 0 - - def size = sz - - def clear() = { - buckets = new Array[UnrolledBuffer[Buck]](bucketnumber) - sz = 0 - } - - def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} - - def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} - - def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = { - if (this eq other) this - else other match { - case _: BucketCombiner[_, _, _, _] => - beforeCombine(other) - val that = other.asInstanceOf[BucketCombiner[Elem, To, Buck, CombinerType]] - - var i = 0 - while (i < bucketnumber) { - if (buckets(i) eq null) - buckets(i) = that.buckets(i) - else if (that.buckets(i) ne null) - buckets(i) concat that.buckets(i) - - i += 1 - } - sz = sz + that.size - afterCombine(other) - this - case _ => - sys.error("Unexpected combiner type.") - } - } - } -} diff --git a/tests/scala2-library/src/library/scala/collection/script/Location.scala b/tests/scala2-library/src/library/scala/collection/script/Location.scala deleted file mode 100644 index 8a0b10c331ee..000000000000 --- a/tests/scala2-library/src/library/scala/collection/script/Location.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package script - -/** Class `Location` describes locations in messages implemented by - * class [[scala.collection.script.Message]]. - * - * @author Matthias Zenger - * @version 1.0, 10/05/2004 - * @since 2.8 - */ - -@deprecated("scripting is deprecated", "2.11.0") -sealed abstract class Location - -@deprecated("scripting is deprecated", "2.11.0") -case object Start extends Location - -@deprecated("scripting is deprecated", "2.11.0") -case object End extends Location - -@deprecated("scripting is deprecated", "2.11.0") -case object NoLo extends Location - -@deprecated("scripting is deprecated", "2.11.0") -case class Index(n: Int) extends Location diff --git a/tests/scala2-library/src/library/scala/collection/script/Message.scala b/tests/scala2-library/src/library/scala/collection/script/Message.scala deleted file mode 100644 index a6ba9d95233e..000000000000 --- a/tests/scala2-library/src/library/scala/collection/script/Message.scala +++ /dev/null @@ -1,89 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package script - -import mutable.ArrayBuffer - -/** Class `Message` represents messages that are issued by observable - * collection classes whenever a data structure is changed. Class `Message` - * has several subclasses for the various kinds of events: `Update` - * `Remove`, `Include`, `Reset`, and `Script`. 
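The (deprecated) script messages described above form a small ADT of collection change events; a standalone sketch of how an observer might dispatch on such events, with illustrative types rather than the deleted classes:

object ChangeEventSketch {
  sealed trait Msg[+A]
  case class Include[+A](elem: A) extends Msg[A]
  case class Update[+A](elem: A) extends Msg[A]
  case class Remove[+A](elem: A) extends Msg[A]
  case object Reset extends Msg[Nothing]

  // An observer folds each change event into its own view of the data.
  def step(state: Set[Int], msg: Msg[Int]): Set[Int] = msg match {
    case Include(e) => state + e
    case Update(e)  => state + e // for a set, an update is just re-inclusion
    case Remove(e)  => state - e
    case Reset      => Set.empty
  }

  def main(args: Array[String]): Unit = {
    val script = List(Include(1), Include(2), Remove(1))
    println(script.foldLeft(Set.empty[Int])(step)) // Set(2)
  }
}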
- * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - * @since 2.8 - */ -@deprecated("scripting is deprecated", "2.11.0") -trait Message[+A] - -/** This observable update refers to inclusion operations that add new elements - * to collection classes. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Include[+A](location: Location, elem: A) extends Message[A] { - def this(elem: A) = this(NoLo, elem) -} - -/** This observable update refers to destructive modification operations - * of elements from collection classes. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Update[+A](location: Location, elem: A) extends Message[A] { - def this(elem: A) = this(NoLo, elem) -} - -/** This observable update refers to removal operations of elements - * from collection classes. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Remove[+A](location: Location, elem: A) extends Message[A] { - def this(elem: A) = this(NoLo, elem) -} - -/** This command refers to reset operations. - * - * @author Matthias Zenger - * @version 1.0, 08/07/2003 - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Reset[+A]() extends Message[A] - -/** Objects of this class represent compound messages consisting - * of a sequence of other messages. - * - * @author Matthias Zenger - * @version 1.0, 10/05/2004 - */ -@deprecated("scripting is deprecated", "2.11.0") -class Script[A] extends ArrayBuffer[Message[A]] with Message[A] { - - override def toString(): String = { - var res = "Script(" - val it = this.iterator - var i = 1 - while (it.hasNext) { - if (i > 1) - res = res + ", " - res = res + "[" + i + "] " + it.next - i += 1 - } - res + ")" - } -} diff --git a/tests/scala2-library/src/library/scala/collection/script/Scriptable.scala b/tests/scala2-library/src/library/scala/collection/script/Scriptable.scala deleted file mode 100644 index 8965286b0db0..000000000000 --- a/tests/scala2-library/src/library/scala/collection/script/Scriptable.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package script - -/** Classes that mix in the `Scriptable` class allow messages to be sent to - * objects of that class. - * - * @author Matthias Zenger - * @version 1.0, 09/05/2004 - * @since 2.8 - */ -@deprecated("scripting is deprecated", "2.11.0") -trait Scriptable[A] { - /** Send a message to this scriptable object. - */ - def <<(cmd: Message[A]): Unit -} diff --git a/tests/scala2-library/src/library/scala/compat/Platform.scala b/tests/scala2-library/src/library/scala/compat/Platform.scala deleted file mode 100644 index f3745bc1897a..000000000000 --- a/tests/scala2-library/src/library/scala/compat/Platform.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package compat - -import java.lang.System - -object Platform { - - /** Thrown when a stack overflow occurs because a method or function recurses too deeply. 
- * - * On the JVM, this is a type alias for `java.lang.StackOverflowError`, which itself extends `java.lang.Error`. - * The same rules apply to catching a `java.lang.Error` as for Java, that it indicates a serious problem that a reasonable application should not try and catch. - */ - type StackOverflowError = java.lang.StackOverflowError - - /** This is a type alias for `java.util.ConcurrentModificationException`, - * which may be thrown by methods that detect an invalid modification of an object. - * For example, many common collection types do not allow modifying a collection - * while it is being iterated over. - */ - type ConcurrentModificationException = java.util.ConcurrentModificationException - - /** Copies `length` elements of array `src` starting at position `srcPos` to the - * array `dest` starting at position `destPos`. If `src`==`dest`, the copying will - * behave as if the elements copied from `src` were first copied to a temporary - * array before being copied back into the array at the destination positions. - * - * @param src A non-null array as source for the copy. - * @param srcPos The starting index in the source array. - * @param dest A non-null array as destination for the copy. - * @param destPos The starting index in the destination array. - * @param length The number of elements to be copied. - * @throws java.lang.NullPointerException If either `src` or `dest` are `null`. - * @throws java.lang.ArrayStoreException If either `src` or `dest` are not of type - * [java.lang.Array]; or if the element type of `src` is not - * compatible with that of `dest`. - * @throws java.lang.IndexOutOfBoundsException If either `srcPos` or `destPos` are - * outside of the bounds of their respective arrays; or if `length` - * is negative; or if there are less than `length` elements available - * after `srcPos` or `destPos` in `src` and `dest` respectively. - */ - @inline - def arraycopy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) { - System.arraycopy(src, srcPos, dest, destPos, length) - } - - /** Creates a new array of the specified type and given length. - * - * Note that if `elemClass` is a subclass of [[scala.AnyVal]] then the returned value is an Array of the corresponding java primitive type. - * For example, the following code `scala.compat.Platform.createArray(classOf[Int], 4)` returns an array of the java primitive type `int`. - * - * For a [[scala.AnyVal]] array, the values of the array are set to 0 for ''numeric value types'' ([[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], - * [[scala.Short]], and [[scala.Byte]]), and `false` for [[scala.Boolean]]. Creation of an array of type [[scala.Unit]] is not possible. - * - * For subclasses of [[scala.AnyRef]], the values of the array are set to `null`. - * - * The caller must cast the returned value to the correct type. - * - * @example {{{ - * val a = scala.compat.Platform.createArray(classOf[Int], 4).asInstanceOf[Array[Int]] // returns Array[Int](0, 0, 0, 0) - * }}} - * - * @param elemClass the `Class` object of the component type of the array - * @param length the length of the new array. - * @return an array of the given component type as an `AnyRef`. - * @throws java.lang.NullPointerException If `elemClass` is `null`. 
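// A short usage sketch for the two Platform helpers documented above; both are thin
// wrappers over the corresponding java.lang facilities.
import scala.compat.Platform

val src  = Array(1, 2, 3, 4, 5)
val dest = Platform.createArray(classOf[Int], 5).asInstanceOf[Array[Int]]
Platform.arraycopy(src, 0, dest, 0, src.length)   // dest now holds 1, 2, 3, 4, 5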
- * @throws java.lang.IllegalArgumentException if componentType is [[scala.Unit]] or `java.lang.Void.TYPE` - * @throws java.lang.NegativeArraySizeException if the specified length is negative - */ - @inline - def createArray(elemClass: Class[_], length: Int): AnyRef = - java.lang.reflect.Array.newInstance(elemClass, length) - - /** Assigns the value of 0 to each element in the array. - * @param arr A non-null Array[Int]. - * @throws java.lang.NullPointerException If `arr` is `null`. - */ - @inline - def arrayclear(arr: Array[Int]) { java.util.Arrays.fill(arr, 0) } - - /** Returns the `Class` object associated with the class or interface with the given string name using the current `ClassLoader`. - * On the JVM, invoking this method is equivalent to: `java.lang.Class.forName(name)` - * - * For more information, please see the Java documentation for [[java.lang.Class]]. - * - * @param name the fully qualified name of the desired class. - * @return the `Class` object for the class with the specified name. - * @throws java.lang.LinkageError if the linkage fails - * @throws java.lang.ExceptionInInitializerError if the initialization provoked by this method fails - * @throws java.lang.ClassNotFoundException if the class cannot be located - * @example {{{ - * val a = scala.compat.Platform.getClassForName("java.lang.Integer") // returns the Class[_] for java.lang.Integer - * }}} - */ - @inline - def getClassForName(name: String): Class[_] = java.lang.Class.forName(name) - - /** The default line separator. - * - * On the JVM, this is equivalent to calling the method: - * `java.lang.System.lineSeparator` - */ - val EOL = scala.util.Properties.lineSeparator - - /** The current time in milliseconds. The time is counted since 1 January 1970 - * UTC. - * - * Note that the operating system timer used to obtain this value may be less - * precise than a millisecond. - */ - @inline - def currentTime: Long = System.currentTimeMillis() - - /** Runs the garbage collector. - * - * This is a request that the underlying JVM runs the garbage collector. - * The results of this call depends heavily on the JVM used. - * The underlying JVM is free to ignore this request. - */ - @inline - def collectGarbage(): Unit = System.gc() - - /** The name of the default character set encoding as a string */ - @inline - def defaultCharsetName: String = java.nio.charset.Charset.defaultCharset.name -} diff --git a/tests/scala2-library/src/library/scala/concurrent/Awaitable.scala b/tests/scala2-library/src/library/scala/concurrent/Awaitable.scala deleted file mode 100644 index dff83874ba76..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/Awaitable.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - - - -import scala.concurrent.duration.Duration - - - -/** - * An object that may eventually be completed with a result value of type `T` which may be - * awaited using blocking methods. - * - * The [[Await]] object provides methods that allow accessing the result of an `Awaitable` - * by blocking the current thread until the `Awaitable` has been completed or a timeout has - * occurred. - */ -trait Awaitable[+T] { - - /** - * Await the "completed" state of this `Awaitable`. 
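// A hedged usage sketch of the Awaitable contract described above: user code goes
// through Await.ready / Await.result rather than calling ready/result directly.
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

val f: Future[Int] = Future { 21 * 2 }
Await.ready(f, 2.seconds)                      // blocks until f is completed, returns f
val answer: Int = Await.result(f, 2.seconds)   // blocks and returns 42, or rethrows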
- * - * '''''This method should not be called directly; use [[Await.ready]] instead.''''' - * - * @param atMost - * maximum wait time, which may be negative (no waiting is done), - * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive - * duration - * @return this `Awaitable` - * @throws InterruptedException if the current thread is interrupted while waiting - * @throws TimeoutException if after waiting for the specified time this `Awaitable` is still not ready - * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]] - */ - @throws(classOf[TimeoutException]) - @throws(classOf[InterruptedException]) - def ready(atMost: Duration)(implicit permit: CanAwait): this.type - - /** - * Await and return the result (of type `T`) of this `Awaitable`. - * - * '''''This method should not be called directly; use [[Await.result]] instead.''''' - * - * @param atMost - * maximum wait time, which may be negative (no waiting is done), - * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive - * duration - * @return the result value if the `Awaitable` is completed within the specific maximum wait time - * @throws InterruptedException if the current thread is interrupted while waiting - * @throws TimeoutException if after waiting for the specified time this `Awaitable` is still not ready - * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]] - */ - @throws(classOf[Exception]) - def result(atMost: Duration)(implicit permit: CanAwait): T -} - - - diff --git a/tests/scala2-library/src/library/scala/concurrent/BatchingExecutor.scala b/tests/scala2-library/src/library/scala/concurrent/BatchingExecutor.scala deleted file mode 100644 index fd31f3470e9f..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/BatchingExecutor.scala +++ /dev/null @@ -1,117 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import java.util.concurrent.Executor -import scala.annotation.tailrec - -/** - * Mixin trait for an Executor - * which groups multiple nested `Runnable.run()` calls - * into a single Runnable passed to the original - * Executor. This can be a useful optimization - * because it bypasses the original context's task - * queue and keeps related (nested) code on a single - * thread which may improve CPU affinity. However, - * if tasks passed to the Executor are blocking - * or expensive, this optimization can prevent work-stealing - * and make performance worse. Also, some ExecutionContext - * may be fast enough natively that this optimization just - * adds overhead. - * The default ExecutionContext.global is already batching - * or fast enough not to benefit from it; while - * `fromExecutor` and `fromExecutorService` do NOT add - * this optimization since they don't know whether the underlying - * executor will benefit from it. - * A batching executor can create deadlocks if code does - * not use `scala.concurrent.blocking` when it should, - * because tasks created within other tasks will block - * on the outer task completing. - * This executor may run tasks in any order, including LIFO order. - * There are no ordering guarantees. 
- * - * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable - * in the calling thread synchronously. It must enqueue/handoff the Runnable. - */ -private[concurrent] trait BatchingExecutor extends Executor { - - // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside - private val _tasksLocal = new ThreadLocal[List[Runnable]]() - - private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext { - private var parentBlockContext: BlockContext = _ - // this method runs in the delegate ExecutionContext's thread - override def run(): Unit = { - require(_tasksLocal.get eq null) - - val prevBlockContext = BlockContext.current - BlockContext.withBlockContext(this) { - try { - parentBlockContext = prevBlockContext - - @tailrec def processBatch(batch: List[Runnable]): Unit = batch match { - case Nil => () - case head :: tail => - _tasksLocal set tail - try { - head.run() - } catch { - case t: Throwable => - // if one task throws, move the - // remaining tasks to another thread - // so we can throw the exception - // up to the invoking executor - val remaining = _tasksLocal.get - _tasksLocal set Nil - unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails? - throw t // rethrow - } - processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here - } - - processBatch(initial) - } finally { - _tasksLocal.remove() - parentBlockContext = null - } - } - } - - override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = { - // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock. - { - val tasks = _tasksLocal.get - _tasksLocal set Nil - if ((tasks ne null) && tasks.nonEmpty) - unbatchedExecute(new Batch(tasks)) - } - - // now delegate the blocking to the previous BC - require(parentBlockContext ne null) - parentBlockContext.blockOn(thunk) - } - } - - protected def unbatchedExecute(r: Runnable): Unit - - override def execute(runnable: Runnable): Unit = { - if (batchable(runnable)) { // If we can batch the runnable - _tasksLocal.get match { - case null => unbatchedExecute(new Batch(runnable :: Nil)) // If we aren't in batching mode yet, enqueue batch - case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch - } - } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying - } - - /** Override this to define which runnables will be batched. */ - def batchable(runnable: Runnable): Boolean = runnable match { - case _: OnCompleteRunnable => true - case _ => false - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/BlockContext.scala b/tests/scala2-library/src/library/scala/concurrent/BlockContext.scala deleted file mode 100644 index 2b8ed4c7caa9..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/BlockContext.scala +++ /dev/null @@ -1,86 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -/** - * A context to be notified by `scala.concurrent.blocking` when - * a thread is about to block. In effect this trait provides - * the implementation for `scala.concurrent.Await`. 
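// A minimal sketch of the interaction described above: wrapping a blocking call in
// scala.concurrent.blocking gives the current BlockContext a chance to react (for
// example by flushing batched tasks or growing the pool), which is what avoids the
// batching-related deadlocks mentioned for BatchingExecutor.
import scala.concurrent.{Future, blocking}
import scala.concurrent.ExecutionContext.Implicits.global

val slow: Future[String] = Future {
  blocking {               // signals the surrounding BlockContext before blocking
    Thread.sleep(100)      // stand-in for blocking I/O
    "done"
  }
}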
- * `scala.concurrent.Await.result()` and `scala.concurrent.Await.ready()` - * locates an instance of `BlockContext` by first looking for one - * provided through `BlockContext.withBlockContext()` and failing that, - * checking whether `Thread.currentThread` is an instance of `BlockContext`. - * So a thread pool can have its `java.lang.Thread` instances implement - * `BlockContext`. There's a default `BlockContext` used if the thread - * doesn't implement `BlockContext`. - * - * Typically, you'll want to chain to the previous `BlockContext`, - * like this: - * {{{ - * val oldContext = BlockContext.current - * val myContext = new BlockContext { - * override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = { - * // you'd have code here doing whatever you need to do - * // when the thread is about to block. - * // Then you'd chain to the previous context: - * oldContext.blockOn(thunk) - * } - * } - * BlockContext.withBlockContext(myContext) { - * // then this block runs with myContext as the handler - * // for scala.concurrent.blocking - * } - * }}} - */ -trait BlockContext { - - /** Used internally by the framework; - * Designates (and eventually executes) a thunk which potentially blocks the calling `java.lang.Thread`. - * - * Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead. - */ - def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T -} - -object BlockContext { - private object DefaultBlockContext extends BlockContext { - override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = thunk - } - - /** - * @return the `BlockContext` that will be used if no other is found. - **/ - def defaultBlockContext: BlockContext = DefaultBlockContext - - private val contextLocal = new ThreadLocal[BlockContext]() - - /** - @return the `BlockContext` that would be used for the current `java.lang.Thread` at this point - **/ - def current: BlockContext = contextLocal.get match { - case null => Thread.currentThread match { - case ctx: BlockContext => ctx - case _ => DefaultBlockContext - } - case some => some - } - - /** - * Installs a current `BlockContext` around executing `body`. - **/ - def withBlockContext[T](blockContext: BlockContext)(body: => T): T = { - val old = contextLocal.get // can be null - try { - contextLocal.set(blockContext) - body - } finally { - contextLocal.set(old) - } - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/Channel.scala b/tests/scala2-library/src/library/scala/concurrent/Channel.scala deleted file mode 100644 index 89ad7d8c0e93..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/Channel.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.concurrent - -/** This class provides a simple FIFO queue of data objects, - * which are read by one or more reader threads. - * - * @tparam A type of data exchanged - * @author Martin Odersky - * @version 1.0, 10/03/2003 - */ -class Channel[A] { - class LinkedList[A] { - var elem: A = _ - var next: LinkedList[A] = null - } - private var written = new LinkedList[A] // FIFO queue, realized through - private var lastWritten = written // aliasing of a linked list - private var nreaders = 0 - - /** Append a value to the FIFO queue to be read by `read`. - * This operation is nonblocking and can be executed by any thread. 
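// A small producer/consumer sketch for the Channel class above; the reader blocks in
// `read` until the writer thread has supplied a value.
import scala.concurrent.Channel

val chan = new Channel[Int]
val producer = new Thread(new Runnable {
  def run(): Unit = (1 to 3).foreach(chan.write)
})
producer.start()
val first = chan.read    // blocks until 1 has been written, then returns it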
- * - * @param x object to enqueue to this channel - */ - def write(x: A) = synchronized { - lastWritten.elem = x - lastWritten.next = new LinkedList[A] - lastWritten = lastWritten.next - if (nreaders > 0) notify() - } - - /** Retrieve the next waiting object from the FIFO queue, - * blocking if necessary until an object is available. - * - * @return next object dequeued from this channel - */ - def read: A = synchronized { - while (written.next == null) { - try { - nreaders += 1 - wait() - } - finally nreaders -= 1 - } - val x = written.elem - written = written.next - x - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/DelayedLazyVal.scala b/tests/scala2-library/src/library/scala/concurrent/DelayedLazyVal.scala deleted file mode 100644 index 595d411e2a1e..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/DelayedLazyVal.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - - -/** A `DelayedLazyVal` is a wrapper for lengthy computations which have a - * valid partially computed result. - * - * The first argument is a function for obtaining the result at any given - * point in time, and the second is the lengthy computation. Once the - * computation is complete, the `apply` method will stop recalculating it - * and return a fixed value from that point forward. - * - * @param f the function to obtain the current value at any point in time - * @param body the computation to run to completion in another thread - * - * @author Paul Phillips - * @version 2.8 - */ -class DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionContext){ - @volatile private[this] var _isDone = false - private[this] lazy val complete = f() - - /** Whether the computation is complete. - * - * @return true if the computation is complete. - */ - def isDone = _isDone - - /** The current result of f(), or the final result if complete. - * - * @return the current value - */ - def apply(): T = if (isDone) complete else f() - - exec.execute(new Runnable { def run = { body; _isDone = true } }) -} diff --git a/tests/scala2-library/src/library/scala/concurrent/ExecutionContext.scala b/tests/scala2-library/src/library/scala/concurrent/ExecutionContext.scala deleted file mode 100644 index f46f29438767..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/ExecutionContext.scala +++ /dev/null @@ -1,189 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - - -import java.util.concurrent.{ ExecutorService, Executor } -import scala.annotation.implicitNotFound - -/** - * An `ExecutionContext` can execute program logic asynchronously, - * typically but not necessarily on a thread pool. - * - * A general purpose `ExecutionContext` must be asynchronous in executing - * any `Runnable` that is passed into its `execute`-method. A special purpose - * `ExecutionContext` may be synchronous but must only be passed to code that - * is explicitly safe to be run using a synchronously executing `ExecutionContext`. - * - * APIs such as `Future.onComplete` require you to provide a callback - * and an implicit `ExecutionContext`. 
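// A usage sketch for the DelayedLazyVal wrapper above: apply() returns the current
// partial result while the body is still running, and the final result once isDone.
// Sharing a plain var like this is for illustration only and is not thread-safe.
import scala.concurrent.DelayedLazyVal
import scala.concurrent.ExecutionContext.Implicits.global

var partial = List.empty[Int]
val snapshot = new DelayedLazyVal(() => partial, {
  (1 to 100).foreach(i => partial = i :: partial)   // the lengthy computation
})
val soFar = snapshot()      // a partial (possibly empty) result while running
val done  = snapshot.isDone // true once the body has finished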
The implicit `ExecutionContext` - * will be used to execute the callback. - * - * While it is possible to simply import - * `scala.concurrent.ExecutionContext.Implicits.global` to obtain an - * implicit `ExecutionContext`, application developers should carefully - * consider where they want to set execution policy; - * ideally, one place per application—or per logically related section of code— - * will make a decision about which `ExecutionContext` to use. - * That is, you will mostly want to avoid hardcoding, especially via an import, - * `scala.concurrent.ExecutionContext.Implicits.global`. - * The recommended approach is to add `(implicit ec: ExecutionContext)` to methods, - * or class constructor parameters, which need an `ExecutionContext`. - * - * Then locally import a specific `ExecutionContext` in one place for the entire - * application or module, passing it implicitly to individual methods. - * Alternatively define a local implicit val with the required `ExecutionContext`. - * - * A custom `ExecutionContext` may be appropriate to execute code - * which blocks on IO or performs long-running computations. - * `ExecutionContext.fromExecutorService` and `ExecutionContext.fromExecutor` - * are good ways to create a custom `ExecutionContext`. - * - * The intent of `ExecutionContext` is to lexically scope code execution. - * That is, each method, class, file, package, or application determines - * how to run its own code. This avoids issues such as running - * application callbacks on a thread pool belonging to a networking library. - * The size of a networking library's thread pool can be safely configured, - * knowing that only that library's network operations will be affected. - * Application callback execution can be configured separately. - */ -@implicitNotFound("""Cannot find an implicit ExecutionContext. You might pass -an (implicit ec: ExecutionContext) parameter to your method -or import scala.concurrent.ExecutionContext.Implicits.global.""") -trait ExecutionContext { - - /** Runs a block of code on this execution context. - * - * @param runnable the task to execute - */ - def execute(runnable: Runnable): Unit - - /** Reports that an asynchronous computation failed. - * - * @param cause the cause of the failure - */ - def reportFailure(@deprecatedName('t) cause: Throwable): Unit - - /** Prepares for the execution of a task. Returns the prepared - * execution context. The recommended implementation of - * `prepare` is to return `this`. - * - * This method should no longer be overridden or called. It was - * originally expected that `prepare` would be called by - * all libraries that consume ExecutionContexts, in order to - * capture thread local context. However, this usage has proven - * difficult to implement in practice and instead it is - * now better to avoid using `prepare` entirely. - * - * Instead, if an `ExecutionContext` needs to capture thread - * local context, it should capture that context when it is - * constructed, so that it doesn't need any additional - * preparation later. - */ - @deprecated("preparation of ExecutionContexts will be removed", "2.12.0") - def prepare(): ExecutionContext = this -} - -/** - * An [[ExecutionContext]] that is also a - * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html Executor]]. 
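// The recommended pattern from the paragraph above: take the ExecutionContext as an
// implicit parameter and let callers decide which context to supply.
import scala.concurrent.{ExecutionContext, Future}

def fetchLength(s: String)(implicit ec: ExecutionContext): Future[Int] =
  Future(s.length)

// e.g. once, at the application boundary:
// implicit val ec: ExecutionContext = ExecutionContext.global
// fetchLength("hello")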
- */ -trait ExecutionContextExecutor extends ExecutionContext with Executor - -/** - * An [[ExecutionContext]] that is also a - * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html ExecutorService]]. - */ -trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService - - -/** Contains factory methods for creating execution contexts. - */ -object ExecutionContext { - /** - * The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global - * `ExecutionContext` explicitly. - * - * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. - * It can be configured via the following [[scala.sys.SystemProperties]]: - * - * `scala.concurrent.context.minThreads` = defaults to "1" - * `scala.concurrent.context.numThreads` = defaults to "x1" (i.e. the current number of available processors * 1) - * `scala.concurrent.context.maxThreads` = defaults to "x1" (i.e. the current number of available processors * 1) - * `scala.concurrent.context.maxExtraThreads` = defaults to "256" - * - * The pool size of threads is then `numThreads` bounded by `minThreads` on the lower end and `maxThreads` on the high end. - * - * The `maxExtraThreads` is the maximum number of extra threads to have at any given time to evade deadlock, - * see [[scala.concurrent.BlockContext]]. - * - * @return the global `ExecutionContext` - */ - def global: ExecutionContextExecutor = Implicits.global.asInstanceOf[ExecutionContextExecutor] - - object Implicits { - /** - * The implicit global `ExecutionContext`. Import `global` when you want to provide the global - * `ExecutionContext` implicitly. - * - * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default, - * the thread pool uses a target number of worker threads equal to the number of - * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]]. - */ - implicit lazy val global: ExecutionContext = impl.ExecutionContextImpl.fromExecutor(null: Executor) - } - - /** Creates an `ExecutionContext` from the given `ExecutorService`. - * - * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. - * @param reporter a function for error reporting - * @return the `ExecutionContext` using the given `ExecutorService` - */ - def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService = - impl.ExecutionContextImpl.fromExecutorService(e, reporter) - - /** Creates an `ExecutionContext` from the given `ExecutorService` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. - * - * If it is guaranteed that none of the executed tasks are blocking, a single-threaded `ExecutorService` - * can be used to create an `ExecutionContext` as follows: - * - * {{{ - * import java.util.concurrent.Executors - * val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor()) - * }}} - * - * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. 
- * @return the `ExecutionContext` using the given `ExecutorService` - */ - def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter) - - /** Creates an `ExecutionContext` from the given `Executor`. - * - * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. - * @param reporter a function for error reporting - * @return the `ExecutionContext` using the given `Executor` - */ - def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor = - impl.ExecutionContextImpl.fromExecutor(e, reporter) - - /** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. - * - * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. - * @return the `ExecutionContext` using the given `Executor` - */ - def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) - - /** The default reporter simply prints the stack trace of the `Throwable` to [[http://docs.oracle.com/javase/8/docs/api/java/lang/System.html#err System.err]]. - * - * @return the function for error reporting - */ - def defaultReporter: Throwable => Unit = _.printStackTrace() -} - - diff --git a/tests/scala2-library/src/library/scala/concurrent/Future.scala b/tests/scala2-library/src/library/scala/concurrent/Future.scala deleted file mode 100644 index 6c1c9a0c8083..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/Future.scala +++ /dev/null @@ -1,878 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import scala.language.higherKinds - -import java.util.concurrent.{CountDownLatch, TimeUnit} -import java.util.concurrent.atomic.AtomicInteger - -import scala.util.control.NonFatal -import scala.util.{Try, Success, Failure} -import scala.concurrent.duration._ -import scala.collection.generic.CanBuildFrom -import scala.reflect.ClassTag - - -/** A `Future` represents a value which may or may not *currently* be available, - * but will be available at some point, or an exception if that value could not be made available. - * - * Asynchronous computations that yield futures are created with the `Future.apply` call and are computed using a supplied `ExecutionContext`, - * which can be backed by a Thread pool. - * - * {{{ - * import ExecutionContext.Implicits.global - * val s = "Hello" - * val f: Future[String] = Future { - * s + " future!" - * } - * f foreach { - * msg => println(msg) - * } - * }}} - * - * @author Philipp Haller, Heather Miller, Aleksandar Prokopec, Viktor Klang - * - * @see [[http://docs.scala-lang.org/overviews/core/futures.html Futures and Promises]] - * - * @define multipleCallbacks - * Multiple callbacks may be registered; there is no guarantee that they will be - * executed in a particular order. - * - * @define caughtThrowables - * The future may contain a throwable object and this means that the future failed. 
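// A sketch of building a dedicated context with the fromExecutor/fromExecutorService
// factories above, supplying a custom error reporter instead of defaultReporter.
import java.util.concurrent.Executors
import scala.concurrent.ExecutionContext

val pool = Executors.newFixedThreadPool(4)
val blockingEc = ExecutionContext.fromExecutorService(pool, t => Console.err.println(s"task failed: $t"))
// use blockingEc for blocking or long-running work, then shut it down when finished:
// blockingEc.shutdown()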
- * Futures obtained through combinators have the same exception as the future they were obtained from. - * The following throwable objects are not contained in the future: - * - `Error` - errors are not contained within futures - * - `InterruptedException` - not contained within futures - * - all `scala.util.control.ControlThrowable` except `NonLocalReturnControl` - not contained within futures - * - * Instead, the future is completed with a ExecutionException with one of the exceptions above - * as the cause. - * If a future is failed with a `scala.runtime.NonLocalReturnControl`, - * it is completed with a value from that throwable instead. - * - * @define swallowsExceptions - * Since this method executes asynchronously and does not produce a return value, - * any non-fatal exceptions thrown will be reported to the `ExecutionContext`. - * - * @define nonDeterministic - * Note: using this method yields nondeterministic dataflow programs. - * - * @define forComprehensionExamples - * Example: - * - * {{{ - * val f = Future { 5 } - * val g = Future { 3 } - * val h = for { - * x: Int <- f // returns Future(5) - * y: Int <- g // returns Future(3) - * } yield x + y - * }}} - * - * is translated to: - * - * {{{ - * f flatMap { (x: Int) => g map { (y: Int) => x + y } } - * }}} - * - * @define callbackInContext - * The provided callback always runs in the provided implicit - *`ExecutionContext`, though there is no guarantee that the - * `execute()` method on the `ExecutionContext` will be called once - * per callback or that `execute()` will be called in the current - * thread. That is, the implementation may run multiple callbacks - * in a batch within a single `execute()` and it may run - * `execute()` either immediately or asynchronously. - * Completion of the Future must *happen-before* the invocation of the callback. - */ -trait Future[+T] extends Awaitable[T] { - import Future.{ InternalCallbackExecutor => internalExecutor } - - /* Callbacks */ - - /** When this future is completed successfully (i.e., with a value), - * apply the provided partial function to the value if the partial function - * is defined at that value. - * - * If the future has already been completed with a value, - * this will either be applied immediately or be scheduled asynchronously. - * - * Note that the returned value of `pf` will be discarded. - * - * $swallowsExceptions - * $multipleCallbacks - * $callbackInContext - * - * @group Callbacks - */ - @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12.0") - def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete { - case Success(v) => - pf.applyOrElse[T, Any](v, Predef.identity[T]) // Exploiting the cached function to avoid MatchError - case _ => - } - - /** When this future is completed with a failure (i.e., with a throwable), - * apply the provided callback to the throwable. - * - * $caughtThrowables - * - * If the future has already been completed with a failure, - * this will either be applied immediately or be scheduled asynchronously. - * - * Will not be called in case that the future is completed with a value. - * - * Note that the returned value of `pf` will be discarded. 
- * - * $swallowsExceptions - * $multipleCallbacks - * $callbackInContext - * - * @group Callbacks - */ - @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12.0") - def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete { - case Failure(t) => - pf.applyOrElse[Throwable, Any](t, Predef.identity[Throwable]) // Exploiting the cached function to avoid MatchError - case _ => - } - - /** When this future is completed, either through an exception, or a value, - * apply the provided function. - * - * If the future has already been completed, - * this will either be applied immediately or be scheduled asynchronously. - * - * Note that the returned value of `f` will be discarded. - * - * $swallowsExceptions - * $multipleCallbacks - * $callbackInContext - * - * @tparam U only used to accept any return type of the given callback function - * @param f the function to be executed when this `Future` completes - * @group Callbacks - */ - def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit - - - /* Miscellaneous */ - - /** Returns whether the future had already been completed with - * a value or an exception. - * - * $nonDeterministic - * - * @return `true` if the future was completed, `false` otherwise - * @group Polling - */ - def isCompleted: Boolean - - /** The current value of this `Future`. - * - * $nonDeterministic - * - * If the future was not completed the returned value will be `None`. - * If the future was completed the value will be `Some(Success(t))` - * if it contained a valid result, or `Some(Failure(error))` if it contained - * an exception. - * - * @return `None` if the `Future` wasn't completed, `Some` if it was. - * @group Polling - */ - def value: Option[Try[T]] - - - /* Projections */ - - /** The returned `Future` will be successfully completed with the `Throwable` of the original `Future` - * if the original `Future` fails. - * - * If the original `Future` is successful, the returned `Future` is failed with a `NoSuchElementException`. - * - * @return a failed projection of this `Future`. - * @group Transformations - */ - def failed: Future[Throwable] = - transform({ - case Failure(t) => Success(t) - case Success(v) => Failure(new NoSuchElementException("Future.failed not completed with a throwable.")) - })(internalExecutor) - - - /* Monadic operations */ - - /** Asynchronously processes the value in the future once the value becomes available. - * - * WARNING: Will not be called if this future is never completed or if it is completed with a failure. - * - * $swallowsExceptions - * - * @tparam U only used to accept any return type of the given callback function - * @param f the function which will be executed if this `Future` completes with a result, - * the return value of `f` will be discarded. - * @group Callbacks - */ - def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f } - - /** Creates a new future by applying the 's' function to the successful result of - * this future, or the 'f' function to the failed result. If there is any non-fatal - * exception thrown when 's' or 'f' is applied, that exception will be propagated - * to the resulting future. 
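// A short sketch of the onComplete/foreach callbacks documented above; the callbacks
// run on the supplied ExecutionContext, with no ordering guarantees between them.
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Success, Failure}

val f = Future(6 / 2)
f.onComplete {
  case Success(v) => println(s"result: $v")   // eventually prints "result: 3"
  case Failure(t) => println(s"failed: $t")
}
f.foreach(println)                            // runs only if f succeeds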
- * - * @tparam S the type of the returned `Future` - * @param s function that transforms a successful result of the receiver into a successful result of the returned future - * @param f function that transforms a failure of the receiver into a failure of the returned future - * @return a `Future` that will be completed with the transformed value - * @group Transformations - */ - def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = - transform { - case Success(r) => Try(s(r)) - case Failure(t) => Try(throw f(t)) // will throw fatal errors! - } - - /** Creates a new Future by applying the specified function to the result - * of this Future. If there is any non-fatal exception thrown when 'f' - * is applied then that exception will be propagated to the resulting future. - * - * @tparam S the type of the returned `Future` - * @param f function that transforms the result of this future - * @return a `Future` that will be completed with the transformed value - * @group Transformations - */ - def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] - - /** Creates a new Future by applying the specified function, which produces a Future, to the result - * of this Future. If there is any non-fatal exception thrown when 'f' - * is applied then that exception will be propagated to the resulting future. - * - * @tparam S the type of the returned `Future` - * @param f function that transforms the result of this future - * @return a `Future` that will be completed with the transformed value - * @group Transformations - */ - def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] - - - /** Creates a new future by applying a function to the successful result of - * this future. If this future is completed with an exception then the new - * future will also contain this exception. - * - * Example: - * - * {{{ - * val f = Future { "The future" } - * val g = f map { x: String => x + " is now!" } - * }}} - * - * Note that a for comprehension involving a `Future` - * may expand to include a call to `map` and or `flatMap` - * and `withFilter`. See [[scala.concurrent.Future#flatMap]] for an example of such a comprehension. - * - * - * @tparam S the type of the returned `Future` - * @param f the function which will be applied to the successful result of this `Future` - * @return a `Future` which will be completed with the result of the application of the function - * @group Transformations - */ - def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = transform(_ map f) - - /** Creates a new future by applying a function to the successful result of - * this future, and returns the result of the function as the new future. - * If this future is completed with an exception then the new future will - * also contain this exception. - * - * $forComprehensionExamples - * - * @tparam S the type of the returned `Future` - * @param f the function which will be applied to the successful result of this `Future` - * @return a `Future` which will be completed with the result of the application of the function - * @group Transformations - */ - def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = transformWith { - case Success(s) => f(s) - case Failure(_) => this.asInstanceOf[Future[S]] - } - - /** Creates a new future with one level of nesting flattened, this method is equivalent - * to `flatMap(identity)`. 
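// A compact sketch of the transformations documented above: map, flatMap via a
// for comprehension, and flatten as the equivalent of flatMap(identity).
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val nested: Future[Future[Int]] = Future(Future(42))
val flat: Future[Int]    = nested.flatten             // same as nested.flatMap(identity)
val doubled: Future[Int] = flat.map(_ * 2)
val summed: Future[Int]  = for { a <- flat; b <- doubled } yield a + b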
- * - * @tparam S the type of the returned `Future` - * @group Transformations - */ - def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(internalExecutor) - - /** Creates a new future by filtering the value of the current future with a predicate. - * - * If the current future contains a value which satisfies the predicate, the new future will also hold that value. - * Otherwise, the resulting future will fail with a `NoSuchElementException`. - * - * If the current future fails, then the resulting future also fails. - * - * Example: - * {{{ - * val f = Future { 5 } - * val g = f filter { _ % 2 == 1 } - * val h = f filter { _ % 2 == 0 } - * g foreach println // Eventually prints 5 - * Await.result(h, Duration.Zero) // throw a NoSuchElementException - * }}} - * - * @param p the predicate to apply to the successful result of this `Future` - * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException` - * @group Transformations - */ - def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = - map { r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") } - - /** Used by for-comprehensions. - * @group Transformations - */ - final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor) - - /** Creates a new future by mapping the value of the current future, if the given partial function is defined at that value. - * - * If the current future contains a value for which the partial function is defined, the new future will also hold that value. - * Otherwise, the resulting future will fail with a `NoSuchElementException`. - * - * If the current future fails, then the resulting future also fails. - * - * Example: - * {{{ - * val f = Future { -5 } - * val g = f collect { - * case x if x < 0 => -x - * } - * val h = f collect { - * case x if x > 0 => x * 2 - * } - * g foreach println // Eventually prints 5 - * Await.result(h, Duration.Zero) // throw a NoSuchElementException - * }}} - * - * @tparam S the type of the returned `Future` - * @param pf the `PartialFunction` to apply to the successful result of this `Future` - * @return a `Future` holding the result of application of the `PartialFunction` or a `NoSuchElementException` - * @group Transformations - */ - def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = - map { - r => pf.applyOrElse(r, (t: T) => throw new NoSuchElementException("Future.collect partial function is not defined at: " + t)) - } - - /** Creates a new future that will handle any matching throwable that this - * future might contain. If there is no match, or if this future contains - * a valid result then the new future will contain the same. 
- * - * Example: - * - * {{{ - * Future (6 / 0) recover { case e: ArithmeticException => 0 } // result: 0 - * Future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception - * Future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3 - * }}} - * - * @tparam U the type of the returned `Future` - * @param pf the `PartialFunction` to apply if this `Future` fails - * @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction` - * @group Transformations - */ - def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = - transform { _ recover pf } - - /** Creates a new future that will handle any matching throwable that this - * future might contain by assigning it a value of another future. - * - * If there is no match, or if this future contains - * a valid result then the new future will contain the same result. - * - * Example: - * - * {{{ - * val f = Future { Int.MaxValue } - * Future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue - * }}} - * - * @tparam U the type of the returned `Future` - * @param pf the `PartialFunction` to apply if this `Future` fails - * @return a `Future` with the successful value of this `Future` or the outcome of the `Future` returned by the `PartialFunction` - * @group Transformations - */ - def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = - transformWith { - case Failure(t) => pf.applyOrElse(t, (_: Throwable) => this) - case Success(_) => this - } - - /** Zips the values of `this` and `that` future, and creates - * a new future holding the tuple of their results. - * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. - * - * @tparam U the type of the other `Future` - * @param that the other `Future` - * @return a `Future` with the results of both futures or the failure of the first of them that failed - * @group Transformations - */ - def zip[U](that: Future[U]): Future[(T, U)] = { - implicit val ec = internalExecutor - flatMap { r1 => that.map(r2 => (r1, r2)) } - } - - /** Zips the values of `this` and `that` future using a function `f`, - * and creates a new future holding the result. - * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. - * If the application of `f` throws a throwable, the resulting future - * is failed with that throwable if it is non-fatal. - * - * @tparam U the type of the other `Future` - * @tparam R the type of the resulting `Future` - * @param that the other `Future` - * @param f the function to apply to the results of `this` and `that` - * @return a `Future` with the result of the application of `f` to the results of `this` and `that` - * @group Transformations - */ - def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = - flatMap(r1 => that.map(r2 => f(r1, r2)))(internalExecutor) - - /** Creates a new future which holds the result of this future if it was completed successfully, or, if not, - * the result of the `that` future if `that` is completed successfully. 
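// The zip/zipWith combinators described above have no inline example; a minimal one:
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val a = Future(2)
val b = Future(3)
val pair: Future[(Int, Int)] = a zip b               // eventually (2, 3)
val sum: Future[Int]         = a.zipWith(b)(_ + _)   // eventually 5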
- * If both futures are failed, the resulting future holds the throwable object of the first future. - * - * Using this method will not cause concurrent programs to become nondeterministic. - * - * Example: - * {{{ - * val f = Future { sys.error("failed") } - * val g = Future { 5 } - * val h = f fallbackTo g - * h foreach println // Eventually prints 5 - * }}} - * - * @tparam U the type of the other `Future` and the resulting `Future` - * @param that the `Future` whose result we want to use if this `Future` fails. - * @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail - * @group Transformations - */ - def fallbackTo[U >: T](that: Future[U]): Future[U] = - if (this eq that) this - else { - implicit val ec = internalExecutor - recoverWith { case _ => that } recoverWith { case _ => this } - } - - /** Creates a new `Future[S]` which is completed with this `Future`'s result if - * that conforms to `S`'s erased type or a `ClassCastException` otherwise. - * - * @tparam S the type of the returned `Future` - * @param tag the `ClassTag` which will be used to cast the result of this `Future` - * @return a `Future` holding the casted result of this `Future` or a `ClassCastException` otherwise - * @group Transformations - */ - def mapTo[S](implicit tag: ClassTag[S]): Future[S] = { - implicit val ec = internalExecutor - val boxedClass = { - val c = tag.runtimeClass - if (c.isPrimitive) Future.toBoxed(c) else c - } - require(boxedClass ne null) - map(s => boxedClass.cast(s).asInstanceOf[S]) - } - - /** Applies the side-effecting function to the result of this future, and returns - * a new future with the result of this future. - * - * This method allows one to enforce that the callbacks are executed in a - * specified order. - * - * Note that if one of the chained `andThen` callbacks throws - * an exception, that exception is not propagated to the subsequent `andThen` - * callbacks. Instead, the subsequent `andThen` callbacks are given the original - * value of this future. - * - * The following example prints out `5`: - * - * {{{ - * val f = Future { 5 } - * f andThen { - * case r => sys.error("runtime exception") - * } andThen { - * case Failure(t) => println(t) - * case Success(v) => println(v) - * } - * }}} - * - * $swallowsExceptions - * - * @tparam U only used to accept any return type of the given `PartialFunction` - * @param pf a `PartialFunction` which will be conditionally applied to the outcome of this `Future` - * @return a `Future` which will be completed with the exact same outcome as this `Future` but after the `PartialFunction` has been executed. - * @group Callbacks - */ - def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = - transform { - result => - try pf.applyOrElse[Try[T], Any](result, Predef.identity[Try[T]]) - catch { case NonFatal(t) => executor reportFailure t } - - result - } -} - - - -/** Future companion object. - * - * @define nonDeterministic - * Note: using this method yields nondeterministic dataflow programs. 
- */ -object Future { - - private[concurrent] val toBoxed = Map[Class[_], Class[_]]( - classOf[Boolean] -> classOf[java.lang.Boolean], - classOf[Byte] -> classOf[java.lang.Byte], - classOf[Char] -> classOf[java.lang.Character], - classOf[Short] -> classOf[java.lang.Short], - classOf[Int] -> classOf[java.lang.Integer], - classOf[Long] -> classOf[java.lang.Long], - classOf[Float] -> classOf[java.lang.Float], - classOf[Double] -> classOf[java.lang.Double], - classOf[Unit] -> classOf[scala.runtime.BoxedUnit] - ) - - /** A Future which is never completed. - */ - final object never extends Future[Nothing] { - - @throws(classOf[TimeoutException]) - @throws(classOf[InterruptedException]) - override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = { - atMost match { - case e if e eq Duration.Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") - case Duration.Inf => new CountDownLatch(1).await() - case Duration.MinusInf => // Drop out - case f: FiniteDuration => - if (f > Duration.Zero) new CountDownLatch(1).await(f.toNanos, TimeUnit.NANOSECONDS) - } - throw new TimeoutException(s"Future timed out after [$atMost]") - } - - @throws(classOf[Exception]) - override def result(atMost: Duration)(implicit permit: CanAwait): Nothing = { - ready(atMost) - throw new TimeoutException(s"Future timed out after [$atMost]") - } - - override def onSuccess[U](pf: PartialFunction[Nothing, U])(implicit executor: ExecutionContext): Unit = () - override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = () - override def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = () - override def isCompleted: Boolean = false - override def value: Option[Try[Nothing]] = None - override def failed: Future[Throwable] = this - override def foreach[U](f: Nothing => U)(implicit executor: ExecutionContext): Unit = () - override def transform[S](s: Nothing => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = this - override def transform[S](f: Try[Nothing] => Try[S])(implicit executor: ExecutionContext): Future[S] = this - override def transformWith[S](f: Try[Nothing] => Future[S])(implicit executor: ExecutionContext): Future[S] = this - override def map[S](f: Nothing => S)(implicit executor: ExecutionContext): Future[S] = this - override def flatMap[S](f: Nothing => Future[S])(implicit executor: ExecutionContext): Future[S] = this - override def flatten[S](implicit ev: Nothing <:< Future[S]): Future[S] = this - override def filter(p: Nothing => Boolean)(implicit executor: ExecutionContext): Future[Nothing] = this - override def collect[S](pf: PartialFunction[Nothing, S])(implicit executor: ExecutionContext): Future[S] = this - override def recover[U >: Nothing](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this - override def recoverWith[U >: Nothing](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this - override def zip[U](that: Future[U]): Future[(Nothing, U)] = this - override def zipWith[U, R](that: Future[U])(f: (Nothing, U) => R)(implicit executor: ExecutionContext): Future[R] = this - override def fallbackTo[U >: Nothing](that: Future[U]): Future[U] = this - override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = this - override def andThen[U](pf: PartialFunction[Try[Nothing], U])(implicit executor: ExecutionContext): Future[Nothing] = this - - override def toString: String = 
"Future()" - } - - /** A Future which is always completed with the Unit value. - */ - val unit: Future[Unit] = successful(()) - - /** Creates an already completed Future with the specified exception. - * - * @tparam T the type of the value in the future - * @param exception the non-null instance of `Throwable` - * @return the newly created `Future` instance - */ - def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future - - /** Creates an already completed Future with the specified result. - * - * @tparam T the type of the value in the future - * @param result the given successful value - * @return the newly created `Future` instance - */ - def successful[T](result: T): Future[T] = Promise.successful(result).future - - /** Creates an already completed Future with the specified result or exception. - * - * @tparam T the type of the value in the `Future` - * @param result the result of the returned `Future` instance - * @return the newly created `Future` instance - */ - def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future - - /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation. - * - * The following expressions are equivalent: - * - * {{{ - * val f1 = Future(expr) - * val f2 = Future.unit.map(_ => expr) - * }}} - * - * The result becomes available once the asynchronous computation is completed. - * - * @tparam T the type of the result - * @param body the asynchronous computation - * @param executor the execution context on which the future is run - * @return the `Future` holding the result of the computation - */ - def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = - unit.map(_ => body) - - /** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms a `TraversableOnce[Future[A]]` - * into a `Future[TraversableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`. - * - * @tparam A the type of the value inside the Futures - * @tparam M the type of the `TraversableOnce` of Futures - * @param in the `TraversableOnce` of Futures which will be sequenced - * @return the `Future` of the `TraversableOnce` of results - */ - def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = { - in.foldLeft(successful(cbf(in))) { - (fr, fa) => fr.zipWith(fa)(_ += _) - }.map(_.result())(InternalCallbackExecutor) - } - - /** Asynchronously and non-blockingly returns a new `Future` to the result of the first future - * in the list that is completed. This means no matter if it is completed as a success or as a failure. - * - * @tparam T the type of the value in the future - * @param futures the `TraversableOnce` of Futures in which to find the first completed - * @return the `Future` holding the result of the future that is first to be completed - */ - def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = { - val p = Promise[T]() - val completeFirst: Try[T] => Unit = p tryComplete _ - futures foreach { _ onComplete completeFirst } - p.future - } - - /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result - * of the first `Future` with a result that matches the predicate. 
- * - * @tparam T the type of the value in the future - * @param futures the `TraversableOnce` of Futures to search - * @param p the predicate which indicates if it's a match - * @return the `Future` holding the optional result of the search - */ - @deprecated("use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12.0") - def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { - val futuresBuffer = futures.toBuffer - if (futuresBuffer.isEmpty) successful[Option[T]](None) - else { - val result = Promise[Option[T]]() - val ref = new AtomicInteger(futuresBuffer.size) - val search: Try[T] => Unit = v => try { - v match { - case Success(r) if p(r) => result tryComplete Success(Some(r)) - case _ => - } - } finally { - if (ref.decrementAndGet == 0) { - result tryComplete Success(None) - } - } - - futuresBuffer.foreach(_ onComplete search) - - result.future - } - } - - - /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result - * of the first `Future` with a result that matches the predicate, failed `Future`s will be ignored. - * - * @tparam T the type of the value in the future - * @param futures the `scala.collection.immutable.Iterable` of Futures to search - * @param p the predicate which indicates if it's a match - * @return the `Future` holding the optional result of the search - */ - def find[T](futures: scala.collection.immutable.Iterable[Future[T]])(p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { - def searchNext(i: Iterator[Future[T]]): Future[Option[T]] = - if (!i.hasNext) successful[Option[T]](None) - else { - i.next().transformWith { - case Success(r) if p(r) => successful(Some(r)) - case other => searchNext(i) - } - } - searchNext(futures.iterator) - } - - /** A non-blocking, asynchronous left fold over the specified futures, - * with the start value of the given zero. - * The fold is performed asynchronously in left-to-right order as the futures become completed. - * The result will be the first failure of any of the futures, or any failure in the actual fold, - * or the result of the fold. - * - * Example: - * {{{ - * val futureSum = Future.foldLeft(futures)(0)(_ + _) - * }}} - * - * @tparam T the type of the value of the input Futures - * @tparam R the type of the value of the returned `Future` - * @param futures the `scala.collection.immutable.Iterable` of Futures to be folded - * @param zero the start value of the fold - * @param op the fold operation to be applied to the zero and futures - * @return the `Future` holding the result of the fold - */ - def foldLeft[T, R](futures: scala.collection.immutable.Iterable[Future[T]])(zero: R)(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = - foldNext(futures.iterator, zero, op) - - private[this] def foldNext[T, R](i: Iterator[Future[T]], prevValue: R, op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = - if (!i.hasNext) successful(prevValue) - else i.next().flatMap { value => foldNext(i, op(prevValue, value), op) } - - /** A non-blocking, asynchronous fold over the specified futures, with the start value of the given zero. - * The fold is performed on the thread where the last future is completed, - * the result will be the first failure of any of the futures, or any failure in the actual fold, - * or the result of the fold. 
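A short illustrative sketch (not taken from the sources) of the combinators above — `sequence`, `firstCompletedOf`, `foldLeft` and `find` — again assuming the global execution context:
{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val futures = List(Future(1), Future(2), Future(3))

// Collapse many futures into a single future of a collection.
Future.sequence(futures).foreach(xs => println(xs.sum))   // eventually prints 6

// Complete with whichever future finishes first, whether it succeeded or failed.
Future.firstCompletedOf(futures).foreach(println)

// Fold the results left to right as the futures complete.
Future.foldLeft(futures)(0)(_ + _).foreach(println)       // eventually prints 6

// First successful result matching the predicate, or None.
Future.find(futures)(_ % 2 == 0).foreach(println)         // eventually prints Some(2)
}}}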
- * - * Example: - * {{{ - * val futureSum = Future.fold(futures)(0)(_ + _) - * }}} - * - * @tparam T the type of the value of the input Futures - * @tparam R the type of the value of the returned `Future` - * @param futures the `TraversableOnce` of Futures to be folded - * @param zero the start value of the fold - * @param op the fold operation to be applied to the zero and futures - * @return the `Future` holding the result of the fold - */ - @deprecated("use Future.foldLeft instead", "2.12.0") - def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { - if (futures.isEmpty) successful(zero) - else sequence(futures).map(_.foldLeft(zero)(op)) - } - - /** Initiates a non-blocking, asynchronous, fold over the supplied futures - * where the fold-zero is the result value of the `Future` that's completed first. - * - * Example: - * {{{ - * val futureSum = Future.reduce(futures)(_ + _) - * }}} - * @tparam T the type of the value of the input Futures - * @tparam R the type of the value of the returned `Future` - * @param futures the `TraversableOnce` of Futures to be reduced - * @param op the reduce operation which is applied to the results of the futures - * @return the `Future` holding the result of the reduce - */ - @deprecated("use Future.reduceLeft instead", "2.12.0") - def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { - if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection")) - else sequence(futures).map(_ reduceLeft op) - } - - /** Initiates a non-blocking, asynchronous, left reduction over the supplied futures - * where the zero is the result value of the first `Future`. - * - * Example: - * {{{ - * val futureSum = Future.reduceLeft(futures)(_ + _) - * }}} - * @tparam T the type of the value of the input Futures - * @tparam R the type of the value of the returned `Future` - * @param futures the `scala.collection.immutable.Iterable` of Futures to be reduced - * @param op the reduce operation which is applied to the results of the futures - * @return the `Future` holding the result of the reduce - */ - def reduceLeft[T, R >: T](futures: scala.collection.immutable.Iterable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { - val i = futures.iterator - if (!i.hasNext) failed(new NoSuchElementException("reduceLeft attempted on empty collection")) - else i.next() flatMap { v => foldNext(i, v, op) } - } - - /** Asynchronously and non-blockingly transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` - * using the provided function `A => Future[B]`. - * This is useful for performing a parallel map. 
For example, to apply a function to all items of a list - * in parallel: - * - * {{{ - * val myFutureList = Future.traverse(myList)(x => Future(myFunc(x))) - * }}} - * @tparam A the type of the value inside the Futures in the `TraversableOnce` - * @tparam B the type of the value of the returned `Future` - * @tparam M the type of the `TraversableOnce` of Futures - * @param in the `TraversableOnce` of Futures which will be sequenced - * @param fn the function to apply to the `TraversableOnce` of Futures to produce the results - * @return the `Future` of the `TraversableOnce` of results - */ - def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] = - in.foldLeft(successful(cbf(in))) { - (fr, a) => fr.zipWith(fn(a))(_ += _) - }.map(_.result())(InternalCallbackExecutor) - - - // This is used to run callbacks which are internal - // to scala.concurrent; our own callbacks are only - // ever used to eventually run another callback, - // and that other callback will have its own - // executor because all callbacks come with - // an executor. Our own callbacks never block - // and have no "expected" exceptions. - // As a result, this executor can do nothing; - // some other executor will always come after - // it (and sometimes one will be before it), - // and those will be performing the "real" - // dispatch to code outside scala.concurrent. - // Because this exists, ExecutionContext.defaultExecutionContext - // isn't instantiated by Future internals, so - // if some code for some reason wants to avoid - // ever starting up the default context, it can do so - // by just not ever using it itself. scala.concurrent - // doesn't need to create defaultExecutionContext as - // a side effect. - private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor { - override protected def unbatchedExecute(r: Runnable): Unit = - r.run() - override def reportFailure(t: Throwable): Unit = - throw new IllegalStateException("problem in scala.concurrent internal callback", t) - } -} - -/** A marker indicating that a `java.lang.Runnable` provided to `scala.concurrent.ExecutionContext` - * wraps a callback provided to `Future.onComplete`. - * All callbacks provided to a `Future` end up going through `onComplete`, so this allows an - * `ExecutionContext` to special-case callbacks that were executed by `Future` if desired. - */ -trait OnCompleteRunnable { - self: Runnable => -} - diff --git a/tests/scala2-library/src/library/scala/concurrent/JavaConversions.scala b/tests/scala2-library/src/library/scala/concurrent/JavaConversions.scala deleted file mode 100644 index 91e55d30cbef..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/JavaConversions.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import java.util.concurrent.{ExecutorService, Executor} -import scala.language.implicitConversions - -/** The `JavaConversions` object provides implicit conversions supporting - * interoperability between Scala and Java concurrency classes. - * - * @author Philipp Haller - */ -object JavaConversions { - - /** - * Creates a new `ExecutionContext` which uses the provided `ExecutorService`. 
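A small sketch of `Future.traverse` as a parallel map; `lookup` is a hypothetical stand-in for an asynchronous call, and the global execution context is assumed:
{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

def lookup(id: Int): Future[String] = Future(s"user-$id")   // hypothetical async call

// Start all lookups in parallel and collect the results in input order.
val all: Future[List[String]] = Future.traverse(List(1, 2, 3))(lookup)
all.foreach(println)   // eventually prints List(user-1, user-2, user-3)
}}}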
- */ - implicit def asExecutionContext(exec: ExecutorService): ExecutionContextExecutorService = - ExecutionContext.fromExecutorService(exec) - - /** - * Creates a new `ExecutionContext` which uses the provided `Executor`. - */ - implicit def asExecutionContext(exec: Executor): ExecutionContextExecutor = - ExecutionContext.fromExecutor(exec) - -} diff --git a/tests/scala2-library/src/library/scala/concurrent/Lock.scala b/tests/scala2-library/src/library/scala/concurrent/Lock.scala deleted file mode 100644 index 757fb94cc774..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/Lock.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.concurrent - -/** This class ... - * - * @author Martin Odersky - * @version 1.0, 10/03/2003 - */ -@deprecated("use java.util.concurrent.locks.Lock", "2.11.2") -class Lock { - var available = true - - def acquire() = synchronized { - while (!available) wait() - available = false - } - - def release() = synchronized { - available = true - notify() - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/Promise.scala b/tests/scala2-library/src/library/scala/concurrent/Promise.scala deleted file mode 100644 index 894b134e8331..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/Promise.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import scala.util.{ Try, Success, Failure } - -/** Promise is an object which can be completed with a value or failed - * with an exception. - * - * @define promiseCompletion - * If the promise has already been fulfilled, failed or has timed out, - * calling this method will throw an IllegalStateException. - * - * @define allowedThrowables - * If the throwable used to fail this promise is an error, a control exception - * or an interrupted exception, it will be wrapped as a cause within an - * `ExecutionException` which will fail the promise. - * - * @define nonDeterministic - * Note: Using this method may result in non-deterministic concurrent programs. - */ -trait Promise[T] { - /** Future containing the value of this promise. - */ - def future: Future[T] - - /** Returns whether the promise has already been completed with - * a value or an exception. - * - * $nonDeterministic - * - * @return `true` if the promise is already completed, `false` otherwise - */ - def isCompleted: Boolean - - /** Completes the promise with either an exception or a value. - * - * @param result Either the value or the exception to complete the promise with. - * - * $promiseCompletion - */ - def complete(result: Try[T]): this.type = - if (tryComplete(result)) this else throw new IllegalStateException("Promise already completed.") - - /** Tries to complete the promise with either a value or the exception. - * - * $nonDeterministic - * - * @return If the promise has already been completed returns `false`, or `true` otherwise. - */ - def tryComplete(result: Try[T]): Boolean - - /** Completes this promise with the specified future, once that future is completed. 
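These conversions delegate to `ExecutionContext.fromExecutorService` and `ExecutionContext.fromExecutor`; a minimal sketch of the same bridging done explicitly, for illustration only:
{{{
import java.util.concurrent.Executors
import scala.concurrent.{ExecutionContext, Future}

// Wrap a Java thread pool as an ExecutionContext.
val pool = Executors.newFixedThreadPool(4)
implicit val ec: ExecutionContext = ExecutionContext.fromExecutorService(pool)

Future(21 * 2).foreach(println)   // runs on the pool's threads, eventually prints 42
// remember to shut the pool down once no more tasks will be submitted
}}}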
- * - * @return This promise - */ - final def completeWith(other: Future[T]): this.type = tryCompleteWith(other) - - /** Attempts to complete this promise with the specified future, once that future is completed. - * - * @return This promise - */ - final def tryCompleteWith(other: Future[T]): this.type = { - if (other ne this.future) { // this tryCompleteWith this doesn't make much sense - other.onComplete(this tryComplete _)(Future.InternalCallbackExecutor) - } - this - } - - /** Completes the promise with a value. - * - * @param value The value to complete the promise with. - * - * $promiseCompletion - */ - def success(@deprecatedName('v) value: T): this.type = complete(Success(value)) - - /** Tries to complete the promise with a value. - * - * $nonDeterministic - * - * @return If the promise has already been completed returns `false`, or `true` otherwise. - */ - def trySuccess(value: T): Boolean = tryComplete(Success(value)) - - /** Completes the promise with an exception. - * - * @param cause The throwable to complete the promise with. - * - * $allowedThrowables - * - * $promiseCompletion - */ - def failure(@deprecatedName('t) cause: Throwable): this.type = complete(Failure(cause)) - - /** Tries to complete the promise with an exception. - * - * $nonDeterministic - * - * @return If the promise has already been completed returns `false`, or `true` otherwise. - */ - def tryFailure(@deprecatedName('t) cause: Throwable): Boolean = tryComplete(Failure(cause)) -} - -object Promise { - /** Creates a promise object which can be completed with a value. - * - * @tparam T the type of the value in the promise - * @return the newly created `Promise` object - */ - def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]() - - /** Creates an already completed Promise with the specified exception. - * - * @tparam T the type of the value in the promise - * @return the newly created `Promise` object - */ - def failed[T](exception: Throwable): Promise[T] = fromTry(Failure(exception)) - - /** Creates an already completed Promise with the specified result. - * - * @tparam T the type of the value in the promise - * @return the newly created `Promise` object - */ - def successful[T](result: T): Promise[T] = fromTry(Success(result)) - - /** Creates an already completed Promise with the specified result or exception. - * - * @tparam T the type of the value in the promise - * @return the newly created `Promise` object - */ - def fromTry[T](result: Try[T]): Promise[T] = impl.Promise.KeptPromise[T](result) -} diff --git a/tests/scala2-library/src/library/scala/concurrent/SyncChannel.scala b/tests/scala2-library/src/library/scala/concurrent/SyncChannel.scala deleted file mode 100644 index 735598935c11..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/SyncChannel.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -/** A `SyncChannel` allows one to exchange data synchronously between - * a reader and a writer thread. The writer thread is blocked until the - * data to be written has been read by a corresponding reader thread. 
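A minimal sketch of the usual `Promise` pattern, bridging a callback-based API into a `Future`; `startComputation` is a hypothetical helper used only for illustration:
{{{
import scala.concurrent.{Future, Promise}
import scala.util.Try

// Hypothetical callback-based API.
def startComputation(onDone: Try[Int] => Unit): Unit =
  new Thread(() => onDone(Try(6 * 7))).start()

// Complete the promise exactly once from the callback and hand out its future.
def computeFuture(): Future[Int] = {
  val p = Promise[Int]()
  startComputation(result => p.complete(result))
  p.future
}
}}}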
- * - * @author Philipp Haller - * @version 2.0, 04/17/2008 - */ -class SyncChannel[A] { - - private var pendingWrites = List[(A, SyncVar[Boolean])]() - private var pendingReads = List[SyncVar[A]]() - - def write(data: A) { - // create write request - val writeReq = new SyncVar[Boolean] - - this.synchronized { - // check whether there is a reader waiting - if (!pendingReads.isEmpty) { - val readReq = pendingReads.head - pendingReads = pendingReads.tail - - // let reader continue - readReq put data - - // resolve write request - writeReq put true - } - else { - // enqueue write request - pendingWrites = pendingWrites ::: List((data, writeReq)) - } - } - - writeReq.get - } - - def read: A = { - // create read request - val readReq = new SyncVar[A] - - this.synchronized { - // check whether there is a writer waiting - if (!pendingWrites.isEmpty) { - // read data - val (data, writeReq) = pendingWrites.head - pendingWrites = pendingWrites.tail - - // let writer continue - writeReq.put(true) - - // resolve read request - readReq.put (data) - } - else { - // enqueue read request - pendingReads = pendingReads ::: List(readReq) - } - } - - readReq.get - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/SyncVar.scala b/tests/scala2-library/src/library/scala/concurrent/SyncVar.scala deleted file mode 100644 index 77bfa951198a..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/SyncVar.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import java.util.concurrent.TimeUnit - -/** A class to provide safe concurrent access to a mutable cell. - * All methods are synchronized. - * - * @tparam A type of the contained value - * @author Martin Odersky - * @version 1.0, 10/03/2003 - */ -class SyncVar[A] { - private var isDefined: Boolean = false - private var value: A = _ - - /** - * Wait for this SyncVar to become defined and then get - * the stored value without modifying it. - * - * @return value that is held in this container - */ - def get: A = synchronized { - while (!isDefined) wait() - value - } - - /** Waits `timeout` millis. If `timeout <= 0` just returns 0. - * It never returns negative results. - */ - private def waitMeasuringElapsed(timeout: Long): Long = if (timeout <= 0) 0 else { - val start = System.nanoTime() - wait(timeout) - val elapsed = System.nanoTime() - start - // nanoTime should be monotonic, but it's not possible to rely on that. - // See http://bugs.java.com/bugdatabase/view_bug.do?bug_id=6458294. - if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed) - } - - /** Wait at least `timeout` milliseconds (possibly more) for this `SyncVar` - * to become defined and then get its value. - * - * @param timeout time in milliseconds to wait - * @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise - */ - def get(timeout: Long): Option[A] = synchronized { - /* Defending against the system clock going backward - * by counting time elapsed directly. Loop required - * to deal with spurious wakeups. - */ - var rest = timeout - while (!isDefined && rest > 0) { - val elapsed = waitMeasuringElapsed(rest) - rest -= elapsed - } - if (isDefined) Some(value) else None - } - - /** - * Wait for this SyncVar to become defined and then get - * the stored value, unsetting it as a side effect. 
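An illustrative rendezvous between two threads using the `SyncChannel` above (not part of the deleted file):
{{{
import scala.concurrent.SyncChannel

val channel = new SyncChannel[Int]

// write() blocks until a reader has taken the value; read blocks until a value arrives.
val writer = new Thread(() => channel.write(42))
val reader = new Thread(() => println(channel.read))   // eventually prints 42

writer.start(); reader.start()
writer.join();  reader.join()
}}}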
- * - * @return value that was held in this container - */ - def take(): A = synchronized { - try get - finally unsetVal() - } - - /** Wait at least `timeout` milliseconds (possibly more) for this `SyncVar` - * to become defined and then get the stored value, unsetting it - * as a side effect. - * - * @param timeout the amount of milliseconds to wait - * @return the value or a throws an exception if the timeout occurs - * @throws NoSuchElementException on timeout - */ - def take(timeout: Long): A = synchronized { - try get(timeout).get - finally unsetVal() - } - - // TODO: this method should be private - // [Heather] the reason why: it doesn't take into consideration - // whether or not the SyncVar is already defined. So, set has been - // deprecated in order to eventually be able to make "setting" private - @deprecated("use `put` to ensure a value cannot be overwritten without a corresponding `take`", "2.10.0") - // NOTE: Used by sbt 0.13.0-M2 and below - def set(x: A): Unit = setVal(x) - - /** Place a value in the SyncVar. If the SyncVar already has a stored value, - * wait until another thread takes it. */ - def put(x: A): Unit = synchronized { - while (isDefined) wait() - setVal(x) - } - - /** Check whether a value is stored in the synchronized variable. */ - def isSet: Boolean = synchronized { - isDefined - } - - // TODO: this method should be private - // [Heather] the reason why: it doesn't take into consideration - // whether or not the SyncVar is already defined. So, unset has been - // deprecated in order to eventually be able to make "unsetting" private - @deprecated("use `take` to ensure a value is never discarded", "2.10.0") - // NOTE: Used by sbt 0.13.0-M2 and below - def unset(): Unit = synchronized { - isDefined = false - value = null.asInstanceOf[A] - notifyAll() - } - - // `setVal` exists so as to retroactively deprecate `set` without - // deprecation warnings where we use `set` internally. The - // implementation of `set` was moved to `setVal` to achieve this - private def setVal(x: A): Unit = synchronized { - isDefined = true - value = x - notifyAll() - } - - // `unsetVal` exists so as to retroactively deprecate `unset` without - // deprecation warnings where we use `unset` internally. The - // implementation of `unset` was moved to `unsetVal` to achieve this - private def unsetVal(): Unit = synchronized { - isDefined = false - value = null.asInstanceOf[A] - notifyAll() - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/duration/Deadline.scala b/tests/scala2-library/src/library/scala/concurrent/duration/Deadline.scala deleted file mode 100644 index a25a4786027f..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/duration/Deadline.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.duration - -/** - * This class stores a deadline, as obtained via `Deadline.now` or the - * duration DSL: - * - * {{{ - * import scala.concurrent.duration._ - * 3.seconds.fromNow - * }}} - * - * Its main purpose is to manage repeated attempts to achieve something (like - * awaiting a condition) by offering the methods `hasTimeLeft` and `timeLeft`. All - * durations are measured according to `System.nanoTime` aka wall-time; this - * does not take into account changes to the system clock (such as leap - * seconds). 
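A small hand-off sketch with `put`/`take` on the `SyncVar` above, illustrative only:
{{{
import scala.concurrent.SyncVar

val box = new SyncVar[String]

// take() blocks until a value has been put; put() blocks while a value is still present.
val consumer = new Thread(() => println(box.take()))   // eventually prints hello
consumer.start()
box.put("hello")
consumer.join()
}}}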
- */ -case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] { - /** - * Return a deadline advanced (i.e., moved into the future) by the given duration. - */ - def +(other: FiniteDuration): Deadline = copy(time = time + other) - /** - * Return a deadline moved backwards (i.e., towards the past) by the given duration. - */ - def -(other: FiniteDuration): Deadline = copy(time = time - other) - /** - * Calculate time difference between this and the other deadline, where the result is directed (i.e., may be negative). - */ - def -(other: Deadline): FiniteDuration = time - other.time - /** - * Calculate time difference between this duration and now; the result is negative if the deadline has passed. - * - * '''''Note that on some systems this operation is costly because it entails a system call.''''' - * Check `System.nanoTime` for your platform. - */ - def timeLeft: FiniteDuration = this - Deadline.now - /** - * Determine whether the deadline still lies in the future at the point where this method is called. - * - * '''''Note that on some systems this operation is costly because it entails a system call.''''' - * Check `System.nanoTime` for your platform. - */ - def hasTimeLeft(): Boolean = !isOverdue() - /** - * Determine whether the deadline lies in the past at the point where this method is called. - * - * '''''Note that on some systems this operation is costly because it entails a system call.''''' - * Check `System.nanoTime` for your platform. - */ - def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0 - /** - * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration. - */ - def compare(other: Deadline) = time compare other.time -} - -object Deadline { - /** - * Construct a deadline due exactly at the point where this method is called. Useful for then - * advancing it to obtain a future deadline, or for sampling the current time exactly once and - * then comparing it to multiple deadlines (using subtraction). - */ - def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS)) - - /** - * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration. - */ - implicit object DeadlineIsOrdered extends Ordering[Deadline] { - def compare(a: Deadline, b: Deadline) = a compare b - } - -} diff --git a/tests/scala2-library/src/library/scala/concurrent/duration/Duration.scala b/tests/scala2-library/src/library/scala/concurrent/duration/Duration.scala deleted file mode 100644 index 1654e69950f8..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/duration/Duration.scala +++ /dev/null @@ -1,736 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.duration - -import java.lang.{ Double => JDouble, Long => JLong } - -object Duration { - - /** - * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if - * - * - the unit is NANOSECONDS - * - and the length has an absolute value greater than 2^53 - * - * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively. 
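An illustrative polling loop built on `Deadline` (not from the sources):
{{{
import scala.concurrent.duration._

val deadline = 2.seconds.fromNow

// Poll until the deadline passes, reporting the remaining time.
while (deadline.hasTimeLeft()) {
  println(s"time left: ${deadline.timeLeft.toMillis} ms")
  Thread.sleep(500)
}
}}}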
- * - * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]] - */ - def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length) - - /** - * Construct a finite duration from the given length and time unit. The unit given is retained - * throughout calculations as long as possible, so that it can be retrieved later. - */ - def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit) - - /** - * Construct a finite duration from the given length and time unit, where the latter is - * looked up in a list of string representation. Valid choices are: - * - * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond` - * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days"). - */ - def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit)) - - // Double stores 52 bits mantissa, but there is an implied '1' in front, making the limit 2^53 - private[this] final val maxPreciseDouble = 9007199254740992d - - /** - * Parse String into Duration. Format is `""`, where - * whitespace is allowed before, between and after the parts. Infinities are - * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`. - * - * @throws NumberFormatException if format is not parsable - */ - def apply(s: String): Duration = { - val s1: String = s filterNot (_.isWhitespace) - s1 match { - case "Inf" | "PlusInf" | "+Inf" => Inf - case "MinusInf" | "-Inf" => MinusInf - case _ => - val unitName = s1.reverse.takeWhile(_.isLetter).reverse - timeUnit get unitName match { - case Some(unit) => - val valueStr = s1 dropRight unitName.length - val valueD = JDouble.parseDouble(valueStr) - if (valueD >= -maxPreciseDouble && valueD <= maxPreciseDouble) Duration(valueD, unit) - else Duration(JLong.parseLong(valueStr), unit) - case _ => throw new NumberFormatException("format error " + s) - } - } - } - - // "ms milli millisecond" -> List("ms", "milli", "millis", "millisecond", "milliseconds") - private[this] def words(s: String) = (s.trim split "\\s+").toList - private[this] def expandLabels(labels: String): List[String] = { - val hd :: rest = words(labels) - hd :: rest.flatMap(s => List(s, s + "s")) - } - private[this] val timeUnitLabels = List( - DAYS -> "d day", - HOURS -> "h hour", - MINUTES -> "min minute", - SECONDS -> "s sec second", - MILLISECONDS -> "ms milli millisecond", - MICROSECONDS -> "µs micro microsecond", - NANOSECONDS -> "ns nano nanosecond" - ) - - // TimeUnit => standard label - protected[duration] val timeUnitName: Map[TimeUnit, String] = - timeUnitLabels.toMap.mapValues(s => words(s).last).toMap - - // Label => TimeUnit - protected[duration] val timeUnit: Map[String, TimeUnit] = - timeUnitLabels.flatMap{ case (unit, names) => expandLabels(names) map (_ -> unit) }.toMap - - /** - * Extract length and time unit out of a string, where the format must match the description for [[Duration$.apply(s:String)* apply(String)]]. - * The extractor will not match for malformed strings or non-finite durations. - */ - def unapply(s: String): Option[(Long, TimeUnit)] = - ( try Some(apply(s)) catch { case _: RuntimeException => None } ) flatMap unapply - - /** - * Extract length and time unit out of a duration, if it is finite. 
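A brief sketch of the construction, parsing and extractor forms described above, for illustration:
{{{
import scala.concurrent.duration._

val d1 = Duration(100, MILLISECONDS)   // explicit length and unit
val d2 = Duration("1.5 minutes")       // parsed from a string
val d3 = Duration.Inf                  // non-finite value

// The extractor matches finite durations only.
Seq(d2, d3).foreach {
  case Duration(length, unit) => println(s"$length $unit")
  case other                  => println(s"not finite: $other")
}
}}}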
- */ - def unapply(d: Duration): Option[(Long, TimeUnit)] = - if (d.isFinite()) Some((d.length, d.unit)) else None - - /** - * Construct a possibly infinite or undefined Duration from the given number of nanoseconds. - * - * - `Double.PositiveInfinity` is mapped to [[Duration.Inf]] - * - `Double.NegativeInfinity` is mapped to [[Duration.MinusInf]] - * - `Double.NaN` is mapped to [[Duration.Undefined]] - * - `-0d` is mapped to [[Duration.Zero]] (exactly like `0d`) - * - * The semantics of the resulting Duration objects matches the semantics of their Double - * counterparts with respect to arithmetic operations. - * - * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]] - */ - def fromNanos(nanos: Double): Duration = { - if (nanos.isInfinite) - if (nanos > 0) Inf else MinusInf - else if (JDouble.isNaN(nanos)) - Undefined - else if (nanos > Long.MaxValue || nanos < Long.MinValue) - throw new IllegalArgumentException("trying to construct too large duration with " + nanos + "ns") - else - fromNanos(nanos.round) - } - - private[this] final val µs_per_ns = 1000L - private[this] final val ms_per_ns = µs_per_ns * 1000 - private[this] final val s_per_ns = ms_per_ns * 1000 - private[this] final val min_per_ns = s_per_ns * 60 - private[this] final val h_per_ns = min_per_ns * 60 - private[this] final val d_per_ns = h_per_ns * 24 - - /** - * Construct a finite duration from the given number of nanoseconds. The - * result will have the coarsest possible time unit which can exactly express - * this duration. - * - * @throws IllegalArgumentException for `Long.MinValue` since that would lead to inconsistent behavior afterwards (cannot be negated) - */ - def fromNanos(nanos: Long): FiniteDuration = { - if (nanos % d_per_ns == 0) Duration(nanos / d_per_ns, DAYS) - else if (nanos % h_per_ns == 0) Duration(nanos / h_per_ns, HOURS) - else if (nanos % min_per_ns == 0) Duration(nanos / min_per_ns, MINUTES) - else if (nanos % s_per_ns == 0) Duration(nanos / s_per_ns, SECONDS) - else if (nanos % ms_per_ns == 0) Duration(nanos / ms_per_ns, MILLISECONDS) - else if (nanos % µs_per_ns == 0) Duration(nanos / µs_per_ns, MICROSECONDS) - else Duration(nanos, NANOSECONDS) - } - - /** - * Preconstructed value of `0.days`. - */ - // unit as coarse as possible to keep (_ + Zero) sane unit-wise - val Zero: FiniteDuration = new FiniteDuration(0, DAYS) - - /** - * The Undefined value corresponds closely to Double.NaN: - * - * - it is the result of otherwise invalid operations - * - it does not equal itself (according to `equals()`) - * - it compares greater than any other Duration apart from itself (for which `compare` returns 0) - * - * The particular comparison semantics mirror those of Double.NaN. 
- * - * '''''Use `eq` when checking an input of a method against this value.''''' - */ - val Undefined: Infinite = new Infinite { - override def toString = "Duration.Undefined" - override def equals(other: Any) = false - override def +(other: Duration): Duration = this - override def -(other: Duration): Duration = this - override def *(factor: Double): Duration = this - override def /(factor: Double): Duration = this - override def /(other: Duration): Double = Double.NaN - def compare(other: Duration) = if (other eq this) 0 else 1 - def unary_- : Duration = this - def toUnit(unit: TimeUnit): Double = Double.NaN - private def readResolve(): AnyRef = Undefined // Instructs deserialization to use this same instance - } - - sealed abstract class Infinite extends Duration { - def +(other: Duration): Duration = other match { - case x if x eq Undefined => Undefined - case x: Infinite if x ne this => Undefined - case _ => this - } - def -(other: Duration): Duration = other match { - case x if x eq Undefined => Undefined - case x: Infinite if x eq this => Undefined - case _ => this - } - - def *(factor: Double): Duration = - if (factor == 0d || JDouble.isNaN(factor)) Undefined - else if (factor < 0d) -this - else this - def /(divisor: Double): Duration = - if (JDouble.isNaN(divisor) || divisor.isInfinite) Undefined - else if ((divisor compare 0d) < 0) -this - else this - def /(divisor: Duration): Double = divisor match { - case _: Infinite => Double.NaN - case x => Double.PositiveInfinity * (if ((this > Zero) ^ (divisor >= Zero)) -1 else 1) - } - - final def isFinite() = false - - private[this] def fail(what: String) = throw new IllegalArgumentException(s"$what not allowed on infinite Durations") - final def length: Long = fail("length") - final def unit: TimeUnit = fail("unit") - final def toNanos: Long = fail("toNanos") - final def toMicros: Long = fail("toMicros") - final def toMillis: Long = fail("toMillis") - final def toSeconds: Long = fail("toSeconds") - final def toMinutes: Long = fail("toMinutes") - final def toHours: Long = fail("toHours") - final def toDays: Long = fail("toDays") - - final def toCoarsest: Duration = this - } - - /** - * Infinite duration: greater than any other (apart from Undefined) and not equal to any other - * but itself. This value closely corresponds to Double.PositiveInfinity, - * matching its semantics in arithmetic operations. - */ - val Inf: Infinite = new Infinite { - override def toString = "Duration.Inf" - def compare(other: Duration) = other match { - case x if x eq Undefined => -1 // Undefined != Undefined - case x if x eq this => 0 // `case Inf` will include null checks in the byte code - case _ => 1 - } - def unary_- : Duration = MinusInf - def toUnit(unit: TimeUnit): Double = Double.PositiveInfinity - private def readResolve(): AnyRef = Inf // Instructs deserialization to use this same instance - } - - /** - * Infinite duration: less than any other and not equal to any other - * but itself. This value closely corresponds to Double.NegativeInfinity, - * matching its semantics in arithmetic operations. - */ - val MinusInf: Infinite = new Infinite { - override def toString = "Duration.MinusInf" - def compare(other: Duration) = if (other eq this) 0 else -1 - def unary_- : Duration = Inf - def toUnit(unit: TimeUnit): Double = Double.NegativeInfinity - private def readResolve(): AnyRef = MinusInf // Instructs deserialization to use this same instance - } - - // Java Factories - - /** - * Construct a finite duration from the given length and time unit. 
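A short sketch (not from the sources) of the Double-like semantics of the non-finite values defined above:
{{{
import scala.concurrent.duration._

println(Duration.Inf + 1.second)                    // Duration.Inf
println(Duration.Inf - Duration.Inf)                // Duration.Undefined
println(Duration.Undefined == Duration.Undefined)   // false, just like Double.NaN
println(1.second < Duration.Inf)                    // true
println(Duration.MinusInf < Duration.Zero)          // true
}}}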
The unit given is retained - * throughout calculations as long as possible, so that it can be retrieved later. - */ - def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit) - /** - * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if - * - * - the unit is NANOSECONDS - * - and the length has an absolute value greater than 2^53 - * - * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively. - * - * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]] - */ - def create(length: Double, unit: TimeUnit): Duration = apply(length, unit) - /** - * Construct a finite duration from the given length and time unit, where the latter is - * looked up in a list of string representation. Valid choices are: - * - * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond` - * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days"). - */ - def create(length: Long, unit: String): FiniteDuration = apply(length, unit) - /** - * Parse String into Duration. Format is `""`, where - * whitespace is allowed before, between and after the parts. Infinities are - * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`. - * - * @throws NumberFormatException if format is not parsable - */ - def create(s: String): Duration = apply(s) - - /** - * The natural ordering of durations matches the natural ordering for Double, including non-finite values. - */ - implicit object DurationIsOrdered extends Ordering[Duration] { - def compare(a: Duration, b: Duration) = a compare b - } -} - -/** - *
<h2>Utility for working with java.util.concurrent.TimeUnit durations.</h2>
- * - * '''''This class is not meant as a general purpose representation of time, it is - * optimized for the needs of `scala.concurrent`.''''' - * - *
<h2>Basic Usage</h2>
- *
- * Examples: - * {{{ - * import scala.concurrent.duration._ - * - * val duration = Duration(100, MILLISECONDS) - * val duration = Duration(100, "millis") - * - * duration.toNanos - * duration < 1.second - * duration <= Duration.Inf - * }}} - * - * '''''Invoking inexpressible conversions (like calling `toSeconds` on an infinite duration) will throw an IllegalArgumentException.''''' - * - *
- * Implicits are also provided for Int, Long and Double. Example usage: - * {{{ - * import scala.concurrent.duration._ - * - * val duration = 100 millis - * }}} - * - * '''''The DSL provided by the implicit conversions always allows construction of finite durations, even for infinite Double inputs; use Duration.Inf instead.''''' - * - * Extractors, parsing and arithmetic are also included: - * {{{ - * val d = Duration("1.2 µs") - * val Duration(length, unit) = 5 millis - * val d2 = d * 2.5 - * val d3 = d2 + 1.millisecond - * }}} - * - *
<h2>Handling of Time Units</h2>
- * - * Calculations performed on finite durations always retain the more precise unit of either operand, no matter - * whether a coarser unit would be able to exactly express the same duration. This means that Duration can be - * used as a lossless container for a (length, unit) pair if it is constructed using the corresponding methods - * and no arithmetic is performed on it; adding/subtracting durations should in that case be done with care. - * - *
<h2>Correspondence to Double Semantics</h2>
- * - * The semantics of arithmetic operations on Duration are two-fold: - * - * - exact addition/subtraction with nanosecond resolution for finite durations, independent of the summands' magnitude - * - isomorphic to `java.lang.Double` when it comes to infinite or undefined values - * - * The conversion between Duration and Double is done using [[Duration.toUnit]] (with unit NANOSECONDS) - * and [[Duration$.fromNanos(nanos:Double)* Duration.fromNanos(Double)]] - * - *
<h2>Ordering</h2>
- * - * The default ordering is consistent with the ordering of Double numbers, which means that Undefined is - * considered greater than all other durations, including [[Duration.Inf]]. - * - * @define exc @throws IllegalArgumentException when invoked on a non-finite duration - * - * @define ovf @throws IllegalArgumentException in case of a finite overflow: the range of a finite duration is +-(2^63-1)ns, and no conversion to infinite durations takes place. - */ -sealed abstract class Duration extends Serializable with Ordered[Duration] { - /** - * Obtain the length of this Duration measured in the unit obtained by the `unit` method. - * - * $exc - */ - def length: Long - /** - * Obtain the time unit in which the length of this duration is measured. - * - * $exc - */ - def unit: TimeUnit - /** - * Return the length of this duration measured in whole nanoseconds, rounding towards zero. - * - * $exc - */ - def toNanos: Long - /** - * Return the length of this duration measured in whole microseconds, rounding towards zero. - * - * $exc - */ - def toMicros: Long - /** - * Return the length of this duration measured in whole milliseconds, rounding towards zero. - * - * $exc - */ - def toMillis: Long - /** - * Return the length of this duration measured in whole seconds, rounding towards zero. - * - * $exc - */ - def toSeconds: Long - /** - * Return the length of this duration measured in whole minutes, rounding towards zero. - * - * $exc - */ - def toMinutes: Long - /** - * Return the length of this duration measured in whole hours, rounding towards zero. - * - * $exc - */ - def toHours: Long - /** - * Return the length of this duration measured in whole days, rounding towards zero. - * - * $exc - */ - def toDays: Long - /** - * Return the number of nanoseconds as floating point number, scaled down to the given unit. - * The result may not precisely represent this duration due to the Double datatype's inherent - * limitations (mantissa size effectively 53 bits). Non-finite durations are represented as - * - [[Duration.Undefined]] is mapped to Double.NaN - * - [[Duration.Inf]] is mapped to Double.PositiveInfinity - * - [[Duration.MinusInf]] is mapped to Double.NegativeInfinity - */ - def toUnit(unit: TimeUnit): Double - - /** - * Return the sum of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def +(other: Duration): Duration - /** - * Return the difference of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def -(other: Duration): Duration - /** - * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def *(factor: Double): Duration - /** - * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def /(divisor: Double): Duration - /** - * Return the quotient of this and that duration as floating-point number. The semantics are - * determined by Double as if calculating the quotient of the nanosecond lengths of both factors. - */ - def /(divisor: Duration): Double - /** - * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]]. 
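A few worked conversions using the accessors defined above; illustrative only, not part of the deleted file:
{{{
import scala.concurrent.duration._

val d = 90.minutes
println(d.toMillis)        // 5400000
println(d.toUnit(HOURS))   // 1.5
println((-d).toMinutes)    // -90
println(d.min(2.hours))    // 90 minutes
}}}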
- */ - def unary_- : Duration - /** - * This method returns whether this duration is finite, which is not the same as - * `!isInfinite` for Double because this method also returns `false` for [[Duration.Undefined]]. - */ - def isFinite(): Boolean - /** - * Return the smaller of this and that duration as determined by the natural ordering. - */ - def min(other: Duration): Duration = if (this < other) this else other - /** - * Return the larger of this and that duration as determined by the natural ordering. - */ - def max(other: Duration): Duration = if (this > other) this else other - - // Java API - - /** - * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def div(divisor: Double) = this / divisor - /** - * Return the quotient of this and that duration as floating-point number. The semantics are - * determined by Double as if calculating the quotient of the nanosecond lengths of both factors. - */ - def div(other: Duration) = this / other - def gt(other: Duration) = this > other - def gteq(other: Duration) = this >= other - def lt(other: Duration) = this < other - def lteq(other: Duration) = this <= other - /** - * Return the difference of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def minus(other: Duration) = this - other - /** - * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those - * of Double. - * - * $ovf - */ - def mul(factor: Double) = this * factor - /** - * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]]. - */ - def neg() = -this - /** - * Return the sum of that duration and this. When involving non-finite summands the semantics match those - * of Double. - * - * $ovf - */ - def plus(other: Duration) = this + other - /** - * Return duration which is equal to this duration but with a coarsest Unit, or self in case it is already the coarsest Unit - *
- * Examples: - * {{{ - * Duration(60, MINUTES).toCoarsest // Duration(1, HOURS) - * Duration(1000, MILLISECONDS).toCoarsest // Duration(1, SECONDS) - * Duration(48, HOURS).toCoarsest // Duration(2, DAYS) - * Duration(5, SECONDS).toCoarsest // Duration(5, SECONDS) - * }}} - */ - def toCoarsest: Duration -} - -object FiniteDuration { - - implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] { - def compare(a: FiniteDuration, b: FiniteDuration) = a compare b - } - - def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit) - def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit)) - - // limit on abs. value of durations in their units - private final val max_ns = Long.MaxValue - private final val max_µs = max_ns / 1000 - private final val max_ms = max_µs / 1000 - private final val max_s = max_ms / 1000 - private final val max_min= max_s / 60 - private final val max_h = max_min / 60 - private final val max_d = max_h / 24 -} - -/** - * This class represents a finite duration. Its addition and subtraction operators are overloaded to retain - * this guarantee statically. The range of this class is limited to +-(2^63-1)ns, which is roughly 292 years. - */ -final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration { - import FiniteDuration._ - import Duration._ - - private[this] def bounded(max: Long) = -max <= length && length <= max - - require(unit match { - /* - * enforce the 2^63-1 ns limit, must be pos/neg symmetrical because of unary_- - */ - case NANOSECONDS ⇒ bounded(max_ns) - case MICROSECONDS ⇒ bounded(max_µs) - case MILLISECONDS ⇒ bounded(max_ms) - case SECONDS ⇒ bounded(max_s) - case MINUTES ⇒ bounded(max_min) - case HOURS ⇒ bounded(max_h) - case DAYS ⇒ bounded(max_d) - case _ ⇒ - val v = DAYS.convert(length, unit) - -max_d <= v && v <= max_d - }, "Duration is limited to +-(2^63-1)ns (ca. 292 years)") - - def toNanos = unit.toNanos(length) - def toMicros = unit.toMicros(length) - def toMillis = unit.toMillis(length) - def toSeconds = unit.toSeconds(length) - def toMinutes = unit.toMinutes(length) - def toHours = unit.toHours(length) - def toDays = unit.toDays(length) - def toUnit(u: TimeUnit) = toNanos.toDouble / NANOSECONDS.convert(1, u) - - /** - * Construct a [[Deadline]] from this duration by adding it to the current instant `Deadline.now`. 
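An illustrative sketch (not from the sources) of finite arithmetic, `toCoarsest` and `fromNow`:
{{{
import scala.concurrent.duration._

val d: FiniteDuration = 90.minutes + 30.minutes   // FiniteDuration + FiniteDuration stays finite
println(d.toCoarsest)                             // 2 hours
println(d.toHours)                                // 2

val deadline: Deadline = 250.millis.fromNow       // Deadline.now shifted by this duration
}}}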
- */ - def fromNow: Deadline = Deadline.now + this - - private[this] def unitString = timeUnitName(unit) + ( if (length == 1) "" else "s" ) - override def toString = "" + length + " " + unitString - - def compare(other: Duration) = other match { - case x: FiniteDuration => toNanos compare x.toNanos - case _ => -(other compare this) - } - - // see https://www.securecoding.cert.org/confluence/display/java/NUM00-J.+Detect+or+prevent+integer+overflow - private[this] def safeAdd(a: Long, b: Long): Long = { - if ((b > 0) && (a > Long.MaxValue - b) || - (b < 0) && (a < Long.MinValue - b)) throw new IllegalArgumentException("integer overflow") - a + b - } - private[this] def add(otherLength: Long, otherUnit: TimeUnit): FiniteDuration = { - val commonUnit = if (otherUnit.convert(1, unit) == 0) unit else otherUnit - val totalLength = safeAdd(commonUnit.convert(length, unit), commonUnit.convert(otherLength, otherUnit)) - new FiniteDuration(totalLength, commonUnit) - } - - def +(other: Duration) = other match { - case x: FiniteDuration => add(x.length, x.unit) - case _ => other - } - def -(other: Duration) = other match { - case x: FiniteDuration => add(-x.length, x.unit) - case _ => -other - } - - def *(factor: Double) = - if (!factor.isInfinite) fromNanos(toNanos * factor) - else if (JDouble.isNaN(factor)) Undefined - else if ((factor > 0) ^ (this < Zero)) Inf - else MinusInf - - def /(divisor: Double) = - if (!divisor.isInfinite) fromNanos(toNanos / divisor) - else if (JDouble.isNaN(divisor)) Undefined - else Zero - - // if this is made a constant, then scalac will elide the conditional and always return +0.0, scala/bug#6331 - private[this] def minusZero = -0d - def /(divisor: Duration): Double = - if (divisor.isFinite()) toNanos.toDouble / divisor.toNanos - else if (divisor eq Undefined) Double.NaN - else if ((length < 0) ^ (divisor > Zero)) 0d - else minusZero - - // overloaded methods taking FiniteDurations, so that you can calculate while statically staying finite - def +(other: FiniteDuration) = add(other.length, other.unit) - def -(other: FiniteDuration) = add(-other.length, other.unit) - def plus(other: FiniteDuration) = this + other - def minus(other: FiniteDuration) = this - other - def min(other: FiniteDuration) = if (this < other) this else other - def max(other: FiniteDuration) = if (this > other) this else other - - // overloaded methods taking Long so that you can calculate while statically staying finite - - /** - * Return the quotient of this duration and the given integer factor. - * - * @throws ArithmeticException if the factor is 0 - */ - def /(divisor: Long) = fromNanos(toNanos / divisor) - - /** - * Return the product of this duration and the given integer factor. - * - * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration - */ - def *(factor: Long) = new FiniteDuration(safeMul(length, factor), unit) - - /* - * This method avoids the use of Long division, which saves 95% of the time spent, - * by checking that there are enough leading zeros so that the result has a chance - * to fit into a Long again; the remaining edge cases are caught by using the sign - * of the product for overflow detection. - * - * This method is not general purpose because it disallows the (otherwise legal) - * case of Long.MinValue * 1, but that is okay for use in FiniteDuration, since - * Long.MinValue is not a legal `length` anyway. 
- */ - private def safeMul(_a: Long, _b: Long): Long = { - val a = scala.math.abs(_a) - val b = scala.math.abs(_b) - import java.lang.Long.{ numberOfLeadingZeros => leading } - if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow") - val product = a * b - if (product < 0) throw new IllegalArgumentException("multiplication overflow") - if (a == _a ^ b == _b) -product else product - } - - /** - * Return the quotient of this duration and the given integer factor. - * - * @throws ArithmeticException if the factor is 0 - */ - def div(divisor: Long) = this / divisor - - /** - * Return the product of this duration and the given integer factor. - * - * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration - */ - def mul(factor: Long) = this * factor - - def unary_- = Duration(-length, unit) - - final def isFinite() = true - - final override def toCoarsest: FiniteDuration = { - def loop(length: Long, unit: TimeUnit): FiniteDuration = { - def coarserOrThis(coarser: TimeUnit, divider: Int) = - if (length % divider == 0) loop(length / divider, coarser) - else if (unit == this.unit) this - else FiniteDuration(length, unit) - - unit match { - case DAYS => FiniteDuration(length, unit) - case HOURS => coarserOrThis(DAYS, 24) - case MINUTES => coarserOrThis(HOURS, 60) - case SECONDS => coarserOrThis(MINUTES, 60) - case MILLISECONDS => coarserOrThis(SECONDS, 1000) - case MICROSECONDS => coarserOrThis(MILLISECONDS, 1000) - case NANOSECONDS => coarserOrThis(MICROSECONDS, 1000) - } - } - - if (unit == DAYS || length == 0) this - else loop(length, unit) - } - - override def equals(other: Any) = other match { - case x: FiniteDuration => toNanos == x.toNanos - case _ => super.equals(other) - } - override def hashCode = toNanos.toInt -} diff --git a/tests/scala2-library/src/library/scala/concurrent/duration/DurationConversions.scala b/tests/scala2-library/src/library/scala/concurrent/duration/DurationConversions.scala deleted file mode 100644 index 74afa0ca1cdd..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/duration/DurationConversions.scala +++ /dev/null @@ -1,92 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.duration - -import DurationConversions._ - -// Would be nice to limit the visibility of this trait a little bit, -// but it crashes scalac to do so. 
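The trait deleted below backs the familiar numeric duration DSL; a small illustrative sketch of that DSL (not part of the sources):
{{{
import scala.concurrent.duration._

val a = 5.seconds      // via DurationInt
val b = 250.millis
val c = 1.5.minutes    // via DurationDouble; the result must still be finite

println(a + b)         // 5250 milliseconds
println(c.toSeconds)   // 90
}}}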
-trait DurationConversions extends Any { - protected def durationIn(unit: TimeUnit): FiniteDuration - - def nanoseconds = durationIn(NANOSECONDS) - def nanos = nanoseconds - def nanosecond = nanoseconds - def nano = nanoseconds - - def microseconds = durationIn(MICROSECONDS) - def micros = microseconds - def microsecond = microseconds - def micro = microseconds - - def milliseconds = durationIn(MILLISECONDS) - def millis = milliseconds - def millisecond = milliseconds - def milli = milliseconds - - def seconds = durationIn(SECONDS) - def second = seconds - - def minutes = durationIn(MINUTES) - def minute = minutes - - def hours = durationIn(HOURS) - def hour = hours - - def days = durationIn(DAYS) - def day = days - - def nanoseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(nanoseconds) - def nanos[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) - def nanosecond[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) - def nano[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) - - def microseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(microseconds) - def micros[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) - def microsecond[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) - def micro[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) - - def milliseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(milliseconds) - def millis[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) - def millisecond[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) - def milli[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) - - def seconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(seconds) - def second[C](c: C)(implicit ev: Classifier[C]): ev.R = seconds(c) - - def minutes[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(minutes) - def minute[C](c: C)(implicit ev: Classifier[C]): ev.R = minutes(c) - - def hours[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(hours) - def hour[C](c: C)(implicit ev: Classifier[C]): ev.R = hours(c) - - def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(days) - def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c) -} - -/** - * This object just holds some cogs which make the DSL machine work, not for direct consumption. 
- */ -object DurationConversions { - trait Classifier[C] { - type R - def convert(d: FiniteDuration): R - } - - implicit object spanConvert extends Classifier[span.type] { - type R = FiniteDuration - def convert(d: FiniteDuration) = d - } - - implicit object fromNowConvert extends Classifier[fromNow.type] { - type R = Deadline - def convert(d: FiniteDuration) = Deadline.now + d - } - -} diff --git a/tests/scala2-library/src/library/scala/concurrent/duration/package.scala b/tests/scala2-library/src/library/scala/concurrent/duration/package.scala deleted file mode 100644 index d166975445de..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/duration/package.scala +++ /dev/null @@ -1,75 +0,0 @@ -package scala.concurrent - -import scala.language.implicitConversions - -package object duration { - /** - * This object can be used as closing token if you prefer dot-less style but do not want - * to enable language.postfixOps: - * - * {{{ - * import scala.concurrent.duration._ - * - * val duration = 2 seconds span - * }}} - */ - object span - - /** - * This object can be used as closing token for declaring a deadline at some future point - * in time: - * - * {{{ - * import scala.concurrent.duration._ - * - * val deadline = 3 seconds fromNow - * }}} - */ - object fromNow - - type TimeUnit = java.util.concurrent.TimeUnit - final val DAYS = java.util.concurrent.TimeUnit.DAYS - final val HOURS = java.util.concurrent.TimeUnit.HOURS - final val MICROSECONDS = java.util.concurrent.TimeUnit.MICROSECONDS - final val MILLISECONDS = java.util.concurrent.TimeUnit.MILLISECONDS - final val MINUTES = java.util.concurrent.TimeUnit.MINUTES - final val NANOSECONDS = java.util.concurrent.TimeUnit.NANOSECONDS - final val SECONDS = java.util.concurrent.TimeUnit.SECONDS - - implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1.toLong, p._2) - implicit def pairLongToDuration(p: (Long, TimeUnit)): FiniteDuration = Duration(p._1, p._2) - implicit def durationToPair(d: Duration): (Long, TimeUnit) = (d.length, d.unit) - - implicit final class DurationInt(private val n: Int) extends AnyVal with DurationConversions { - override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n.toLong, unit) - } - - implicit final class DurationLong(private val n: Long) extends AnyVal with DurationConversions { - override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit) - } - - implicit final class DurationDouble(private val d: Double) extends AnyVal with DurationConversions { - override protected def durationIn(unit: TimeUnit): FiniteDuration = - Duration(d, unit) match { - case f: FiniteDuration => f - case _ => throw new IllegalArgumentException("Duration DSL not applicable to " + d) - } - } - - /* - * Avoid reflection based invocation by using non-duck type - */ - implicit final class IntMult(private val i: Int) extends AnyVal { - def *(d: Duration) = d * i.toDouble - def *(d: FiniteDuration) = d * i.toLong - } - - implicit final class LongMult(private val i: Long) extends AnyVal { - def *(d: Duration) = d * i.toDouble - def *(d: FiniteDuration) = d * i.toLong - } - - implicit final class DoubleMult(private val f: Double) extends AnyVal { - def *(d: Duration) = d * f.toDouble - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/forkjoin/package.scala b/tests/scala2-library/src/library/scala/concurrent/forkjoin/package.scala deleted file mode 100644 index 889890e30bd8..000000000000 --- 
a/tests/scala2-library/src/library/scala/concurrent/forkjoin/package.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2015, LAMP/EPFL and Typesafe, Inc. ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent -import java.util.{concurrent => juc} -import java.util.Collection - -package object forkjoin { - @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0") - type ForkJoinPool = juc.ForkJoinPool - @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0") - object ForkJoinPool { - type ForkJoinWorkerThreadFactory = juc.ForkJoinPool.ForkJoinWorkerThreadFactory - type ManagedBlocker = juc.ForkJoinPool.ManagedBlocker - - val defaultForkJoinWorkerThreadFactory: ForkJoinWorkerThreadFactory = juc.ForkJoinPool.defaultForkJoinWorkerThreadFactory - def managedBlock(blocker: ManagedBlocker): Unit = juc.ForkJoinPool.managedBlock(blocker) - } - - @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0") - type ForkJoinTask[T] = juc.ForkJoinTask[T] - @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0") - object ForkJoinTask extends scala.Serializable { - def adapt(runnable: Runnable): ForkJoinTask[_] = juc.ForkJoinTask.adapt(runnable) - def adapt[T](callable: juc.Callable[_ <: T]): ForkJoinTask[T] = juc.ForkJoinTask.adapt(callable) - def adapt[T](runnable: Runnable, result: T): ForkJoinTask[T] = juc.ForkJoinTask.adapt(runnable, result) - def getPool(): ForkJoinPool = juc.ForkJoinTask.getPool - def getQueuedTaskCount(): Int = juc.ForkJoinTask.getQueuedTaskCount - def getSurplusQueuedTaskCount(): Int = juc.ForkJoinTask.getSurplusQueuedTaskCount - def helpQuiesce(): Unit = juc.ForkJoinTask.helpQuiesce - def inForkJoinPool(): Boolean = juc.ForkJoinTask.inForkJoinPool - def invokeAll[T <: ForkJoinTask[_]](tasks: Collection[T]): Collection[T] = juc.ForkJoinTask.invokeAll(tasks) - def invokeAll[T](t1: ForkJoinTask[T]): Unit = juc.ForkJoinTask.invokeAll(t1) - def invokeAll[T](tasks: ForkJoinTask[T]*): Unit = juc.ForkJoinTask.invokeAll(tasks: _*) - } - - @deprecated("use java.util.concurrent.ForkJoinWorkerThread directly, instead of this alias", "2.12.0") - type ForkJoinWorkerThread = juc.ForkJoinWorkerThread - @deprecated("use java.util.concurrent.LinkedTransferQueue directly, instead of this alias", "2.12.0") - type LinkedTransferQueue[T] = juc.LinkedTransferQueue[T] - @deprecated("use java.util.concurrent.RecursiveAction directly, instead of this alias", "2.12.0") - type RecursiveAction = juc.RecursiveAction - @deprecated("use java.util.concurrent.RecursiveTask directly, instead of this alias", "2.12.0") - type RecursiveTask[T] = juc.RecursiveTask[T] - - @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0") - type ThreadLocalRandom = juc.ThreadLocalRandom - @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0") - object ThreadLocalRandom extends scala.Serializable { - // For source compatibility, current must declare the empty argument list. - // Having no argument list makes more sense since it doesn't have any side effects, - // but existing callers will break if they invoked it as `current()`. 
- def current() = juc.ThreadLocalRandom.current - } -} diff --git a/tests/scala2-library/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/tests/scala2-library/src/library/scala/concurrent/impl/ExecutionContextImpl.scala deleted file mode 100644 index f54a6b9dbde2..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ /dev/null @@ -1,176 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.impl - -import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, ForkJoinTask, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit } -import java.util.concurrent.atomic.AtomicInteger -import java.util.Collection -import scala.concurrent.{ BlockContext, ExecutionContext, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService } -import scala.annotation.tailrec - - -private[scala] class ExecutionContextImpl private[impl] (val executor: Executor, val reporter: Throwable => Unit) extends ExecutionContextExecutor { - require(executor ne null, "Executor must not be null") - override def execute(runnable: Runnable) = executor execute runnable - override def reportFailure(t: Throwable) = reporter(t) -} - - -private[concurrent] object ExecutionContextImpl { - - // Implement BlockContext on FJP threads - final class DefaultThreadFactory( - daemonic: Boolean, - maxThreads: Int, - prefix: String, - uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { - - require(prefix ne null, "DefaultThreadFactory.prefix must be non null") - require(maxThreads > 0, "DefaultThreadFactory.maxThreads must be greater than 0") - - private final val currentNumberOfThreads = new AtomicInteger(0) - - @tailrec private final def reserveThread(): Boolean = currentNumberOfThreads.get() match { - case `maxThreads` | Int.`MaxValue` => false - case other => currentNumberOfThreads.compareAndSet(other, other + 1) || reserveThread() - } - - @tailrec private final def deregisterThread(): Boolean = currentNumberOfThreads.get() match { - case 0 => false - case other => currentNumberOfThreads.compareAndSet(other, other - 1) || deregisterThread() - } - - def wire[T <: Thread](thread: T): T = { - thread.setDaemon(daemonic) - thread.setUncaughtExceptionHandler(uncaught) - thread.setName(prefix + "-" + thread.getId()) - thread - } - - // As per ThreadFactory contract newThread should return `null` if cannot create new thread. 
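Editorial aside: the `reserveThread`/`deregisterThread` pair above caps the number of live threads with a lock-free counter rather than a lock. A standalone sketch of that pattern follows; the names are illustrative, it uses only plain `java.util.concurrent`, and it is not the deleted implementation itself.

{{{
import java.util.concurrent.atomic.AtomicInteger
import scala.annotation.tailrec

final class Capacity(max: Int) {
  private val count = new AtomicInteger(0)

  // Retry the CAS until we either claim a slot or observe that the pool is full.
  @tailrec def reserve(): Boolean = count.get() match {
    case `max` | Int.MaxValue => false
    case n                    => count.compareAndSet(n, n + 1) || reserve()
  }

  // Give the slot back; returns false if nothing was reserved.
  @tailrec def release(): Boolean = count.get() match {
    case 0 => false
    case n => count.compareAndSet(n, n - 1) || release()
  }
}
}}}

The `case 0 => false` guard mirrors the original `deregisterThread`, which refuses to let the counter go negative.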
- def newThread(runnable: Runnable): Thread = - if (reserveThread()) - wire(new Thread(new Runnable { - // We have to decrement the current thread count when the thread exits - override def run() = try runnable.run() finally deregisterThread() - })) else null - - def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = - if (reserveThread()) { - wire(new ForkJoinWorkerThread(fjp) with BlockContext { - // We have to decrement the current thread count when the thread exits - final override def onTermination(exception: Throwable): Unit = deregisterThread() - final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = { - var result: T = null.asInstanceOf[T] - ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker { - @volatile var isdone = false - override def block(): Boolean = { - result = try { - // When we block, switch out the BlockContext temporarily so that nested blocking does not created N new Threads - BlockContext.withBlockContext(BlockContext.defaultBlockContext) { thunk } - } finally { - isdone = true - } - - true - } - override def isReleasable = isdone - }) - result - } - }) - } else null - } - - def createDefaultExecutorService(reporter: Throwable => Unit): ExecutorService = { - def getInt(name: String, default: String) = (try System.getProperty(name, default) catch { - case e: SecurityException => default - }) match { - case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt - case other => other.toInt - } - - def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling) - val numThreads = getInt("scala.concurrent.context.numThreads", "x1") - // The hard limit on the number of active threads that the thread factory will produce - // scala/bug#8955 Deadlocks can happen if maxNoOfThreads is too low, although we're currently not sure - // about what the exact threshold is. numThreads + 256 is conservatively high. - val maxNoOfThreads = getInt("scala.concurrent.context.maxThreads", "x1") - - val desiredParallelism = range( - getInt("scala.concurrent.context.minThreads", "1"), - numThreads, - maxNoOfThreads) - - // The thread factory must provide additional threads to support managed blocking. 
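Editorial aside: the `blockOn` override above is what makes `scala.concurrent.blocking` effective on this pool. Wrapping a blocking call in `blocking { ... }` routes it through `ForkJoinPool.managedBlock`, so the pool can compensate with extra threads instead of starving. A minimal usage sketch, using only the public API:

{{{
import scala.concurrent._
import ExecutionContext.Implicits.global

val slow: Future[Int] = Future {
  blocking {
    Thread.sleep(1000)   // stands in for a blocking I/O call
    42
  }
}
}}}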
- val maxExtraThreads = getInt("scala.concurrent.context.maxExtraThreads", "256") - - val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler { - override def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause) - } - - val threadFactory = new ExecutionContextImpl.DefaultThreadFactory(daemonic = true, - maxThreads = maxNoOfThreads + maxExtraThreads, - prefix = "scala-execution-context-global", - uncaught = uncaughtExceptionHandler) - - new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) { - override def execute(runnable: Runnable): Unit = { - val fjt: ForkJoinTask[_] = runnable match { - case t: ForkJoinTask[_] => t - case r => new ExecutionContextImpl.AdaptedForkJoinTask(r) - } - Thread.currentThread match { - case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork() - case _ => super.execute(fjt) - } - } - } - } - - final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] { - final override def setRawResult(u: Unit): Unit = () - final override def getRawResult(): Unit = () - final override def exec(): Boolean = try { runnable.run(); true } catch { - case anything: Throwable => - val t = Thread.currentThread - t.getUncaughtExceptionHandler match { - case null => - case some => some.uncaughtException(t, anything) - } - throw anything - } - } - - def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = - new ExecutionContextImpl(Option(e).getOrElse(createDefaultExecutorService(reporter)), reporter) - - def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): - ExecutionContextImpl with ExecutionContextExecutorService = { - new ExecutionContextImpl(Option(es).getOrElse(createDefaultExecutorService(reporter)), reporter) - with ExecutionContextExecutorService { - final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService] - override def execute(command: Runnable) = executor.execute(command) - override def shutdown() { asExecutorService.shutdown() } - override def shutdownNow() = asExecutorService.shutdownNow() - override def isShutdown = asExecutorService.isShutdown - override def isTerminated = asExecutorService.isTerminated - override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit) - override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable) - override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t) - override def submit(runnable: Runnable) = asExecutorService.submit(runnable) - override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables) - override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit) - override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables) - override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit) - } - } -} - - diff --git a/tests/scala2-library/src/library/scala/concurrent/impl/Promise.scala b/tests/scala2-library/src/library/scala/concurrent/impl/Promise.scala deleted file mode 100644 index 7fcc8c9f2dd6..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/impl/Promise.scala +++ /dev/null @@ -1,408 +0,0 @@ 
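Editorial aside: the parallelism of the pool constructed above is derived from the `scala.concurrent.context.*` system properties, where a value such as `"x2"` means a multiple of the available processors. A hypothetical helper mirroring that `getInt` convention (the name and shape are mine, not the deleted code):

{{{
def parseThreadCount(prop: String): Int =
  if (prop.charAt(0) == 'x')
    math.ceil(Runtime.getRuntime.availableProcessors * prop.substring(1).toDouble).toInt
  else
    prop.toInt

parseThreadCount("x2")  // twice the number of available processors
parseThreadCount("8")   // exactly 8
}}}

For example, running with `-Dscala.concurrent.context.numThreads=x2 -Dscala.concurrent.context.maxThreads=x2` should pin the default pool's parallelism to twice the core count.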
-/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.impl - -import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException } -import scala.concurrent.Future.InternalCallbackExecutor -import scala.concurrent.duration.{ Duration, FiniteDuration } -import scala.annotation.tailrec -import scala.util.control.NonFatal -import scala.util.{ Try, Success, Failure } - -import java.util.concurrent.locks.AbstractQueuedSynchronizer -import java.util.concurrent.atomic.AtomicReference - -private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] { - def future: this.type = this - - import scala.concurrent.Future - import scala.concurrent.impl.Promise.DefaultPromise - - override def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = { - val p = new DefaultPromise[S]() - onComplete { result => p.complete(try f(result) catch { case NonFatal(t) => Failure(t) }) } - p.future - } - - // If possible, link DefaultPromises to avoid space leaks - override def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = { - val p = new DefaultPromise[S]() - onComplete { - v => try f(v) match { - case fut if fut eq this => p complete v.asInstanceOf[Try[S]] - case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p) - case fut => p completeWith fut - } catch { case NonFatal(t) => p failure t } - } - p.future - } - - override def toString: String = value match { - case Some(result) => "Future("+result+")" - case None => "Future()" - } -} - -/* Precondition: `executor` is prepared, i.e., `executor` has been returned from invocation of `prepare` on some other `ExecutionContext`. - */ -private final class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable { - // must be filled in before running it - var value: Try[T] = null - - override def run() = { - require(value ne null) // must set value to non-null before running! - try onComplete(value) catch { case NonFatal(e) => executor reportFailure e } - } - - def executeWithValue(v: Try[T]): Unit = { - require(value eq null) // can't complete it twice - value = v - // Note that we cannot prepare the ExecutionContext at this point, since we might - // already be running on a different thread! - try executor.execute(this) catch { case NonFatal(t) => executor reportFailure t } - } -} - -private[concurrent] object Promise { - - private def resolveTry[T](source: Try[T]): Try[T] = source match { - case Failure(t) => resolver(t) - case _ => source - } - - private def resolver[T](throwable: Throwable): Try[T] = throwable match { - case t: scala.runtime.NonLocalReturnControl[_] => Success(t.value.asInstanceOf[T]) - case t: scala.util.control.ControlThrowable => Failure(new ExecutionException("Boxed ControlThrowable", t)) - case t: InterruptedException => Failure(new ExecutionException("Boxed InterruptedException", t)) - case e: Error => Failure(new ExecutionException("Boxed Error", e)) - case t => Failure(t) - } - - /** - * Latch used to implement waiting on a DefaultPromise's result. 
- * - * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java - * Written by Doug Lea with assistance from members of JCP JSR-166 - * Expert Group and released to the public domain, as explained at - * http://creativecommons.org/publicdomain/zero/1.0/ - */ - private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) { - override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1 - override protected def tryReleaseShared(ignore: Int): Boolean = { - setState(1) - true - } - override def apply(ignored: Try[T]): Unit = releaseShared(1) - } - - - /** Default promise implementation. - * - * A DefaultPromise has three possible states. It can be: - * - * 1. Incomplete, with an associated list of callbacks waiting on completion. - * 2. Complete, with a result. - * 3. Linked to another DefaultPromise. - * - * If a DefaultPromise is linked to another DefaultPromise, it will - * delegate all its operations to that other promise. This means that two - * DefaultPromises that are linked will appear, to external callers, to have - * exactly the same state and behaviour. For instance, both will appear as - * incomplete, or as complete with the same result value. - * - * A DefaultPromise stores its state entirely in the AnyRef cell exposed by - * AtomicReference. The type of object stored in the cell fully describes the - * current state of the promise. - * - * 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks - * to call when it is eventually completed. - * 2. Try[T] - The promise is complete and now contains its value. - * 3. DefaultPromise[T] - The promise is linked to another promise. - * - * The ability to link DefaultPromises is needed to prevent memory leaks when - * using Future.flatMap. The previous implementation of Future.flatMap used - * onComplete handlers to propagate the ultimate value of a flatMap operation - * to its promise. Recursive calls to flatMap built a chain of onComplete - * handlers and promises. Unfortunately none of the handlers or promises in - * the chain could be collected until the handlers had been called and - * detached, which only happened when the final flatMap future was completed. - * (In some situations, such as infinite streams, this would never actually - * happen.) Because of the fact that the promise implementation internally - * created references between promises, and these references were invisible to - * user code, it was easy for user code to accidentally build large chains of - * promises and thereby leak memory. - * - * The problem of leaks is solved by automatically breaking these chains of - * promises, so that promises don't refer to each other in a long chain. This - * allows each promise to be individually collected. The idea is to "flatten" - * the chain of promises, so that instead of each promise pointing to its - * neighbour, they instead point directly the promise at the root of the - * chain. This means that only the root promise is referenced, and all the - * other promises are available for garbage collection as soon as they're no - * longer referenced by user code. - * - * To make the chains flattenable, the concept of linking promises together - * needed to become an explicit feature of the DefaultPromise implementation, - * so that the implementation to navigate and rewire links as needed. 
The idea - * of linking promises is based on the [[Twitter promise implementation - * https://github.com/twitter/util/blob/master/util-core/src/main/scala/com/twitter/util/Promise.scala]]. - * - * In practice, flattening the chain cannot always be done perfectly. When a - * promise is added to the end of the chain, it scans the chain and links - * directly to the root promise. This prevents the chain from growing forwards - * But the root promise for a chain can change, causing the chain to grow - * backwards, and leaving all previously-linked promise pointing at a promise - * which is no longer the root promise. - * - * To mitigate the problem of the root promise changing, whenever a promise's - * methods are called, and it needs a reference to its root promise it calls - * the `compressedRoot()` method. This method re-scans the promise chain to - * get the root promise, and also compresses its links so that it links - * directly to whatever the current root promise is. This ensures that the - * chain is flattened whenever `compressedRoot()` is called. And since - * `compressedRoot()` is called at every possible opportunity (when getting a - * promise's value, when adding an onComplete handler, etc), this will happen - * frequently. Unfortunately, even this eager relinking doesn't absolutely - * guarantee that the chain will be flattened and that leaks cannot occur. - * However eager relinking does greatly reduce the chance that leaks will - * occur. - * - * Future.flatMap links DefaultPromises together by calling the `linkRootOf` - * method. This is the only externally visible interface to linked - * DefaultPromises, and `linkedRootOf` is currently only designed to be called - * by Future.flatMap. - */ - // Left non-final to enable addition of extra fields by Java/Scala converters - // in scala-java8-compat. - class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] { - - /** Get the root promise for this promise, compressing the link chain to that - * promise if necessary. - * - * For promises that are not linked, the result of calling - * `compressedRoot()` will the promise itself. However for linked promises, - * this method will traverse each link until it locates the root promise at - * the base of the link chain. - * - * As a side effect of calling this method, the link from this promise back - * to the root promise will be updated ("compressed") to point directly to - * the root promise. This allows intermediate promises in the link chain to - * be garbage collected. Also, subsequent calls to this method should be - * faster as the link chain will be shorter. - */ - private def compressedRoot(): DefaultPromise[T] = - get() match { - case linked: DefaultPromise[_] => compressedRoot(linked) - case _ => this - } - - @tailrec - private[this] final def compressedRoot(linked: DefaultPromise[_]): DefaultPromise[T] = { - val target = linked.asInstanceOf[DefaultPromise[T]].root - if (linked eq target) target - else if (compareAndSet(linked, target)) target - else { - get() match { - case newLinked: DefaultPromise[_] => compressedRoot(newLinked) - case _ => this - } - } - } - - /** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`. - * The `compressedRoot()` method should be called instead of this method, as it is important - * to compress the link chain whenever possible. 
- */ - @tailrec - private def root: DefaultPromise[T] = - get() match { - case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root - case _ => this - } - - /** Try waiting for this promise to be completed. - */ - protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) { - import Duration.Undefined - atMost match { - case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") - case Duration.Inf => - val l = new CompletionLatch[T]() - onComplete(l)(InternalCallbackExecutor) - l.acquireSharedInterruptibly(1) - case Duration.MinusInf => // Drop out - case f: FiniteDuration => - if (f > Duration.Zero) { - val l = new CompletionLatch[T]() - onComplete(l)(InternalCallbackExecutor) - l.tryAcquireSharedNanos(1, f.toNanos) - } - } - - isCompleted - } else true // Already completed - - @throws(classOf[TimeoutException]) - @throws(classOf[InterruptedException]) - final def ready(atMost: Duration)(implicit permit: CanAwait): this.type = - if (tryAwait(atMost)) this - else throw new TimeoutException("Futures timed out after [" + atMost + "]") - - @throws(classOf[Exception]) - final def result(atMost: Duration)(implicit permit: CanAwait): T = - ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here - - def value: Option[Try[T]] = value0 - - @tailrec - private def value0: Option[Try[T]] = get() match { - case c: Try[_] => Some(c.asInstanceOf[Try[T]]) - case dp: DefaultPromise[_] => compressedRoot(dp).value0 - case _ => None - } - - override final def isCompleted: Boolean = isCompleted0 - - @tailrec - private def isCompleted0: Boolean = get() match { - case _: Try[_] => true - case dp: DefaultPromise[_] => compressedRoot(dp).isCompleted0 - case _ => false - } - - final def tryComplete(value: Try[T]): Boolean = { - val resolved = resolveTry(value) - tryCompleteAndGetListeners(resolved) match { - case null => false - case rs if rs.isEmpty => true - case rs => rs.foreach(r => r.executeWithValue(resolved)); true - } - } - - /** Called by `tryComplete` to store the resolved value and get the list of - * listeners, or `null` if it is already completed. - */ - @tailrec - private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = { - get() match { - case raw: List[_] => - val cur = raw.asInstanceOf[List[CallbackRunnable[T]]] - if (compareAndSet(cur, v)) cur else tryCompleteAndGetListeners(v) - case dp: DefaultPromise[_] => compressedRoot(dp).tryCompleteAndGetListeners(v) - case _ => null - } - } - - final def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = - dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func)) - - /** Tries to add the callback, if already completed, it dispatches the callback to be executed. - * Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks - * to the root promise when linking two promises together. - */ - @tailrec - private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = { - get() match { - case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]]) - case dp: DefaultPromise[_] => compressedRoot(dp).dispatchOrAddCallback(runnable) - case listeners: List[_] => if (compareAndSet(listeners, runnable :: listeners)) () - else dispatchOrAddCallback(runnable) - } - } - - /** Link this promise to the root of another promise using `link()`. Should only be - * be called by transformWith. 
- */ - protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot()) - - /** Link this promise to another promise so that both promises share the same - * externally-visible state. Depending on the current state of this promise, this - * may involve different things. For example, any onComplete listeners will need - * to be transferred. - * - * If this promise is already completed, then the same effect as linking - - * sharing the same completed value - is achieved by simply sending this - * promise's result to the target promise. - */ - @tailrec - private def link(target: DefaultPromise[T]): Unit = if (this ne target) { - get() match { - case r: Try[_] => - if (!target.tryComplete(r.asInstanceOf[Try[T]])) - throw new IllegalStateException("Cannot link completed promises together") - case dp: DefaultPromise[_] => - compressedRoot(dp).link(target) - case listeners: List[_] if compareAndSet(listeners, target) => - if (listeners.nonEmpty) - listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_)) - case _ => - link(target) - } - } - } - - /** An already completed Future is given its result at creation. - * - * Useful in Future-composition when a value to contribute is already available. - */ - object KeptPromise { - import scala.concurrent.Future - import scala.reflect.ClassTag - - private[this] sealed trait Kept[T] extends Promise[T] { - def result: Try[T] - - override def value: Option[Try[T]] = Some(result) - - override def isCompleted: Boolean = true - - override def tryComplete(value: Try[T]): Boolean = false - - override def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = - (new CallbackRunnable(executor.prepare(), func)).executeWithValue(result) - - override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this - - override def result(atMost: Duration)(implicit permit: CanAwait): T = result.get - } - - private[this] final class Successful[T](val result: Success[T]) extends Kept[T] { - override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = () - override def failed: Future[Throwable] = KeptPromise(Failure(new NoSuchElementException("Future.failed not completed with a throwable."))).future - override def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this - override def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this - override def fallbackTo[U >: T](that: Future[U]): Future[U] = this - } - - private[this] final class Failed[T](val result: Failure[T]) extends Kept[T] { - private[this] final def thisAs[S]: Future[S] = future.asInstanceOf[Future[S]] - - override def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = () - override def failed: Future[Throwable] = KeptPromise(Success(result.exception)).future - override def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = () - override def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = thisAs[S] - override def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = thisAs[S] - override def flatten[S](implicit ev: T <:< Future[S]): Future[S] = thisAs[S] - override def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = this - override def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = 
thisAs[S] - override def zip[U](that: Future[U]): Future[(T, U)] = thisAs[(T,U)] - override def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = thisAs[R] - override def fallbackTo[U >: T](that: Future[U]): Future[U] = - if (this eq that) this else that.recoverWith({ case _ => this })(InternalCallbackExecutor) - override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = thisAs[S] - } - - def apply[T](result: Try[T]): scala.concurrent.Promise[T] = - resolveTry(result) match { - case s @ Success(_) => new Successful(s) - case f @ Failure(_) => new Failed(f) - } - } - -} diff --git a/tests/scala2-library/src/library/scala/concurrent/package.scala b/tests/scala2-library/src/library/scala/concurrent/package.scala deleted file mode 100644 index 0695ee335194..000000000000 --- a/tests/scala2-library/src/library/scala/concurrent/package.scala +++ /dev/null @@ -1,217 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.concurrent.duration.Duration -import scala.annotation.implicitNotFound - -/** This package object contains primitives for concurrent and parallel programming. - * - * == Guide == - * - * A more detailed guide to Futures and Promises, including discussion and examples - * can be found at - * [[http://docs.scala-lang.org/overviews/core/futures.html]]. - * - * == Common Imports == - * - * When working with Futures, you will often find that importing the whole concurrent - * package is convenient: - * - * {{{ - * import scala.concurrent._ - * }}} - * - * When using things like `Future`s, it is often required to have an implicit `ExecutionContext` - * in scope. The general advice for these implicits are as follows. - * - * If the code in question is a class or method definition, and no `ExecutionContext` is available, - * request one from the caller by adding an implicit parameter list: - * - * {{{ - * def myMethod(myParam: MyType)(implicit ec: ExecutionContext) = … - * //Or - * class MyClass(myParam: MyType)(implicit ec: ExecutionContext) { … } - * }}} - * - * This allows the caller of the method, or creator of the instance of the class, to decide which - * `ExecutionContext` should be used. - * - * For typical REPL usage and experimentation, importing the global `ExecutionContext` is often desired. - * - * {{{ - * import scala.concurrent.ExcutionContext.Implicits.global - * }}} - * - * == Specifying Durations == - * - * Operations often require a duration to be specified. A duration DSL is available - * to make defining these easier: - * - * {{{ - * import scala.concurrent.duration._ - * val d: Duration = 10.seconds - * }}} - * - * == Using Futures For Non-blocking Computation == - * - * Basic use of futures is easy with the factory method on Future, which executes a - * provided function asynchronously, handing you back a future result of that function - * without blocking the current thread. 
In order to create the Future you will need - * either an implicit or explicit ExecutionContext to be provided: - * - * {{{ - * import scala.concurrent._ - * import ExecutionContext.Implicits.global // implicit execution context - * - * val firstZebra: Future[Int] = Future { - * val source = scala.io.Source.fromFile("/etc/dictionaries-common/words") - * source.toSeq.indexOfSlice("zebra") - * } - * }}} - * - * == Avoid Blocking == - * - * Although blocking is possible in order to await results (with a mandatory timeout duration): - * - * {{{ - * import scala.concurrent.duration._ - * Await.result(firstZebra, 10.seconds) - * }}} - * - * and although this is sometimes necessary to do, in particular for testing purposes, blocking - * in general is discouraged when working with Futures and concurrency in order to avoid - * potential deadlocks and improve performance. Instead, use callbacks or combinators to - * remain in the future domain: - * - * {{{ - * val animalRange: Future[Int] = for { - * aardvark <- firstAardvark - * zebra <- firstZebra - * } yield zebra - aardvark - * - * animalRange.onSuccess { - * case x if x > 500000 => println("It's a long way from Aardvark to Zebra") - * } - * }}} - */ -package object concurrent { - type ExecutionException = java.util.concurrent.ExecutionException - type CancellationException = java.util.concurrent.CancellationException - type TimeoutException = java.util.concurrent.TimeoutException - - /** Starts an asynchronous computation and returns a `Future` object with the result of that computation. - * - * The result becomes available once the asynchronous computation is completed. - * - * @tparam T the type of the result - * @param body the asynchronous computation - * @param executor the execution context on which the future is run - * @return the `Future` holding the result of the computation - */ - @deprecated("use `Future { ... }` instead", "2.11.0") - // removal planned for 2.13.0 - def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body) - - /** Creates a promise object which can be completed with a value or an exception. - * - * @tparam T the type of the value in the promise - * @return the newly created `Promise` object - */ - @deprecated("use `Promise[T]()` instead", "2.11.0") - // removal planned for 2.13.0 - def promise[T](): Promise[T] = Promise[T]() - - /** Used to designate a piece of code which potentially blocks, allowing the current [[BlockContext]] to adjust - * the runtime's behavior. - * Properly marking blocking code may improve performance or avoid deadlocks. - * - * Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`. - * - * @param body A piece of code which contains potentially blocking or long running calls. - * @throws CancellationException if the computation was cancelled - * @throws InterruptedException in the case that a wait within the blocking `body` was interrupted - */ - @throws(classOf[Exception]) - def blocking[T](body: =>T): T = BlockContext.current.blockOn(body)(scala.concurrent.AwaitPermission) -} - -package concurrent { - /** - * This marker trait is used by [[Await]] to ensure that [[Awaitable.ready]] and [[Awaitable.result]] - * are not directly called by user code. An implicit instance of this trait is only available when - * user code is currently calling the methods on [[Await]]. 
- */ - @implicitNotFound("Don't call `Awaitable` methods directly, use the `Await` object.") - sealed trait CanAwait - - /** - * Internal usage only, implementation detail. - */ - private[concurrent] object AwaitPermission extends CanAwait - - /** - * `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances. - * - * While occasionally useful, e.g. for testing, it is recommended that you avoid Await whenever possible— - * instead favoring combinators and/or callbacks. - * Await's `result` and `ready` methods will block the calling thread's execution until they return, - * which will cause performance degradation, and possibly, deadlock issues. - */ - object Await { - /** - * Await the "completed" state of an `Awaitable`. - * - * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that - * the underlying [[ExecutionContext]] is given an opportunity to properly manage the blocking. - * - * WARNING: It is strongly discouraged to supply lengthy timeouts since the progress of the calling thread will be - * suspended—blocked—until either the `Awaitable` becomes ready or the timeout expires. - * - * @param awaitable - * the `Awaitable` to be awaited - * @param atMost - * maximum wait time, which may be negative (no waiting is done), - * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive - * duration - * @return the `awaitable` - * @throws InterruptedException if the current thread is interrupted while waiting - * @throws TimeoutException if after waiting for the specified time this `Awaitable` is still not ready - * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]] - */ - @throws(classOf[TimeoutException]) - @throws(classOf[InterruptedException]) - def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type = - blocking(awaitable.ready(atMost)(AwaitPermission)) - - /** - * Await and return the result (of type `T`) of an `Awaitable`. - * - * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that - * the underlying [[ExecutionContext]] is given an opportunity to properly manage the blocking. - * - * WARNING: It is strongly discouraged to supply lengthy timeouts since the progress of the calling thread will be - * suspended—blocked—until either the `Awaitable` has a result or the timeout expires. 
- * - * @param awaitable - * the `Awaitable` to be awaited - * @param atMost - * maximum wait time, which may be negative (no waiting is done), - * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive - * duration - * @return the result value if `awaitable` is completed within the specific maximum wait time - * @throws InterruptedException if the current thread is interrupted while waiting - * @throws TimeoutException if after waiting for the specified time `awaitable` is still not ready - * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]] - */ - @throws(classOf[Exception]) - def result[T](awaitable: Awaitable[T], atMost: Duration): T = - blocking(awaitable.result(atMost)(AwaitPermission)) - } -} diff --git a/tests/scala2-library/src/library/scala/deprecated.scala b/tests/scala2-library/src/library/scala/deprecated.scala deleted file mode 100644 index a57745dbea78..000000000000 --- a/tests/scala2-library/src/library/scala/deprecated.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.annotation.meta._ - -/** An annotation that designates that a definition is deprecated. - * A deprecation warning is issued upon usage of the annotated definition. - * - * Library authors should state the library's deprecation policy in their documentation to give - * developers guidance on how long a deprecated definition will be preserved. - * - * Library authors should prepend the name of their library to the version number to help - * developers distinguish deprecations coming from different libraries: - * - * {{{ - * @deprecated("this method will be removed", "FooLib 12.0") - * def oldMethod(x: Int) = ... - * }}} - * - * The compiler will emit deprecation warnings grouped by library and version: - * - * {{{ - * oldMethod(1) - * oldMethod(2) - * aDeprecatedMethodFromLibraryBar(3, 4) - * - * // warning: there was one deprecation warning (since BarLib 3.2) - * // warning: there were two deprecation warnings (since FooLib 12.0) - * // warning: there were three deprecation warnings in total; re-run with -deprecation for details - * }}} - * - * '''`@deprecated` in the Scala language and its standard library'''
- * - * A deprecated element of the Scala language or a definition in the Scala standard library will - * be preserved at least for the current major version. - * - * This means that an element deprecated in some 2.12.x release will be preserved in - * all 2.12.x releases, but may be removed in 2.13. (A deprecated element - * might be kept longer to ease migration. Developers should not rely on this.) - * - * '''Special deprecation policy for Scala 2.12'''
- * The Scala team has decided to enact a special deprecation policy for Scala 2.12:
- * - * As an upgrade from 2.11 to 2.12 also requires upgrading from Java 6 to Java 8, - * deprecated elements will not normally be removed in this release, to ease migration - * and cross-building. - * - * @see The official documentation on [[http://www.scala-lang.org/news/2.11.0/#binary-compatibility binary compatibility]]. - * @param message the message to print during compilation if the definition is accessed - * @param since a string identifying the first version in which the definition was deprecated - * @since 2.3 - * @see [[scala.deprecatedInheritance]] - * @see [[scala.deprecatedOverriding]] - * @see [[scala.deprecatedName]] - */ -@getter @setter @beanGetter @beanSetter -class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/deprecatedInheritance.scala b/tests/scala2-library/src/library/scala/deprecatedInheritance.scala deleted file mode 100644 index 994eac9ed839..000000000000 --- a/tests/scala2-library/src/library/scala/deprecatedInheritance.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.annotation.meta._ - -/** An annotation that designates that inheriting from a class is deprecated. - * - * This is usually done to warn about a non-final class being made final in a future version. - * Sub-classing such a class then generates a warning. - * - * No warnings are generated if the subclass is in the same compilation unit. - * - * Library authors should state the library's deprecation policy in their documentation to give - * developers guidance on when a type annotated with `@deprecatedInheritance` will be `final`ized. - * - * Library authors should prepend the name of their library to the version number to help - * developers distinguish deprecations coming from different libraries: - * - * {{{ - * @deprecatedInheritance("this class will be made final", "FooLib 12.0") - * class Foo - * }}} - * - * {{{ - * val foo = new Foo // no deprecation warning - * class Bar extends Foo - * // warning: inheritance from class Foo is deprecated (since FooLib 12.0): this class will be made final - * // class Bar extends Foo - * // ^ - * }}} - * - * @param message the message to print during compilation if the class was sub-classed - * @param since a string identifying the first version in which inheritance was deprecated - * @since 2.10 - * @see [[scala.deprecated]] - * @see [[scala.deprecatedOverriding]] - * @see [[scala.deprecatedName]] - */ -@getter @setter @beanGetter @beanSetter -class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/deprecatedName.scala b/tests/scala2-library/src/library/scala/deprecatedName.scala deleted file mode 100644 index f8c6bd32ad77..000000000000 --- a/tests/scala2-library/src/library/scala/deprecatedName.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.annotation.meta._ - - - /** An annotation that designates that the name of a parameter is deprecated. 
- * - * Using this name in a named argument generates a deprecation warning. - * - * Library authors should state the library's deprecation policy in their documentation to give - * developers guidance on how long a deprecated name will be preserved. - * - * Library authors should prepend the name of their library to the version number to help - * developers distinguish deprecations coming from different libraries: - * - * {{{ - * def inc(x: Int, @deprecatedName('y, "FooLib 12.0") n: Int): Int = x + n - * inc(1, y = 2) - * }}} - * will produce the following warning: - * {{{ - * warning: the parameter name y is deprecated (since FooLib 12.0): use n instead - * inc(1, y = 2) - * ^ - * }}} - * - * @since 2.8.1 - * @see [[scala.deprecated]] - * @see [[scala.deprecatedInheritance]] - * @see [[scala.deprecatedOverriding]] - */ -@param -class deprecatedName(name: Symbol = Symbol(""), since: String = "") extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/deprecatedOverriding.scala b/tests/scala2-library/src/library/scala/deprecatedOverriding.scala deleted file mode 100644 index 5be6830b2752..000000000000 --- a/tests/scala2-library/src/library/scala/deprecatedOverriding.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.annotation.meta._ - -/** An annotation that designates that overriding a member is deprecated. - * - * Overriding such a member in a sub-class then generates a warning. - * - * Library authors should state the library's deprecation policy in their documentation to give - * developers guidance on when a method annotated with `@deprecatedOverriding` will be `final`ized. 
- * - * Library authors should prepend the name of their library to the version number to help - * developers distinguish deprecations coming from different libraries: - * - * {{{ - * class Foo { - * @deprecatedOverriding("this method will be made final", "FooLib 12.0") - * def add(x: Int, y: Int) = x + y - * } - * }}} - * - * {{{ - * class Bar extends Foo // no deprecation warning - * class Baz extends Foo { - * override def add(x: Int, y: Int) = x - y - * } - * // warning: overriding method add in class Foo is deprecated (since FooLib 12.0): this method will be made final - * // override def add(x: Int, y: Int) = x - y - * // ^ - * }}} - * - * @param message the message to print during compilation if the member was overridden - * @param since a string identifying the first version in which overriding was deprecated - * @since 2.10 - * @see [[scala.deprecated]] - * @see [[scala.deprecatedInheritance]] - * @see [[scala.deprecatedName]] - */ -@getter @setter @beanGetter @beanSetter -class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/inline.scala b/tests/scala2-library/src/library/scala/inline.scala deleted file mode 100644 index f188ccab07c1..000000000000 --- a/tests/scala2-library/src/library/scala/inline.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** - * An annotation on methods that requests that the compiler should try especially hard to inline the - * annotated method. The annotation can be used at definition site or at callsite. - * - * {{{ - * @inline final def f1(x: Int) = x - * @noinline final def f2(x: Int) = x - * final def f3(x: Int) = x - * - * def t1 = f1(1) // inlined if possible - * def t2 = f2(1) // not inlined - * def t3 = f3(1) // may be inlined (heuristics) - * def t4 = f1(1): @noinline // not inlined (override at callsite) - * def t5 = f2(1): @inline // inlined if possible (override at callsite) - * def t6 = f3(1): @inline // inlined if possible - * def t7 = f3(1): @noinline // not inlined - * } - * }}} - * - * Note: parentheses are required when annotating a callsite within a larger expression. - * - * {{{ - * def t1 = f1(1) + f1(1): @noinline // equivalent to (f1(1) + f1(1)): @noinline - * def t2 = f1(1) + (f1(1): @noinline) // the second call to f1 is not inlined - * }}} - * - * @author Lex Spoon - * @version 1.0, 2007-5-21 - */ -class inline extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/io/AnsiColor.scala b/tests/scala2-library/src/library/scala/io/AnsiColor.scala deleted file mode 100644 index df589bc66c0d..000000000000 --- a/tests/scala2-library/src/library/scala/io/AnsiColor.scala +++ /dev/null @@ -1,164 +0,0 @@ -package scala -package io - -/** ANSI escape codes providing control over text formatting and color on supporting text terminals. - * - * ==ANSI Style and Control Codes== - * - * This group of escape codes provides control over text styling. 
For example, to turn on reverse video with bold and
- * then turn off all styling embed these codes,
- *
- * {{{
- * import io.AnsiColor._
- *
- * object ColorDemo extends App {
- *
- *   println(s"${REVERSED}${BOLD}Hello 1979!${RESET}")
- * }
- * }}}
- *
- * ==Foreground and Background Colors==
- *
- * Embedding ANSI color codes in text output will control the text foreground and background colors.
- *
- *   Foreground   Background
- *   ----------   ----------
- *   BLACK        BLACK_B
- *   RED          RED_B
- *   GREEN        GREEN_B
- *   YELLOW       YELLOW_B
- *   BLUE         BLUE_B
- *   MAGENTA      MAGENTA_B
- *   CYAN         CYAN_B
- *   WHITE        WHITE_B
- * - * @groupname style-control ANSI Style and Control Codes - * @groupprio style-control 101 - * - * @groupname color-black ANSI Black - * @groupdesc color-black
 
- * @groupprio color-black 110 - * - * @groupname color-red ANSI Red - * @groupdesc color-red
 
- * @groupprio color-red 120 - * - * @groupname color-green ANSI Green - * @groupdesc color-green
 
- * @groupprio color-green 130 - * - * @groupname color-yellow ANSI Yellow - * @groupdesc color-yellow
 
- * @groupprio color-yellow 140 - * - * @groupname color-blue ANSI Blue - * @groupdesc color-blue
 
- * @groupprio color-blue 150 - * - * @groupname color-magenta ANSI Magenta - * @groupdesc color-magenta
- * @groupprio color-magenta 160 - * - * @groupname color-cyan ANSI Cyan - * @groupdesc color-cyan
- * @groupprio color-cyan 170 - * - * @groupname color-white ANSI White - * @groupdesc color-white
- * @groupprio color-white 180 - */ -trait AnsiColor { - /** Foreground color for ANSI black - * @group color-black - */ - final val BLACK = "\u001b[30m" - /** Foreground color for ANSI red - * @group color-red - */ - final val RED = "\u001b[31m" - /** Foreground color for ANSI green - * @group color-green - */ - final val GREEN = "\u001b[32m" - /** Foreground color for ANSI yellow - * @group color-yellow - */ - final val YELLOW = "\u001b[33m" - /** Foreground color for ANSI blue - * @group color-blue - */ - final val BLUE = "\u001b[34m" - /** Foreground color for ANSI magenta - * @group color-magenta - */ - final val MAGENTA = "\u001b[35m" - /** Foreground color for ANSI cyan - * @group color-cyan - */ - final val CYAN = "\u001b[36m" - /** Foreground color for ANSI white - * @group color-white - */ - final val WHITE = "\u001b[37m" - - /** Background color for ANSI black - * @group color-black - */ - final val BLACK_B = "\u001b[40m" - /** Background color for ANSI red - * @group color-red - */ - final val RED_B = "\u001b[41m" - /** Background color for ANSI green - * @group color-green - */ - final val GREEN_B = "\u001b[42m" - /** Background color for ANSI yellow - * @group color-yellow - */ - final val YELLOW_B = "\u001b[43m" - /** Background color for ANSI blue - * @group color-blue - */ - final val BLUE_B = "\u001b[44m" - /** Background color for ANSI magenta - * @group color-magenta - */ - final val MAGENTA_B = "\u001b[45m" - /** Background color for ANSI cyan - * @group color-cyan - */ - final val CYAN_B = "\u001b[46m" - /** Background color for ANSI white - * @group color-white - */ - final val WHITE_B = "\u001b[47m" - - /** Reset ANSI styles - * @group style-control - */ - final val RESET = "\u001b[0m" - /** ANSI bold - * @group style-control - */ - final val BOLD = "\u001b[1m" - /** ANSI underlines - * @group style-control - */ - final val UNDERLINED = "\u001b[4m" - /** ANSI blink - * @group style-control - */ - final val BLINK = "\u001b[5m" - /** ANSI reversed - * @group style-control - */ - final val REVERSED = "\u001b[7m" - /** ANSI invisible - * @group style-control - */ - final val INVISIBLE = "\u001b[8m" -} - -object AnsiColor extends AnsiColor { } diff --git a/tests/scala2-library/src/library/scala/io/BufferedSource.scala b/tests/scala2-library/src/library/scala/io/BufferedSource.scala deleted file mode 100644 index 33b5a1468eca..000000000000 --- a/tests/scala2-library/src/library/scala/io/BufferedSource.scala +++ /dev/null @@ -1,99 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.io - -import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader } -import Source.DefaultBufSize -import scala.collection.{ Iterator, AbstractIterator } - -/** This object provides convenience methods to create an iterable - * representation of a source file. - * - * @author Burak Emir, Paul Phillips - */ -class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val codec: Codec) extends Source { - def this(inputStream: InputStream)(implicit codec: Codec) = this(inputStream, DefaultBufSize)(codec) - def reader() = new InputStreamReader(inputStream, codec.decoder) - def bufferedReader() = new BufferedReader(reader(), bufferSize) - - // The same reader has to be shared between the iterators produced - // by iter and getLines. 
This is because calling hasNext can cause a - // block of data to be read from the stream, which will then be lost - // to getLines if it creates a new reader, even though next() was - // never called on the original. - private var charReaderCreated = false - private lazy val charReader = { - charReaderCreated = true - bufferedReader() - } - - override lazy val iter = ( - Iterator - continually (codec wrap charReader.read()) - takeWhile (_ != -1) - map (_.toChar) - ) - - private def decachedReader: BufferedReader = { - // Don't want to lose a buffered char sitting in iter either. Yes, - // this is ridiculous, but if I can't get rid of Source, and all the - // Iterator bits are designed into Source, and people create Sources - // in the repl, and the repl calls toString for the result line, and - // that calls hasNext to find out if they're empty, and that leads - // to chars being buffered, and no, I don't work here, they left a - // door unlocked. - // To avoid inflicting this silliness indiscriminately, we can - // skip it if the char reader was never created: and almost always - // it will not have been created, since getLines will be called - // immediately on the source. - if (charReaderCreated && iter.hasNext) { - val pb = new PushbackReader(charReader) - pb unread iter.next().toInt - new BufferedReader(pb, bufferSize) - } - else charReader - } - - - class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] { - private val lineReader = decachedReader - var nextLine: String = null - - override def hasNext = { - if (nextLine == null) - nextLine = lineReader.readLine - - nextLine != null - } - override def next(): String = { - val result = { - if (nextLine == null) lineReader.readLine - else try nextLine finally nextLine = null - } - if (result == null) Iterator.empty.next() - else result - } - } - - override def getLines(): Iterator[String] = new BufferedLineIterator - - /** Efficiently converts the entire remaining input into a string. */ - override def mkString = { - // Speed up slurping of whole data set in the simplest cases. - val allReader = decachedReader - val sb = new StringBuilder - val buf = new Array[Char](bufferSize) - var n = 0 - while (n != -1) { - n = allReader.read(buf) - if (n>0) sb.appendAll(buf, 0, n) - } - sb.result - } -} diff --git a/tests/scala2-library/src/library/scala/io/Codec.scala b/tests/scala2-library/src/library/scala/io/Codec.scala deleted file mode 100644 index 7cb7858b36f4..000000000000 --- a/tests/scala2-library/src/library/scala/io/Codec.scala +++ /dev/null @@ -1,131 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package io - -import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action } -import scala.annotation.migration -import scala.language.implicitConversions - -// Some notes about encodings for use in refining this implementation. -// -// Emails: encoding recorded in header, e.g. Content-Type: charset= "iso-8859-1" -// HTML: optional content-type meta tag. -// -// XML: optional encoding parameter. -// -// -// MacRoman vs. UTF-8: see http://osdir.com/ml/lang-jruby-devel/2009-04/msg00071.html -// -Dfile.encoding: see http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4375816 - -/** A class for character encoding/decoding preferences. 
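As a rough illustration of the chained configuration this class allows (the charset name and the REPLACE policy are arbitrary choices, not library defaults):
{{{
import java.nio.charset.CodingErrorAction
import scala.io.Codec

// Build a fresh Codec instead of mutating the shared Codec.UTF8 singleton;
// each chained setter configures and returns the same instance.
implicit val codec: Codec = Codec("UTF-8")
  .onMalformedInput(CodingErrorAction.REPLACE)
  .onUnmappableCharacter(CodingErrorAction.REPLACE)
}}}
Any Source created while this implicit is in scope will decode with these settings.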
- * - */ -class Codec(val charSet: Charset) { - type Configure[T] = (T => T, Boolean) - type Handler = CharacterCodingException => Int - - // these variables allow configuring the Codec object, and then - // all decoders and encoders retrieved from it will use these settings. - private[this] var _onMalformedInput: Action = null - private[this] var _onUnmappableCharacter: Action = null - private[this] var _encodingReplacement: Array[Byte] = null - private[this] var _decodingReplacement: String = null - private[this] var _onCodingException: Handler = e => throw e - - /** The name of the Codec. */ - override def toString = name - - // these methods can be chained to configure the variables above - def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this } - def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this } - def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this } - def encodingReplaceWith(newReplacement: Array[Byte]): this.type = { _encodingReplacement = newReplacement ; this } - def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this } - - def name = charSet.name - def encoder: CharsetEncoder = { - val enc = charSet.newEncoder() - if (_onMalformedInput ne null) enc onMalformedInput _onMalformedInput - if (_onUnmappableCharacter ne null) enc onUnmappableCharacter _onUnmappableCharacter - if (_encodingReplacement ne null) enc replaceWith _encodingReplacement - enc - } - def decoder: CharsetDecoder = { - val dec = charSet.newDecoder() - if (_onMalformedInput ne null) dec onMalformedInput _onMalformedInput - if (_onUnmappableCharacter ne null) dec onUnmappableCharacter _onUnmappableCharacter - if (_decodingReplacement ne null) dec replaceWith _decodingReplacement - dec - } - - def wrap(body: => Int): Int = - try body catch { case e: CharacterCodingException => _onCodingException(e) } -} - -trait LowPriorityCodecImplicits { - self: Codec.type => - - /** The Codec of Last Resort. */ - implicit lazy val fallbackSystemCodec: Codec = defaultCharsetCodec -} - -object Codec extends LowPriorityCodecImplicits { - final val ISO8859: Codec = new Codec(Charset forName "ISO-8859-1") - final val UTF8: Codec = new Codec(Charset forName "UTF-8") - - /** Optimistically these two possible defaults will be the same thing. - * In practice this is not necessarily true, and in fact Sun classifies - * the fact that you can influence anything at all via -Dfile.encoding - * as an accident, with any anomalies considered "not a bug". - */ - def defaultCharsetCodec = apply(Charset.defaultCharset) - def fileEncodingCodec = apply(scala.util.Properties.encodingString) - def default = defaultCharsetCodec - - def apply(encoding: String): Codec = new Codec(Charset forName encoding) - def apply(charSet: Charset): Codec = new Codec(charSet) - def apply(decoder: CharsetDecoder): Codec = { - val _decoder = decoder - new Codec(decoder.charset()) { override def decoder = _decoder } - } - - @migration("This method was previously misnamed `toUTF8`. 
Converts from Array[Byte] to Array[Char].", "2.9.0") - def fromUTF8(bytes: Array[Byte]): Array[Char] = fromUTF8(bytes, 0, bytes.length) - def fromUTF8(bytes: Array[Byte], offset: Int, len: Int): Array[Char] = { - val bbuffer = java.nio.ByteBuffer.wrap(bytes, offset, len) - val cbuffer = UTF8.charSet decode bbuffer - val chars = new Array[Char](cbuffer.remaining()) - cbuffer get chars - - chars - } - - @migration("This method was previously misnamed `fromUTF8`. Converts from character sequence to Array[Byte].", "2.9.0") - def toUTF8(cs: CharSequence): Array[Byte] = { - val cbuffer = java.nio.CharBuffer.wrap(cs, 0, cs.length) - val bbuffer = UTF8.charSet encode cbuffer - val bytes = new Array[Byte](bbuffer.remaining()) - bbuffer get bytes - - bytes - } - def toUTF8(chars: Array[Char], offset: Int, len: Int): Array[Byte] = { - val cbuffer = java.nio.CharBuffer.wrap(chars, offset, len) - val bbuffer = UTF8.charSet encode cbuffer - val bytes = new Array[Byte](bbuffer.remaining()) - bbuffer get bytes - - bytes - } - - implicit def string2codec(s: String): Codec = apply(s) - implicit def charset2codec(c: Charset): Codec = apply(c) - implicit def decoder2codec(cd: CharsetDecoder): Codec = apply(cd) -} diff --git a/tests/scala2-library/src/library/scala/io/Position.scala b/tests/scala2-library/src/library/scala/io/Position.scala deleted file mode 100644 index 0435ca95ad8e..000000000000 --- a/tests/scala2-library/src/library/scala/io/Position.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package io - -/** The object Position provides convenience methods to encode - * line and column number in one single integer. The encoded line - * (column) numbers range from 0 to `LINE_MASK` (`COLUMN_MASK`), - * where `0` indicates that the line (column) is undefined and - * `1` represents the first line (column). - * - * Line (Column) numbers greater than `LINE_MASK` (`COLUMN_MASK`) are - * replaced by `LINE_MASK` (`COLUMN_MASK`). Furthermore, if the encoded - * line number is `LINE_MASK`, the column number is always set to 0. - * - * The following properties hold: - * - * the undefined position is 0: `encode(0,0) == 0` - * encodings are non-negative : `encode(line,column) >= 0` - * position order is preserved: - * {{{ - * (line1 <= line2) || (line1 == line2 && column1 <= column2) - * }}} - * implies - * {{{ - * encode(line1,column1) <= encode(line2,column2) - * }}} - * @author Burak Emir (translated from work by Matthias Zenger and others) - */ -@deprecated("this class will be removed", "2.10.0") -private[scala] abstract class Position { - /** Definable behavior for overflow conditions. - */ - def checkInput(line: Int, column: Int): Unit - - /** Number of bits used to encode the line number */ - final val LINE_BITS = 20 - /** Number of bits used to encode the column number */ - final val COLUMN_BITS = 31 - LINE_BITS // no negatives => 31 - /** Mask to decode the line number */ - final val LINE_MASK = (1 << LINE_BITS) - 1 - /** Mask to decode the column number */ - final val COLUMN_MASK = (1 << COLUMN_BITS) - 1 - - /** Encodes a position into a single integer. 
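A small worked example of the packing scheme documented here, using arbitrary sample values (line 3, column 5):
{{{
// LINE_BITS = 20, COLUMN_BITS = 31 - 20 = 11
val pos  = (3 << 11) | 5           // 6149
val line = (pos >> 11) & 0xFFFFF   // 3   (0xFFFFF == LINE_MASK)
val col  = pos & 0x7FF             // 5   (0x7FF   == COLUMN_MASK)
}}}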
*/ - final def encode(line: Int, column: Int): Int = { - checkInput(line, column) - - if (line >= LINE_MASK) - LINE_MASK << COLUMN_BITS - else - (line << COLUMN_BITS) | scala.math.min(COLUMN_MASK, column) - } - - /** Returns the line number of the encoded position. */ - final def line(pos: Int): Int = (pos >> COLUMN_BITS) & LINE_MASK - - /** Returns the column number of the encoded position. */ - final def column(pos: Int): Int = pos & COLUMN_MASK - - /** Returns a string representation of the encoded position. */ - def toString(pos: Int): String = line(pos) + ":" + column(pos) -} - -private[scala] object Position extends Position { - def checkInput(line: Int, column: Int) { - if (line < 0) - throw new IllegalArgumentException(line + " < 0") - if ((line == 0) && (column != 0)) - throw new IllegalArgumentException(line + "," + column + " not allowed") - if (column < 0) - throw new IllegalArgumentException(line + "," + column + " not allowed") - } -} diff --git a/tests/scala2-library/src/library/scala/io/Source.scala b/tests/scala2-library/src/library/scala/io/Source.scala deleted file mode 100644 index b4f542a25209..000000000000 --- a/tests/scala2-library/src/library/scala/io/Source.scala +++ /dev/null @@ -1,372 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package io - -import scala.collection.AbstractIterator -import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile, Closeable } -import java.net.{ URI, URL } - -/** This object provides convenience methods to create an iterable - * representation of a source file. - * - * @author Burak Emir, Paul Phillips - * @version 1.0, 19/08/2004 - */ -object Source { - val DefaultBufSize = 2048 - - /** Creates a `Source` from System.in. - */ - def stdin = fromInputStream(System.in) - - /** Creates a Source from an Iterable. - * - * @param iterable the Iterable - * @return the Source - */ - def fromIterable(iterable: Iterable[Char]): Source = new Source { - val iter = iterable.iterator - } withReset(() => fromIterable(iterable)) - - /** Creates a Source instance from a single character. - */ - def fromChar(c: Char): Source = fromIterable(Array(c)) - - /** creates Source from array of characters, with empty description. - */ - def fromChars(chars: Array[Char]): Source = fromIterable(chars) - - /** creates Source from a String, with no description. - */ - def fromString(s: String): Source = fromIterable(s) - - /** creates Source from file with given name, setting its description to - * filename. - */ - def fromFile(name: String)(implicit codec: Codec): BufferedSource = - fromFile(new JFile(name))(codec) - - /** creates Source from file with given name, using given encoding, setting - * its description to filename. - */ - def fromFile(name: String, enc: String): BufferedSource = - fromFile(name)(Codec(enc)) - - /** creates `source` from file with given file `URI`. - */ - def fromFile(uri: URI)(implicit codec: Codec): BufferedSource = - fromFile(new JFile(uri))(codec) - - /** creates Source from file with given file: URI - */ - def fromFile(uri: URI, enc: String): BufferedSource = - fromFile(uri)(Codec(enc)) - - /** creates Source from file, using default character encoding, setting its - * description to filename. 
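A minimal usage sketch of these fromFile overloads; the file name is purely illustrative:
{{{
import scala.io.{Codec, Source}

val src = Source.fromFile("notes.txt")(Codec.UTF8)
try src.getLines().foreach(println)
finally src.close()
}}}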
- */ - def fromFile(file: JFile)(implicit codec: Codec): BufferedSource = - fromFile(file, Source.DefaultBufSize)(codec) - - /** same as fromFile(file, enc, Source.DefaultBufSize) - */ - def fromFile(file: JFile, enc: String): BufferedSource = - fromFile(file)(Codec(enc)) - - def fromFile(file: JFile, enc: String, bufferSize: Int): BufferedSource = - fromFile(file, bufferSize)(Codec(enc)) - - /** Creates Source from `file`, using given character encoding, setting - * its description to filename. Input is buffered in a buffer of size - * `bufferSize`. - */ - def fromFile(file: JFile, bufferSize: Int)(implicit codec: Codec): BufferedSource = { - val inputStream = new FileInputStream(file) - - createBufferedSource( - inputStream, - bufferSize, - () => fromFile(file, bufferSize)(codec), - () => inputStream.close() - )(codec) withDescription ("file:" + file.getAbsolutePath) - } - - /** Create a `Source` from array of bytes, decoding - * the bytes according to codec. - * - * @return the created `Source` instance. - */ - def fromBytes(bytes: Array[Byte])(implicit codec: Codec): Source = - fromString(new String(bytes, codec.name)) - - def fromBytes(bytes: Array[Byte], enc: String): Source = - fromBytes(bytes)(Codec(enc)) - - /** Create a `Source` from array of bytes, assuming - * one byte per character (ISO-8859-1 encoding.) - */ - def fromRawBytes(bytes: Array[Byte]): Source = - fromString(new String(bytes, Codec.ISO8859.name)) - - /** creates `Source` from file with given file: URI - */ - def fromURI(uri: URI)(implicit codec: Codec): BufferedSource = - fromFile(new JFile(uri))(codec) - - /** same as fromURL(new URL(s))(Codec(enc)) - */ - def fromURL(s: String, enc: String): BufferedSource = - fromURL(s)(Codec(enc)) - - /** same as fromURL(new URL(s)) - */ - def fromURL(s: String)(implicit codec: Codec): BufferedSource = - fromURL(new URL(s))(codec) - - /** same as fromInputStream(url.openStream())(Codec(enc)) - */ - def fromURL(url: URL, enc: String): BufferedSource = - fromURL(url)(Codec(enc)) - - /** same as fromInputStream(url.openStream())(codec) - */ - def fromURL(url: URL)(implicit codec: Codec): BufferedSource = - fromInputStream(url.openStream())(codec) - - /** Reads data from inputStream with a buffered reader, using the encoding - * in implicit parameter codec. 
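For example, reading an in-memory stream through the fromInputStream convenience defined just below (a sketch, not part of the original sources):
{{{
import java.io.ByteArrayInputStream
import scala.io.{Codec, Source}

val in  = new ByteArrayInputStream("hello\nworld".getBytes("UTF-8"))
val src = Source.fromInputStream(in)(Codec.UTF8)
println(src.mkString)   // prints "hello" and "world" on separate lines
}}}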
- * - * @param inputStream the input stream from which to read - * @param bufferSize buffer size (defaults to Source.DefaultBufSize) - * @param reset a () => Source which resets the stream (if unset, reset() will throw an Exception) - * @param close a () => Unit method which closes the stream (if unset, close() will do nothing) - * @param codec (implicit) a scala.io.Codec specifying behavior (defaults to Codec.default) - * @return the buffered source - */ - def createBufferedSource( - inputStream: InputStream, - bufferSize: Int = DefaultBufSize, - reset: () => Source = null, - close: () => Unit = null - )(implicit codec: Codec): BufferedSource = { - // workaround for default arguments being unable to refer to other parameters - val resetFn = if (reset == null) () => createBufferedSource(inputStream, bufferSize, reset, close)(codec) else reset - - new BufferedSource(inputStream, bufferSize)(codec) withReset resetFn withClose close - } - - def fromInputStream(is: InputStream, enc: String): BufferedSource = - fromInputStream(is)(Codec(enc)) - - def fromInputStream(is: InputStream)(implicit codec: Codec): BufferedSource = - createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec) - - /** Reads data from a classpath resource, using either a context classloader (default) or a passed one. - * - * @param resource name of the resource to load from the classpath - * @param classLoader classloader to be used, or context classloader if not specified - * @return the buffered source - */ - def fromResource(resource: String, classLoader: ClassLoader = Thread.currentThread().getContextClassLoader())(implicit codec: Codec): BufferedSource = - fromInputStream(classLoader.getResourceAsStream(resource)) - -} - -/** An iterable representation of source data. - * It may be reset with the optional [[reset]] method. - * - * Subclasses must supply [[scala.io.Source.iter the underlying iterator]]. - * - * Error handling may be customized by overriding the [[scala.io.Source.report report]] method. - * - * The [[scala.io.Source.ch current input]] and [[scala.io.Source.pos position]], - * as well as the [[scala.io.Source.next next character]] methods delegate to - * [[scala.io.Source#Positioner the positioner]]. - * - * The default positioner encodes line and column numbers in the position passed to [[report]]. - * This behavior can be changed by supplying a - * [[scala.io.Source.withPositioning(pos:* custom positioner]]. - * - */ -abstract class Source extends Iterator[Char] with Closeable { - /** the actual iterator */ - protected val iter: Iterator[Char] - - // ------ public values - - /** description of this source, default empty */ - var descr: String = "" - var nerrors = 0 - var nwarnings = 0 - - private def lineNum(line: Int): String = (getLines() drop (line - 1) take 1).mkString - - class LineIterator extends AbstractIterator[String] with Iterator[String] { - private[this] val sb = new StringBuilder - - lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered - def isNewline(ch: Char) = ch == '\r' || ch == '\n' - def getc() = iter.hasNext && { - val ch = iter.next() - if (ch == '\n') false - else if (ch == '\r') { - if (iter.hasNext && iter.head == '\n') - iter.next() - - false - } - else { - sb append ch - true - } - } - def hasNext = iter.hasNext - def next = { - sb.clear() - while (getc()) { } - sb.toString - } - } - - /** Returns an iterator who returns lines (NOT including newline character(s)). 
- * It will treat any of \r\n, \r, or \n as a line separator (longest match) - if - * you need more refined behavior you can subclass Source#LineIterator directly. - */ - def getLines(): Iterator[String] = new LineIterator() - - /** Returns `'''true'''` if this source has more characters. - */ - def hasNext = iter.hasNext - - /** Returns next character. - */ - def next(): Char = positioner.next() - - class Positioner(encoder: Position) { - def this() = this(RelaxedPosition) - /** the last character returned by next. */ - var ch: Char = _ - - /** position of last character returned by next */ - var pos = 0 - - /** current line and column */ - var cline = 1 - var ccol = 1 - - /** default col increment for tabs '\t', set to 4 initially */ - var tabinc = 4 - - def next(): Char = { - ch = iter.next() - pos = encoder.encode(cline, ccol) - ch match { - case '\n' => - ccol = 1 - cline += 1 - case '\t' => - ccol += tabinc - case _ => - ccol += 1 - } - ch - } - } - /** A Position implementation which ignores errors in - * the positions. - */ - object RelaxedPosition extends Position { - def checkInput(line: Int, column: Int): Unit = () - } - object RelaxedPositioner extends Positioner(RelaxedPosition) { } - object NoPositioner extends Positioner(Position) { - override def next(): Char = iter.next() - } - def ch = positioner.ch - def pos = positioner.pos - - /** Reports an error message to the output stream `out`. - * - * @param pos the source position (line/column) - * @param msg the error message to report - * @param out PrintStream to use (optional: defaults to `Console.err`) - */ - def reportError( - pos: Int, - msg: String, - out: PrintStream = Console.err) - { - nerrors += 1 - report(pos, msg, out) - } - - private def spaces(n: Int) = List.fill(n)(' ').mkString - /** - * @param pos the source position (line/column) - * @param msg the error message to report - * @param out PrintStream to use - */ - def report(pos: Int, msg: String, out: PrintStream) { - val line = Position line pos - val col = Position column pos - - out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, lineNum(line), spaces(col - 1)) - } - - /** - * @param pos the source position (line/column) - * @param msg the warning message to report - * @param out PrintStream to use (optional: defaults to `Console.out`) - */ - def reportWarning( - pos: Int, - msg: String, - out: PrintStream = Console.out) - { - nwarnings += 1 - report(pos, "warning! " + msg, out) - } - - private[this] var resetFunction: () => Source = null - private[this] var closeFunction: () => Unit = null - private[this] var positioner: Positioner = RelaxedPositioner - - def withReset(f: () => Source): this.type = { - resetFunction = f - this - } - def withClose(f: () => Unit): this.type = { - closeFunction = f - this - } - def withDescription(text: String): this.type = { - descr = text - this - } - /** Change or disable the positioner. */ - def withPositioning(on: Boolean): this.type = { - positioner = if (on) RelaxedPositioner else NoPositioner - this - } - def withPositioning(pos: Positioner): this.type = { - positioner = pos - this - } - - /** The close() method closes the underlying resource. */ - def close() { - if (closeFunction != null) closeFunction() - } - - /** The reset() method creates a fresh copy of this Source. 
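A brief sketch of the line-splitting and reset behaviour described here:
{{{
val src = scala.io.Source.fromString("one\r\ntwo\rthree\nfour")
src.getLines().toList           // List(one, two, three, four) -- \r\n, \r and \n all end a line
src.reset().getLines().toList   // the same list again; fromString installs a reset function
}}}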
*/ - def reset(): Source = - if (resetFunction != null) resetFunction() - else throw new UnsupportedOperationException("Source's reset() method was not set.") -} diff --git a/tests/scala2-library/src/library/scala/io/StdIn.scala b/tests/scala2-library/src/library/scala/io/StdIn.scala deleted file mode 100644 index 0f9656436b8f..000000000000 --- a/tests/scala2-library/src/library/scala/io/StdIn.scala +++ /dev/null @@ -1,229 +0,0 @@ -package scala -package io - -import java.text.MessageFormat - -/** private[scala] because this is not functionality we should be providing - * in the standard library, at least not in this idiosyncratic form. - * Factored into trait because it is better code structure regardless. - */ -private[scala] trait StdIn { - import scala.Console._ - - /** Read a full line from the default input. Returns `null` if the end of the - * input stream has been reached. - * - * @return the string read from the terminal or null if the end of stream was reached. - */ - def readLine(): String = in.readLine() - - /** Print and flush formatted text to the default output, and read a full line from the default input. - * Returns `null` if the end of the input stream has been reached. - * - * @param text the format of the text to print out, as in `printf`. - * @param args the parameters used to instantiate the format, as in `printf`. - * @return the string read from the default input - */ - def readLine(text: String, args: Any*): String = { - printf(text, args: _*) - out.flush() - readLine() - } - - /** Reads a boolean value from an entire line of the default input. - * Has a fairly liberal interpretation of the input. - * - * @return the boolean value read, or false if it couldn't be converted to a boolean - * @throws java.io.EOFException if the end of the input stream has been reached. - */ - def readBoolean(): Boolean = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toLowerCase() match { - case "true" => true - case "t" => true - case "yes" => true - case "y" => true - case _ => false - } - } - - /** Reads a byte value from an entire line of the default input. - * - * @return the Byte that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte - */ - def readByte(): Byte = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toByte - } - - /** Reads a short value from an entire line of the default input. - * - * @return the short that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Short - */ - def readShort(): Short = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toShort - } - - /** Reads a char value from an entire line of the default input. - * - * @return the Char that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty - */ - def readChar(): Char = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s charAt 0 - } - - /** Reads an int value from an entire line of the default input. 
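A hedged sketch of typical StdIn usage (the prompt text is arbitrary):
{{{
import scala.io.StdIn

val name = StdIn.readLine("Name? ")   // prints the prompt, then reads one line
val age  = StdIn.readInt()            // NumberFormatException on non-numeric input,
                                      // EOFException if the stream has ended
}}}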
- * - * @return the Int that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to an Int - */ - def readInt(): Int = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toInt - } - - /** Reads an long value from an entire line of the default input. - * - * @return the Long that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Long - */ - def readLong(): Long = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toLong - } - - /** Reads a float value from an entire line of the default input. - * @return the Float that was read. - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float - * - */ - def readFloat(): Float = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toFloat - } - - /** Reads a double value from an entire line of the default input. - * - * @return the Double that was read. - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float - */ - def readDouble(): Double = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toDouble - } - - /** Reads in some structured input (from the default input), specified by - * a format specifier. See class `java.text.MessageFormat` for details of - * the format specification. - * - * @param format the format of the input. - * @return a list of all extracted values. - * @throws java.io.EOFException if the end of the input stream has been - * reached. - */ - def readf(format: String): List[Any] = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - textComponents(new MessageFormat(format).parse(s)) - } - - /** Reads in some structured input (from the default input), specified by - * a format specifier, returning only the first value extracted, according - * to the format specification. - * - * @param format format string, as accepted by `readf`. - * @return The first value that was extracted from the input - */ - def readf1(format: String): Any = readf(format).head - - /** Reads in some structured input (from the default input), specified - * by a format specifier, returning only the first two values extracted, - * according to the format specification. - * - * @param format format string, as accepted by `readf`. - * @return A [[scala.Tuple2]] containing the first two values extracted - */ - def readf2(format: String): (Any, Any) = { - val res = readf(format) - (res.head, res.tail.head) - } - - /** Reads in some structured input (from the default input), specified - * by a format specifier, returning only the first three values extracted, - * according to the format specification. - * - * @param format format string, as accepted by `readf`. 
- * @return A [[scala.Tuple3]] containing the first three values extracted - */ - def readf3(format: String): (Any, Any, Any) = { - val res = readf(format) - (res.head, res.tail.head, res.tail.tail.head) - } - - private def textComponents(a: Array[AnyRef]): List[Any] = { - var i: Int = a.length - 1 - var res: List[Any] = Nil - while (i >= 0) { - res = (a(i) match { - case x: java.lang.Boolean => x.booleanValue() - case x: java.lang.Byte => x.byteValue() - case x: java.lang.Short => x.shortValue() - case x: java.lang.Character => x.charValue() - case x: java.lang.Integer => x.intValue() - case x: java.lang.Long => x.longValue() - case x: java.lang.Float => x.floatValue() - case x: java.lang.Double => x.doubleValue() - case x => x - }) :: res - i -= 1 - } - res - } -} - -object StdIn extends StdIn diff --git a/tests/scala2-library/src/library/scala/language.scala b/tests/scala2-library/src/library/scala/language.scala deleted file mode 100644 index 391f1ac90301..000000000000 --- a/tests/scala2-library/src/library/scala/language.scala +++ /dev/null @@ -1,183 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** - * The `scala.language` object controls the language features available to the programmer, as proposed in the - * [[https://docs.google.com/document/d/1nlkvpoIRkx7at1qJEZafJwthZ3GeIklTFhqmXMvTX9Q/edit '''SIP-18 document''']]. - * - * Each of these features has to be explicitly imported into the current scope to become available: - * {{{ - * import language.postfixOps // or language._ - * List(1, 2, 3) reverse - * }}} - * - * The language features are: - * - [[dynamics `dynamics`]] enables defining calls rewriting using the [[scala.Dynamic `Dynamic`]] trait - * - [[postfixOps `postfixOps`]] enables postfix operators - * - [[reflectiveCalls `reflectiveCalls`]] enables using structural types - * - [[implicitConversions `implicitConversions`]] enables defining implicit methods and members - * - [[higherKinds `higherKinds`]] enables writing higher-kinded types - * - [[existentials `existentials`]] enables writing existential types - * - [[experimental `experimental`]] contains newer features that have not yet been tested in production - * - * @groupname production Language Features - * @groupname experimental Experimental Language Features - * @groupprio experimental 10 - */ -object language { - - import languageFeature._ - - /** Where enabled, direct or indirect subclasses of trait scala.Dynamic can - * be defined. Unless dynamics is enabled, a definition of a class, trait, - * or object that has Dynamic as a base trait is rejected. Dynamic member - * selection of existing subclasses of trait Dynamic are unaffected; - * they can be used anywhere. - * - * '''Why introduce the feature?''' To enable flexible DSLs and convenient interfacing - * with dynamic languages. - * - * '''Why control it?''' Dynamic member selection can undermine static checkability - * of programs. Furthermore, dynamic member selection often relies on reflection, - * which is not available on all platforms. - * - * @group production - */ - implicit lazy val dynamics: dynamics = languageFeature.dynamics - - /** Only where enabled, postfix operator notation `(expr op)` will be allowed. - * - * '''Why keep the feature?''' Several DSLs written in Scala need the notation. 
- * - * '''Why control it?''' Postfix operators interact poorly with semicolon inference. - * Most programmers avoid them for this reason. - * - * @group production - */ - implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps - - /** Only where enabled, accesses to members of structural types that need - * reflection are supported. Reminder: A structural type is a type of the form - * `Parents { Decls }` where `Decls` contains declarations of new members that do - * not override any member in `Parents`. To access one of these members, a - * reflective call is needed. - * - * '''Why keep the feature?''' Structural types provide great flexibility because - * they avoid the need to define inheritance hierarchies a priori. Besides, - * their definition falls out quite naturally from Scala’s concept of type refinement. - * - * '''Why control it?''' Reflection is not available on all platforms. Popular tools - * such as ProGuard have problems dealing with it. Even where reflection is available, - * reflective dispatch can lead to surprising performance degradations. - * - * @group production - */ - implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls - - /** Only where enabled, definitions of implicit conversions are allowed. An - * implicit conversion is an implicit value of unary function type `A => B`, - * or an implicit method that has in its first parameter section a single, - * non-implicit parameter. Examples: - * - * {{{ - * implicit def stringToInt(s: String): Int = s.length - * implicit val conv = (s: String) => s.length - * implicit def listToX(xs: List[T])(implicit f: T => X): X = ... - * }}} - * - * implicit values of other types are not affected, and neither are implicit - * classes. - * - * '''Why keep the feature?''' Implicit conversions are central to many aspects - * of Scala’s core libraries. - * - * '''Why control it?''' Implicit conversions are known to cause many pitfalls - * if over-used. And there is a tendency to over-use them because they look - * very powerful and their effects seem to be easy to understand. Also, in - * most situations using implicit parameters leads to a better design than - * implicit conversions. - * - * @group production - */ - implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions - - /** Only where this flag is enabled, higher-kinded types can be written. - * - * '''Why keep the feature?''' Higher-kinded types enable the definition of very general - * abstractions such as functor, monad, or arrow. A significant set of advanced - * libraries relies on them. Higher-kinded types are also at the core of the - * scala-virtualized effort to produce high-performance parallel DSLs through staging. - * - * '''Why control it?''' Higher kinded types in Scala lead to a Turing-complete - * type system, where compiler termination is no longer guaranteed. They tend - * to be useful mostly for type-level computation and for highly generic design - * patterns. The level of abstraction implied by these design patterns is often - * a barrier to understanding for newcomers to a Scala codebase. Some syntactic - * aspects of higher-kinded types are hard to understand for the uninitiated and - * type inference is less effective for them than for normal types. Because we are - * not completely happy with them yet, it is possible that some aspects of - * higher-kinded types will change in future versions of Scala. 
So an explicit - * enabling also serves as a warning that code involving higher-kinded types - * might have to be slightly revised in the future. - * - * @group production - */ - implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds - - /** Only where enabled, existential types that cannot be expressed as wildcard - * types can be written and are allowed in inferred types of values or return - * types of methods. Existential types with wildcard type syntax such as `List[_]`, - * or `Map[String, _]` are not affected. - * - * '''Why keep the feature?''' Existential types are needed to make sense of Java’s wildcard - * types and raw types and the erased types of run-time values. - * - * '''Why control it?''' Having complex existential types in a code base usually makes - * application code very brittle, with a tendency to produce type errors with - * obscure error messages. Therefore, going overboard with existential types - * is generally perceived not to be a good idea. Also, complicated existential types - * might be no longer supported in a future simplification of the language. - * - * @group production - */ - implicit lazy val existentials: existentials = languageFeature.existentials - - /** The experimental object contains features that have been recently added but have not - * been thoroughly tested in production yet. - * - * Experimental features '''may undergo API changes''' in future releases, so production - * code should not rely on them. - * - * Programmers are encouraged to try out experimental features and - * [[https://github.com/scala/bug/issues report any bugs or API inconsistencies]] - * they encounter so they can be improved in future releases. - * - * @group experimental - */ - object experimental { - - import languageFeature.experimental._ - - /** Where enabled, macro definitions are allowed. Macro implementations and - * macro applications are unaffected; they can be used anywhere. - * - * '''Why introduce the feature?''' Macros promise to make the language more regular, - * replacing ad-hoc language constructs with a general powerful abstraction - * capability that can express them. Macros are also a more disciplined and - * powerful replacement for compiler plugins. - * - * '''Why control it?''' For their very power, macros can lead to code that is hard - * to debug and understand. 
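For reference, a minimal sketch of opting in to one of the features listed above (higherKinds, chosen arbitrarily):
{{{
import scala.language.higherKinds

trait Functor[F[_]] {   // the higher-kinded parameter F[_] is what the import enables
  def map[A, B](fa: F[A])(f: A => B): F[B]
}
}}}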
- */ - implicit lazy val macros: macros = languageFeature.experimental.macros - } -} diff --git a/tests/scala2-library/src/library/scala/languageFeature.scala b/tests/scala2-library/src/library/scala/languageFeature.scala deleted file mode 100644 index ff31274bf694..000000000000 --- a/tests/scala2-library/src/library/scala/languageFeature.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -package scala -import scala.annotation.meta.{languageFeature => feature} - -object languageFeature { - - @feature("extension of type scala.Dynamic", enableRequired = true) - sealed trait dynamics - object dynamics extends dynamics - - @feature("postfix operator #", enableRequired = false) - sealed trait postfixOps - object postfixOps extends postfixOps - - @feature("reflective access of structural type member #", enableRequired = false) - sealed trait reflectiveCalls - object reflectiveCalls extends reflectiveCalls - - @feature("implicit conversion #", enableRequired = false) - sealed trait implicitConversions - object implicitConversions extends implicitConversions - - @feature("higher-kinded type", enableRequired = false) - sealed trait higherKinds - object higherKinds extends higherKinds - - @feature("#, which cannot be expressed by wildcards, ", enableRequired = false) - sealed trait existentials - object existentials extends existentials - - object experimental { - @feature("macro definition", enableRequired = true) - sealed trait macros - object macros extends macros - } -} - diff --git a/tests/scala2-library/src/library/scala/math/BigDecimal.scala b/tests/scala2-library/src/library/scala/math/BigDecimal.scala deleted file mode 100644 index 5efffaf12754..000000000000 --- a/tests/scala2-library/src/library/scala/math/BigDecimal.scala +++ /dev/null @@ -1,774 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package math - -import scala.language.implicitConversions - -import java.math.{ MathContext, BigDecimal => BigDec } -import scala.collection.immutable.NumericRange - -/** - * @author Stephane Micheloud - * @author Rex Kerr - * @version 1.1 - * @since 2.7 - */ -object BigDecimal { - private final val maximumHashScale = 4934 // Quit maintaining hash identity with BigInt beyond this scale - private final val hashCodeNotComputed = 0x5D50690F // Magic value (happens to be "BigDecimal" old MurmurHash3 value) - private final val deci2binary = 3.3219280948873626 // Ratio of log(10) to log(2) - private val minCached = -512 - private val maxCached = 512 - val defaultMathContext = MathContext.DECIMAL128 - - /** Cache only for defaultMathContext using BigDecimals in a small range. 
*/ - private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1) - - object RoundingMode extends Enumeration { - // Annoying boilerplate to ensure consistency with java.math.RoundingMode - import java.math.{RoundingMode => RM} - type RoundingMode = Value - val UP = Value(RM.UP.ordinal) - val DOWN = Value(RM.DOWN.ordinal) - val CEILING = Value(RM.CEILING.ordinal) - val FLOOR = Value(RM.FLOOR.ordinal) - val HALF_UP = Value(RM.HALF_UP.ordinal) - val HALF_DOWN = Value(RM.HALF_DOWN.ordinal) - val HALF_EVEN = Value(RM.HALF_EVEN.ordinal) - val UNNECESSARY = Value(RM.UNNECESSARY.ordinal) - } - - /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */ - def decimal(d: Double, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc) - - /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */ - def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext) - - /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary. - * Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and - * `0.1 != 0.1f`. - */ - def decimal(f: Float, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(java.lang.Float.toString(f), mc), mc) - - /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`. - * Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and - * `0.1 != 0.1f`. - */ - def decimal(f: Float): BigDecimal = decimal(f, defaultMathContext) - - // This exists solely to avoid conversion from Int/Long to Float, screwing everything up. - /** Constructs a `BigDecimal` from a `Long`, rounding if necessary. This is identical to `BigDecimal(l, mc)`. */ - def decimal(l: Long, mc: MathContext): BigDecimal = apply(l, mc) - - // This exists solely to avoid conversion from Int/Long to Float, screwing everything up. - /** Constructs a `BigDecimal` from a `Long`. This is identical to `BigDecimal(l)`. */ - def decimal(l: Long): BigDecimal = apply(l) - - /** Constructs a `BigDecimal` using a `java.math.BigDecimal`, rounding if necessary. */ - def decimal(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd.round(mc), mc) - - /** Constructs a `BigDecimal` by expanding the binary fraction - * contained by `Double` value `d` into a decimal representation, - * rounding if necessary. When a `Float` is converted to a - * `Double`, the binary fraction is preserved, so this method - * also works for converted `Float`s. - */ - def binary(d: Double, mc: MathContext): BigDecimal = new BigDecimal(new BigDec(d, mc), mc) - - /** Constructs a `BigDecimal` by expanding the binary fraction - * contained by `Double` value `d` into a decimal representation. - * Note: this also works correctly on converted `Float`s. - */ - def binary(d: Double): BigDecimal = binary(d, defaultMathContext) - - /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. The - * precision is the default for `BigDecimal` or enough to represent - * the `java.math.BigDecimal` exactly, whichever is greater. 
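To illustrate the difference between the three conversion schemes (the printed values are the standard decimal expansion of the Double 0.1, shown for illustration):
{{{
BigDecimal.decimal(0.1)   // 0.1 -- built from the Double's decimal text form
BigDecimal.binary(0.1)    // ~0.1000000000000000055511151231257827 (default 34-digit precision)
BigDecimal.exact(0.1)     // the full binary expansion:
                          // 0.1000000000000000055511151231257827021181583404541015625
}}}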
- */ - def exact(repr: BigDec): BigDecimal = { - val mc = - if (repr.precision <= defaultMathContext.getPrecision) defaultMathContext - else new MathContext(repr.precision, java.math.RoundingMode.HALF_EVEN) - new BigDecimal(repr, mc) - } - - /** Constructs a `BigDecimal` by fully expanding the binary fraction - * contained by `Double` value `d`, adjusting the precision as - * necessary. Note: this works correctly on converted `Float`s also. - */ - def exact(d: Double): BigDecimal = exact(new BigDec(d)) - - /** Constructs a `BigDecimal` that exactly represents a `BigInt`. - */ - def exact(bi: BigInt): BigDecimal = exact(new BigDec(bi.bigInteger)) - - /** Constructs a `BigDecimal` that exactly represents a `Long`. Note that - * all creation methods for `BigDecimal` that do not take a `MathContext` - * represent a `Long`; this is equivalent to `apply`, `valueOf`, etc.. - */ - def exact(l: Long): BigDecimal = apply(l) - - /** Constructs a `BigDecimal` that exactly represents the number - * specified in a `String`. - */ - def exact(s: String): BigDecimal = exact(new BigDec(s)) - - /** Constructs a `BigDecimal` that exactly represents the number - * specified in base 10 in a character array. - */ - def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs)) - - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. Equivalent to `BigDecimal.decimal`. - * - * @param d the specified double value - * @return the constructed `BigDecimal` - */ - def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d) - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor, specifying a `MathContext` that is - * used for computations but isn't used for rounding. Use - * `BigDecimal.decimal` to use `MathContext` for rounding, - * or `BigDecimal(java.math.BigDecimal.valueOf(d), mc)` for - * no rounding. - * - * @param d the specified double value - * @param mc the `MathContext` used for future computations - * @return the constructed `BigDecimal` - */ - @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.", "2.11.0") - def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc) - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. - * - * @param x the specified `Long` value - * @return the constructed `BigDecimal` - */ - def valueOf(x: Long): BigDecimal = apply(x) - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. This is unlikely to do what you want; - * use `valueOf(f.toDouble)` or `decimal(f)` instead. - */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0") - def valueOf(f: Float): BigDecimal = valueOf(f.toDouble) - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. This is unlikely to do what you want; - * use `valueOf(f.toDouble)` or `decimal(f)` instead. - */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0") - def valueOf(f: Float, mc: MathContext): BigDecimal = valueOf(f.toDouble, mc) - - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `Integer` value. 
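A few construction forms, shown here as an informal sketch:
{{{
BigDecimal(42)       // from Int (small values are served from the internal cache)
BigDecimal(42L)      // from Long -- same value
BigDecimal("1e-3")   // exact, parsed from the decimal string
}}}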
- * - * @param i the specified integer value - * @return the constructed `BigDecimal` - */ - def apply(i: Int): BigDecimal = apply(i, defaultMathContext) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `Integer` value, rounding if necessary. - * - * @param i the specified integer value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(i: Int, mc: MathContext): BigDecimal = - if (mc == defaultMathContext && minCached <= i && i <= maxCached) { - val offset = i - minCached - var n = cache(offset) - if (n eq null) { n = new BigDecimal(BigDec.valueOf(i.toLong), mc); cache(offset) = n } - n - } - else apply(i.toLong, mc) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified long value. - * - * @param l the specified long value - * @return the constructed `BigDecimal` - */ - def apply(l: Long): BigDecimal = - if (minCached <= l && l <= maxCached) apply(l.toInt) - else new BigDecimal(BigDec.valueOf(l), defaultMathContext) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified long value, but rounded if necessary. - * - * @param l the specified long value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(l: Long, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(l, mc), mc) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified long value. - * - * @param unscaledVal the value - * @param scale the scale - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: Long, scale: Int): BigDecimal = - apply(BigInt(unscaledVal), scale) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified long value, but rounded if necessary. - * - * @param unscaledVal the value - * @param scale the scale - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: Long, scale: Int, mc: MathContext): BigDecimal = - apply(BigInt(unscaledVal), scale, mc) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified double value. Equivalent to `BigDecimal.decimal`. - * - * @param d the specified `Double` value - * @return the constructed `BigDecimal` - */ - def apply(d: Double): BigDecimal = decimal(d, defaultMathContext) - - // note we don't use the static valueOf because it doesn't let us supply - // a MathContext, but we should be duplicating its logic, modulo caching. - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified double value, but rounded if necessary. Equivalent to - * `BigDecimal.decimal`. - * - * @param d the specified `Double` value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc) - - @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0") - def apply(x: Float): BigDecimal = apply(x.toDouble) - - @deprecated("The default conversion from Float may not do what you want. 
Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0") - def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc) - - /** Translates a character array representation of a `BigDecimal` - * into a `BigDecimal`. - */ - def apply(x: Array[Char]): BigDecimal = exact(x) - - /** Translates a character array representation of a `BigDecimal` - * into a `BigDecimal`, rounding if necessary. - */ - def apply(x: Array[Char], mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(x, mc), mc) - - /** Translates the decimal String representation of a `BigDecimal` - * into a `BigDecimal`. - */ - def apply(x: String): BigDecimal = exact(x) - - /** Translates the decimal String representation of a `BigDecimal` - * into a `BigDecimal`, rounding if necessary. - */ - def apply(x: String, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(x, mc), mc) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `BigInt` value. - * - * @param x the specified `BigInt` value - * @return the constructed `BigDecimal` - */ - def apply(x: BigInt): BigDecimal = exact(x) - - /** Constructs a `BigDecimal` whose value is equal to that of the - * specified `BigInt` value, rounding if necessary. - * - * @param x the specified `BigInt` value - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(x: BigInt, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(x.bigInteger, mc), mc) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified `BigInt` value. - * - * @param unscaledVal the specified `BigInt` value - * @param scale the scale - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: BigInt, scale: Int): BigDecimal = - exact(new BigDec(unscaledVal.bigInteger, scale)) - - /** Constructs a `BigDecimal` whose unscaled value is equal to that - * of the specified `BigInt` value. - * - * @param unscaledVal the specified `BigInt` value - * @param scale the scale - * @param mc the precision and rounding mode for creation of this value and future operations on it - * @return the constructed `BigDecimal` - */ - def apply(unscaledVal: BigInt, scale: Int, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(unscaledVal.bigInteger, scale, mc), mc) - - /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */ - def apply(bd: BigDec): BigDecimal = apply(bd, defaultMathContext) - - @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11.0") - def apply(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd, mc) - - /** Implicit conversion from `Int` to `BigDecimal`. */ - implicit def int2bigDecimal(i: Int): BigDecimal = apply(i) - - /** Implicit conversion from `Long` to `BigDecimal`. */ - implicit def long2bigDecimal(l: Long): BigDecimal = apply(l) - - /** Implicit conversion from `Double` to `BigDecimal`. */ - implicit def double2bigDecimal(d: Double): BigDecimal = decimal(d) - - /** Implicit conversion from `java.math.BigDecimal` to `scala.BigDecimal`. */ - implicit def javaBigDecimal2bigDecimal(x: BigDec): BigDecimal = apply(x) -} - -/** - * `BigDecimal` represents decimal floating-point numbers of arbitrary precision. 
- * By default, the precision approximately matches that of IEEE 128-bit floating - * point numbers (34 decimal digits, `HALF_EVEN` rounding mode). Within the range - * of IEEE binary128 numbers, `BigDecimal` will agree with `BigInt` for both - * equality and hash codes (and will agree with primitive types as well). Beyond - * that range--numbers with more than 4934 digits when written out in full--the - * `hashCode` of `BigInt` and `BigDecimal` is allowed to diverge due to difficulty - * in efficiently computing both the decimal representation in `BigDecimal` and the - * binary representation in `BigInt`. - * - * When creating a `BigDecimal` from a `Double` or `Float`, care must be taken as - * the binary fraction representation of `Double` and `Float` does not easily - * convert into a decimal representation. Three explicit schemes are available - * for conversion. `BigDecimal.decimal` will convert the floating-point number - * to a decimal text representation, and build a `BigDecimal` based on that. - * `BigDecimal.binary` will expand the binary fraction to the requested or default - * precision. `BigDecimal.exact` will expand the binary fraction to the - * full number of digits, thus producing the exact decimal value corresponding to - * the binary fraction of that floating-point number. `BigDecimal` equality - * matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`. - * Note that since `0.1f != 0.1`, the same is not true for `Float`. Instead, - * `0.1f == BigDecimal.decimal((0.1f).toDouble)`. - * - * To test whether a `BigDecimal` number can be converted to a `Double` or - * `Float` and then back without loss of information by using one of these - * methods, test with `isDecimalDouble`, `isBinaryDouble`, or `isExactDouble` - * or the corresponding `Float` versions. Note that `BigInt`'s `isValidDouble` - * will agree with `isExactDouble`, not the `isDecimalDouble` used by default. - * - * `BigDecimal` uses the decimal representation of binary floating-point numbers - * to determine equality and hash codes. This yields different answers than - * conversion between `Long` and `Double` values, where the exact form is used. - * As always, since floating-point is a lossy representation, it is advisable to - * take care when assuming identity will be maintained across multiple conversions. - * - * `BigDecimal` maintains a `MathContext` that determines the rounding that - * is applied to certain calculations. In most cases, the value of the - * `BigDecimal` is also rounded to the precision specified by the `MathContext`. - * To create a `BigDecimal` with a different precision than its `MathContext`, - * use `new BigDecimal(new java.math.BigDecimal(...), mc)`. Rounding will - * be applied on those mathematical operations that can dramatically change the - * number of digits in a full representation, namely multiplication, division, - * and powers. The left-hand argument's `MathContext` always determines the - * degree of rounding, if any, and is the one propagated through arithmetic - * operations that do not apply rounding themselves. 
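// --- Editorial sketch, not part of the deleted file: the three Double-to-BigDecimal
// --- construction schemes described above (decimal, binary, exact), the unscaled-value
// --- constructor, and MathContext-driven rounding. Values in the comments are
// --- indicative; the object and value names are made up for the example.
import java.math.MathContext

object BigDecimalConstructionSketch {
  def main(args: Array[String]): Unit = {
    val dec = BigDecimal.decimal(0.1) // from the shortest decimal text form of the Double
    val bin = BigDecimal.binary(0.1)  // binary fraction expanded to the default 34-digit precision
    val exa = BigDecimal.exact(0.1)   // full expansion of the Double's binary fraction

    println(dec == 0.1)                               // true: equality follows the decimal expansion
    println(dec.isDecimalDouble && exa.isExactDouble) // true

    println(BigDecimal(12345L, 2))                    // 123.45: unscaled value 12345, scale 2

    // The left-hand operand's MathContext decides the rounding of the result.
    val threeDigits = BigDecimal("1", new MathContext(3))
    println(threeDigits / BigDecimal(3))              // 0.333
  }
}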
- * - * @author Stephane Micheloud - * @author Rex Kerr - * @version 1.1 - */ -final class BigDecimal(val bigDecimal: BigDec, val mc: MathContext) -extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigDecimal] { - def this(bigDecimal: BigDec) = this(bigDecimal, BigDecimal.defaultMathContext) - import BigDecimal.RoundingMode._ - import BigDecimal.{decimal, binary, exact} - - if (bigDecimal eq null) throw new IllegalArgumentException("null value for BigDecimal") - if (mc eq null) throw new IllegalArgumentException("null MathContext for BigDecimal") - - // There was an implicit to cut down on the wrapper noise for BigDec -> BigDecimal. - // However, this may mask introduction of surprising behavior (e.g. lack of rounding - // where one might expect it). Wrappers should be applied explicitly with an - // eye to correctness. - - // Sane hash code computation (which is surprisingly hard). - // Note--not lazy val because we can't afford the extra space. - private final var computedHashCode: Int = BigDecimal.hashCodeNotComputed - private final def computeHashCode(): Unit = { - computedHashCode = - if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode - else if (isDecimalDouble) doubleValue.## - else { - val temp = bigDecimal.stripTrailingZeros - scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale ) - } - } - - /** Returns the hash code for this BigDecimal. - * Note that this does not merely use the underlying java object's - * `hashCode` because we compare `BigDecimal`s with `compareTo` - * which deems 2 == 2.00, whereas in java these are unequal - * with unequal `hashCode`s. These hash codes agree with `BigInt` - * for whole numbers up ~4934 digits (the range of IEEE 128 bit floating - * point). Beyond this, hash codes will disagree; this prevents the - * explicit representation of the `BigInt` form for `BigDecimal` values - * with large exponents. - */ - override def hashCode(): Int = { - if (computedHashCode == BigDecimal.hashCodeNotComputed) computeHashCode() - computedHashCode - } - - /** Compares this BigDecimal with the specified value for equality. Where `Float` and `Double` - * disagree, `BigDecimal` will agree with the `Double` value - */ - override def equals (that: Any): Boolean = that match { - case that: BigDecimal => this equals that - case that: BigInt => - that.bitLength > (precision-scale-2)*BigDecimal.deci2binary && - this.toBigIntExact.exists(that equals _) - case that: Double => - !that.isInfinity && { - val d = toDouble - !d.isInfinity && d == that && equals(decimal(d)) - } - case that: Float => - !that.isInfinity && { - val f = toFloat - !f.isInfinity && f == that && equals(decimal(f.toDouble)) - } - case _ => isValidLong && unifiedPrimitiveEquals(that) - } - override def isValidByte = noArithmeticException(toByteExact) - override def isValidShort = noArithmeticException(toShortExact) - override def isValidChar = isValidInt && toIntExact >= Char.MinValue && toIntExact <= Char.MaxValue - override def isValidInt = noArithmeticException(toIntExact) - def isValidLong = noArithmeticException(toLongExact) - /** Tests whether the value is a valid Float. "Valid" has several distinct meanings, however. Use - * `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat`, depending on the intended meaning. - * By default, `decimal` creation is used, so `isDecimalFloat` is probably what you want. - */ - @deprecated("What constitutes validity is unclear. 
Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11.0") - def isValidFloat = { - val f = toFloat - !f.isInfinity && bigDecimal.compareTo(new BigDec(f.toDouble)) == 0 - } - /** Tests whether the value is a valid Double. "Valid" has several distinct meanings, however. Use - * `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning. - * By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want. - */ - @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11.0") - def isValidDouble = { - val d = toDouble - !d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0 - } - - /** Tests whether this `BigDecimal` holds the decimal representation of a `Double`. */ - def isDecimalDouble = { - val d = toDouble - !d.isInfinity && equals(decimal(d)) - } - - /** Tests whether this `BigDecimal` holds the decimal representation of a `Float`. */ - def isDecimalFloat = { - val f = toFloat - !f.isInfinity && equals(decimal(f)) - } - - /** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Double`. */ - def isBinaryDouble = { - val d = toDouble - !d.isInfinity && equals(binary(d,mc)) - } - - /** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Float`. */ - def isBinaryFloat = { - val f = toFloat - !f.isInfinity && equals(binary(f,mc)) - } - - /** Tests whether this `BigDecimal` holds the exact expansion of a `Double`'s binary fractional form into base 10. */ - def isExactDouble = { - val d = toDouble - !d.isInfinity && equals(exact(d)) - } - - /** Tests whether this `BigDecimal` holds the exact expansion of a `Float`'s binary fractional form into base 10. */ - def isExactFloat = { - val f = toFloat - !f.isInfinity && equals(exact(f.toDouble)) - } - - - private def noArithmeticException(body: => Unit): Boolean = { - try { body ; true } - catch { case _: ArithmeticException => false } - } - - def isWhole = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0 - - def underlying = bigDecimal - - - /** Compares this BigDecimal with the specified BigDecimal for equality. - */ - def equals (that: BigDecimal): Boolean = compare(that) == 0 - - /** Compares this BigDecimal with the specified BigDecimal - */ - def compare (that: BigDecimal): Int = this.bigDecimal compareTo that.bigDecimal - - /** Addition of BigDecimals - */ - def + (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal add that.bigDecimal, mc) - - /** Subtraction of BigDecimals - */ - def - (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal subtract that.bigDecimal, mc) - - /** Multiplication of BigDecimals - */ - def * (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.multiply(that.bigDecimal, mc), mc) - - /** Division of BigDecimals - */ - def / (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.divide(that.bigDecimal, mc), mc) - - /** Division and Remainder - returns tuple containing the result of - * divideToIntegralValue and the remainder. The computation is exact: no rounding is applied. - */ - def /% (that: BigDecimal): (BigDecimal, BigDecimal) = - this.bigDecimal.divideAndRemainder(that.bigDecimal) match { - case Array(q, r) => (new BigDecimal(q, mc), new BigDecimal(r, mc)) - } - - /** Divide to Integral value. 
- */ - def quot (that: BigDecimal): BigDecimal = - new BigDecimal(this.bigDecimal divideToIntegralValue that.bigDecimal, mc) - - /** Returns the minimum of this and that, or this if the two are equal - */ - def min (that: BigDecimal): BigDecimal = (this compare that) match { - case x if x <= 0 => this - case _ => that - } - - /** Returns the maximum of this and that, or this if the two are equal - */ - def max (that: BigDecimal): BigDecimal = (this compare that) match { - case x if x >= 0 => this - case _ => that - } - - /** Remainder after dividing this by that. - */ - def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal remainder that.bigDecimal, mc) - - /** Remainder after dividing this by that. - */ - def % (that: BigDecimal): BigDecimal = this remainder that - - /** Returns a BigDecimal whose value is this ** n. - */ - def pow (n: Int): BigDecimal = new BigDecimal(this.bigDecimal.pow(n, mc), mc) - - /** Returns a BigDecimal whose value is the negation of this BigDecimal - */ - def unary_- : BigDecimal = new BigDecimal(this.bigDecimal.negate(), mc) - - /** Returns the absolute value of this BigDecimal - */ - def abs: BigDecimal = if (signum < 0) unary_- else this - - /** Returns the sign of this BigDecimal; - * -1 if it is less than 0, - * +1 if it is greater than 0, - * 0 if it is equal to 0. - */ - def signum: Int = this.bigDecimal.signum() - - /** Returns the precision of this `BigDecimal`. - */ - def precision: Int = this.bigDecimal.precision() - - /** Returns a BigDecimal rounded according to the supplied MathContext settings, but - * preserving its own MathContext for future operations. - */ - def round(mc: MathContext): BigDecimal = { - val r = this.bigDecimal round mc - if (r eq bigDecimal) this else new BigDecimal(r, this.mc) - } - - /** Returns a `BigDecimal` rounded according to its own `MathContext` */ - def rounded: BigDecimal = { - val r = bigDecimal round mc - if (r eq bigDecimal) this else new BigDecimal(r, mc) - } - - /** Returns the scale of this `BigDecimal`. - */ - def scale: Int = this.bigDecimal.scale() - - /** Returns the size of an ulp, a unit in the last place, of this BigDecimal. - */ - def ulp: BigDecimal = new BigDecimal(this.bigDecimal.ulp, mc) - - /** Returns a new BigDecimal based on the supplied MathContext, rounded as needed. - */ - def apply(mc: MathContext): BigDecimal = new BigDecimal(this.bigDecimal round mc, mc) - - /** Returns a `BigDecimal` whose scale is the specified value, and whose value is - * numerically equal to this BigDecimal's. - */ - def setScale(scale: Int): BigDecimal = - if (this.scale == scale) this - else new BigDecimal(this.bigDecimal setScale scale, mc) - - def setScale(scale: Int, mode: RoundingMode): BigDecimal = - if (this.scale == scale) this - else new BigDecimal(this.bigDecimal.setScale(scale, mode.id), mc) - - /** Converts this BigDecimal to a Byte. - * If the BigDecimal is too big to fit in a Byte, only the low-order 8 bits are returned. - * Note that this conversion can lose information about the overall magnitude of the - * BigDecimal value as well as return a result with the opposite sign. - */ - override def byteValue = intValue.toByte - - /** Converts this BigDecimal to a Short. - * If the BigDecimal is too big to fit in a Short, only the low-order 16 bits are returned. - * Note that this conversion can lose information about the overall magnitude of the - * BigDecimal value as well as return a result with the opposite sign. 
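// --- Editorial sketch, not part of the deleted file: exact division-with-remainder
// --- versus explicit rescaling and rounding, using the BigDecimal operations
// --- documented above. Names are made up for the example.
import java.math.MathContext
import scala.math.BigDecimal.RoundingMode

object BigDecimalRoundingSketch {
  def main(args: Array[String]): Unit = {
    // /% is exact: integral quotient and remainder, no MathContext rounding applied.
    val (q, r) = BigDecimal("7.50") /% BigDecimal("2")
    println(s"quotient $q, remainder $r")

    // setScale fixes the number of decimal places; the RoundingMode decides
    // how discarded digits are handled.
    println(BigDecimal("2.345").setScale(2, RoundingMode.HALF_UP)) // 2.35
    println(BigDecimal("2.345").setScale(2, RoundingMode.DOWN))    // 2.34

    // round(mc) applies the given MathContext but keeps this value's own context.
    println(BigDecimal(math.Pi).round(new MathContext(4)))         // 3.142
  }
}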
- */ - override def shortValue = intValue.toShort - - /** Converts this BigDecimal to a Char. - * If the BigDecimal is too big to fit in a Char, only the low-order 16 bits are returned. - * Note that this conversion can lose information about the overall magnitude of the - * BigDecimal value and that it always returns a positive result. - */ - def charValue = intValue.toChar - - /** Converts this BigDecimal to an Int. - * If the BigDecimal is too big to fit in an Int, only the low-order 32 bits - * are returned. Note that this conversion can lose information about the - * overall magnitude of the BigDecimal value as well as return a result with - * the opposite sign. - */ - def intValue = this.bigDecimal.intValue - - /** Converts this BigDecimal to a Long. - * If the BigDecimal is too big to fit in a Long, only the low-order 64 bits - * are returned. Note that this conversion can lose information about the - * overall magnitude of the BigDecimal value as well as return a result with - * the opposite sign. - */ - def longValue = this.bigDecimal.longValue - - /** Converts this BigDecimal to a Float. - * if this BigDecimal has too great a magnitude to represent as a float, - * it will be converted to `Float.NEGATIVE_INFINITY` or - * `Float.POSITIVE_INFINITY` as appropriate. - */ - def floatValue = this.bigDecimal.floatValue - - /** Converts this BigDecimal to a Double. - * if this BigDecimal has too great a magnitude to represent as a double, - * it will be converted to `Double.NEGATIVE_INFINITY` or - * `Double.POSITIVE_INFINITY` as appropriate. - */ - def doubleValue = this.bigDecimal.doubleValue - - /** Converts this `BigDecimal` to a [[scala.Byte]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for a [[scala.Byte]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toByteExact = bigDecimal.byteValueExact - - /** Converts this `BigDecimal` to a [[scala.Short]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for a [[scala.Short]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toShortExact = bigDecimal.shortValueExact - - /** Converts this `BigDecimal` to a [[scala.Int]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for an [[scala.Int]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toIntExact = bigDecimal.intValueExact - - /** Converts this `BigDecimal` to a [[scala.Long]], checking for lost information. - * If this `BigDecimal` has a nonzero fractional part, or is out of the possible - * range for a [[scala.Long]] result, then a `java.lang.ArithmeticException` is - * thrown. - */ - def toLongExact = bigDecimal.longValueExact - - /** Creates a partially constructed NumericRange[BigDecimal] in range - * `[start;end)`, where start is the target BigDecimal. The step - * must be supplied via the "by" method of the returned object in order - * to receive the fully constructed range. 
For example: - * {{{ - * val partial = BigDecimal(1.0) to 2.0 // not usable yet - * val range = partial by 0.01 // now a NumericRange - * val range2 = BigDecimal(0) to 1.0 by 0.01 // all at once of course is fine too - * }}} - * - * @param end the end value of the range (exclusive) - * @return the partially constructed NumericRange - */ - def until(end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Exclusive[BigDecimal]] = - new Range.Partial(until(end, _)) - - /** Same as the one-argument `until`, but creates the range immediately. */ - def until(end: BigDecimal, step: BigDecimal) = Range.BigDecimal(this, end, step) - - /** Like `until`, but inclusive of the end value. */ - def to(end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Inclusive[BigDecimal]] = - new Range.Partial(to(end, _)) - - /** Like `until`, but inclusive of the end value. */ - def to(end: BigDecimal, step: BigDecimal) = Range.BigDecimal.inclusive(this, end, step) - - /** Converts this `BigDecimal` to a scala.BigInt. - */ - def toBigInt: BigInt = new BigInt(this.bigDecimal.toBigInteger) - - /** Converts this `BigDecimal` to a scala.BigInt if it - * can be done losslessly, returning Some(BigInt) or None. - */ - def toBigIntExact: Option[BigInt] = - if (isWhole) { - try Some(new BigInt(this.bigDecimal.toBigIntegerExact)) - catch { case _: ArithmeticException => None } - } - else None - - /** Returns the decimal String representation of this BigDecimal. - */ - override def toString: String = this.bigDecimal.toString() - -} diff --git a/tests/scala2-library/src/library/scala/math/BigInt.scala b/tests/scala2-library/src/library/scala/math/BigInt.scala deleted file mode 100644 index 707a5c076967..000000000000 --- a/tests/scala2-library/src/library/scala/math/BigInt.scala +++ /dev/null @@ -1,404 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -import java.math.BigInteger -import scala.language.implicitConversions - -/** - * @author Martin Odersky - * @version 1.0, 15/07/2003 - * @since 2.1 - */ -object BigInt { - - private val minCached = -1024 - private val maxCached = 1024 - private val cache = new Array[BigInt](maxCached - minCached + 1) - private val minusOne = BigInteger.valueOf(-1) - - /** Constructs a `BigInt` whose value is equal to that of the - * specified integer value. - * - * @param i the specified integer value - * @return the constructed `BigInt` - */ - def apply(i: Int): BigInt = - if (minCached <= i && i <= maxCached) { - val offset = i - minCached - var n = cache(offset) - if (n eq null) { n = new BigInt(BigInteger.valueOf(i.toLong)); cache(offset) = n } - n - } else new BigInt(BigInteger.valueOf(i.toLong)) - - /** Constructs a `BigInt` whose value is equal to that of the - * specified long value. - * - * @param l the specified long value - * @return the constructed `BigInt` - */ - def apply(l: Long): BigInt = - if (minCached <= l && l <= maxCached) apply(l.toInt) - else new BigInt(BigInteger.valueOf(l)) - - /** Translates a byte array containing the two's-complement binary - * representation of a BigInt into a BigInt. - */ - def apply(x: Array[Byte]): BigInt = - new BigInt(new BigInteger(x)) - - /** Translates the sign-magnitude representation of a BigInt into a BigInt. 
- */ - def apply(signum: Int, magnitude: Array[Byte]): BigInt = - new BigInt(new BigInteger(signum, magnitude)) - - /** Constructs a randomly generated positive BigInt that is probably prime, - * with the specified bitLength. - */ - def apply(bitlength: Int, certainty: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(bitlength, certainty, rnd.self)) - - /** Constructs a randomly generated BigInt, uniformly distributed over the - * range `0` to `(2 ^ numBits - 1)`, inclusive. - */ - def apply(numbits: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(numbits, rnd.self)) - - /** Translates the decimal String representation of a BigInt into a BigInt. - */ - def apply(x: String): BigInt = - new BigInt(new BigInteger(x)) - - /** Translates the string representation of a `BigInt` in the - * specified `radix` into a BigInt. - */ - def apply(x: String, radix: Int): BigInt = - new BigInt(new BigInteger(x, radix)) - - /** Translates a `java.math.BigInteger` into a BigInt. - */ - def apply(x: BigInteger): BigInt = - new BigInt(x) - - /** Returns a positive BigInt that is probably prime, with the specified bitLength. - */ - def probablePrime(bitLength: Int, rnd: scala.util.Random): BigInt = - new BigInt(BigInteger.probablePrime(bitLength, rnd.self)) - - /** Implicit conversion from `Int` to `BigInt`. - */ - implicit def int2bigInt(i: Int): BigInt = apply(i) - - /** Implicit conversion from `Long` to `BigInt`. - */ - implicit def long2bigInt(l: Long): BigInt = apply(l) - - /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`. - */ - implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x) -} - -/** - * @author Martin Odersky - * @version 1.0, 15/07/2003 - */ -final class BigInt(val bigInteger: BigInteger) - extends ScalaNumber - with ScalaNumericConversions - with Serializable - with Ordered[BigInt] -{ - /** Returns the hash code for this BigInt. */ - override def hashCode(): Int = - if (isValidLong) unifiedPrimitiveHashcode() - else bigInteger.## - - /** Compares this BigInt with the specified value for equality. - */ - override def equals(that: Any): Boolean = that match { - case that: BigInt => this equals that - case that: BigDecimal => that equals this - case that: Double => isValidDouble && toDouble == that - case that: Float => isValidFloat && toFloat == that - case x => isValidLong && unifiedPrimitiveEquals(x) - } - override def isValidByte = this >= Byte.MinValue && this <= Byte.MaxValue - override def isValidShort = this >= Short.MinValue && this <= Short.MaxValue - override def isValidChar = this >= Char.MinValue && this <= Char.MaxValue - override def isValidInt = this >= Int.MinValue && this <= Int.MaxValue - def isValidLong = this >= Long.MinValue && this <= Long.MaxValue - /** Returns `true` iff this can be represented exactly by [[scala.Float]]; otherwise returns `false`. - */ - def isValidFloat = { - val bitLen = bitLength - (bitLen <= 24 || - { - val lowest = lowestSetBit - bitLen <= java.lang.Float.MAX_EXPONENT + 1 && // exclude this < -2^128 && this >= 2^128 - lowest >= bitLen - 24 && - lowest < java.lang.Float.MAX_EXPONENT + 1 // exclude this == -2^128 - } - ) && !bitLengthOverflow - } - /** Returns `true` iff this can be represented exactly by [[scala.Double]]; otherwise returns `false`. 
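// --- Editorial sketch, not part of the deleted file: the implicit widening
// --- conversions into BigInt and the validity checks documented above.
// --- Names are made up for the example.
object BigIntValiditySketch {
  def main(args: Array[String]): Unit = {
    val fromLiteral: BigInt = 42          // uses the implicit int2bigInt conversion
    println(fromLiteral.isValidByte)      // true: 42 fits in a Byte

    val big = BigInt(Long.MaxValue) + 1
    println(big.isValidLong)              // false: one past the Long range

    // 2^60 + 1 needs 61 bits of mantissa, more than a Double's 53.
    println((BigInt(2).pow(60) + 1).isValidDouble) // false
    println(BigInt(2).pow(60).isValidDouble)       // true: powers of two are exact
  }
}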
- */ - def isValidDouble = { - val bitLen = bitLength - (bitLen <= 53 || - { - val lowest = lowestSetBit - bitLen <= java.lang.Double.MAX_EXPONENT + 1 && // exclude this < -2^1024 && this >= 2^1024 - lowest >= bitLen - 53 && - lowest < java.lang.Double.MAX_EXPONENT + 1 // exclude this == -2^1024 - } - ) && !bitLengthOverflow - } - /** Some implementations of java.math.BigInteger allow huge values with bit length greater than Int.MaxValue. - * The BigInteger.bitLength method returns truncated bit length in this case. - * This method tests if result of bitLength is valid. - * This method will become unnecessary if BigInt constructors reject huge BigIntegers. - */ - private def bitLengthOverflow = { - val shifted = bigInteger.shiftRight(Int.MaxValue) - (shifted.signum != 0) && !(shifted equals BigInt.minusOne) - } - - def isWhole() = true - def underlying = bigInteger - - /** Compares this BigInt with the specified BigInt for equality. - */ - def equals (that: BigInt): Boolean = compare(that) == 0 - - /** Compares this BigInt with the specified BigInt - */ - def compare (that: BigInt): Int = this.bigInteger.compareTo(that.bigInteger) - - /** Addition of BigInts - */ - def + (that: BigInt): BigInt = new BigInt(this.bigInteger.add(that.bigInteger)) - - /** Subtraction of BigInts - */ - def - (that: BigInt): BigInt = new BigInt(this.bigInteger.subtract(that.bigInteger)) - - /** Multiplication of BigInts - */ - def * (that: BigInt): BigInt = new BigInt(this.bigInteger.multiply(that.bigInteger)) - - /** Division of BigInts - */ - def / (that: BigInt): BigInt = new BigInt(this.bigInteger.divide(that.bigInteger)) - - /** Remainder of BigInts - */ - def % (that: BigInt): BigInt = new BigInt(this.bigInteger.remainder(that.bigInteger)) - - /** Returns a pair of two BigInts containing (this / that) and (this % that). - */ - def /% (that: BigInt): (BigInt, BigInt) = { - val dr = this.bigInteger.divideAndRemainder(that.bigInteger) - (new BigInt(dr(0)), new BigInt(dr(1))) - } - - /** Leftshift of BigInt - */ - def << (n: Int): BigInt = new BigInt(this.bigInteger.shiftLeft(n)) - - /** (Signed) rightshift of BigInt - */ - def >> (n: Int): BigInt = new BigInt(this.bigInteger.shiftRight(n)) - - /** Bitwise and of BigInts - */ - def & (that: BigInt): BigInt = new BigInt(this.bigInteger.and(that.bigInteger)) - - /** Bitwise or of BigInts - */ - def | (that: BigInt): BigInt = new BigInt(this.bigInteger.or (that.bigInteger)) - - /** Bitwise exclusive-or of BigInts - */ - def ^ (that: BigInt): BigInt = new BigInt(this.bigInteger.xor(that.bigInteger)) - - /** Bitwise and-not of BigInts. Returns a BigInt whose value is (this & ~that). - */ - def &~ (that: BigInt): BigInt = new BigInt(this.bigInteger.andNot(that.bigInteger)) - - /** Returns the greatest common divisor of abs(this) and abs(that) - */ - def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger)) - - /** Returns a BigInt whose value is (this mod that). - * This method differs from `%` in that it always returns a non-negative BigInt. - */ - def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger)) - - /** Returns the minimum of this and that - */ - def min (that: BigInt): BigInt = new BigInt(this.bigInteger.min(that.bigInteger)) - - /** Returns the maximum of this and that - */ - def max (that: BigInt): BigInt = new BigInt(this.bigInteger.max(that.bigInteger)) - - /** Returns a BigInt whose value is (this raised to the power of exp). 
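// --- Editorial sketch, not part of the deleted file: `%` keeps the sign of the
// --- dividend while `mod` is always non-negative; `/%` returns both parts of the
// --- truncated division. Names are made up for the example.
object BigIntDivisionSketch {
  def main(args: Array[String]): Unit = {
    val a = BigInt(-7)
    val b = BigInt(3)

    println(a % b)      // -1: remainder, sign follows the dividend
    println(a mod b)    //  2: always non-negative

    val (q, r) = a /% b
    println(s"$q, $r")  // -2, -1: truncated division, q * b + r == a

    println(BigInt(12) gcd BigInt(18)) // 6
  }
}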
- */ - def pow (exp: Int): BigInt = new BigInt(this.bigInteger.pow(exp)) - - /** Returns a BigInt whose value is - * (this raised to the power of exp modulo m). - */ - def modPow (exp: BigInt, m: BigInt): BigInt = - new BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) - - /** Returns a BigInt whose value is (the inverse of this modulo m). - */ - def modInverse (m: BigInt): BigInt = new BigInt(this.bigInteger.modInverse(m.bigInteger)) - - /** Returns a BigInt whose value is the negation of this BigInt - */ - def unary_- : BigInt = new BigInt(this.bigInteger.negate()) - - /** Returns the absolute value of this BigInt - */ - def abs: BigInt = new BigInt(this.bigInteger.abs()) - - /** Returns the sign of this BigInt; - * -1 if it is less than 0, - * +1 if it is greater than 0, - * 0 if it is equal to 0. - */ - def signum: Int = this.bigInteger.signum() - - /** Returns the bitwise complement of this BigInt - */ - def unary_~ : BigInt = new BigInt(this.bigInteger.not()) - - /** Returns true if and only if the designated bit is set. - */ - def testBit (n: Int): Boolean = this.bigInteger.testBit(n) - - /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit set. - */ - def setBit (n: Int): BigInt = new BigInt(this.bigInteger.setBit(n)) - - /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit cleared. - */ - def clearBit(n: Int): BigInt = new BigInt(this.bigInteger.clearBit(n)) - - /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit flipped. - */ - def flipBit (n: Int): BigInt = new BigInt(this.bigInteger.flipBit(n)) - - /** Returns the index of the rightmost (lowest-order) one bit in this BigInt - * (the number of zero bits to the right of the rightmost one bit). - */ - def lowestSetBit: Int = this.bigInteger.getLowestSetBit() - - /** Returns the number of bits in the minimal two's-complement representation of this BigInt, - * excluding a sign bit. - */ - def bitLength: Int = this.bigInteger.bitLength() - - /** Returns the number of bits in the two's complement representation of this BigInt - * that differ from its sign bit. - */ - def bitCount: Int = this.bigInteger.bitCount() - - /** Returns true if this BigInt is probably prime, false if it's definitely composite. - * @param certainty a measure of the uncertainty that the caller is willing to tolerate: - * if the call returns true the probability that this BigInt is prime - * exceeds (1 - 1/2 ^ certainty). - * The execution time of this method is proportional to the value of - * this parameter. - */ - def isProbablePrime(certainty: Int) = this.bigInteger.isProbablePrime(certainty) - - /** Converts this BigInt to a byte. - * If the BigInt is too big to fit in a byte, only the low-order 8 bits are returned. - * Note that this conversion can lose information about the overall magnitude of the - * BigInt value as well as return a result with the opposite sign. - */ - override def byteValue = intValue.toByte - - /** Converts this BigInt to a short. - * If the BigInt is too big to fit in a short, only the low-order 16 bits are returned. - * Note that this conversion can lose information about the overall magnitude of the - * BigInt value as well as return a result with the opposite sign. - */ - override def shortValue = intValue.toShort - - /** Converts this BigInt to a char. - * If the BigInt is too big to fit in a char, only the low-order 16 bits are returned. 
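// --- Editorial sketch, not part of the deleted file: modular arithmetic, bit-level
// --- queries, and probabilistic primality on BigInt, as documented above.
// --- Names are made up for the example.
object BigIntModularSketch {
  def main(args: Array[String]): Unit = {
    val p = BigInt(17)

    println(BigInt(3).modPow(BigInt(5), p)) // 5: 3^5 = 243 and 243 mod 17 = 5
    println(BigInt(3).modInverse(p))        // 6: 3 * 6 = 18 = 1 (mod 17)

    val ten = BigInt(10)                      // 1010 in binary
    println(ten.testBit(1) && ten.testBit(3)) // true
    println(ten.bitLength)                    // 4
    println(ten.lowestSetBit)                 // 1

    println(p.isProbablePrime(certainty = 20))           // true
    println(BigInt.probablePrime(16, scala.util.Random)) // some random 16-bit probable prime
  }
}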
- * Note that this conversion can lose information about the overall magnitude of the - * BigInt value and that it always returns a positive result. - */ - def charValue = intValue.toChar - - /** Converts this BigInt to an int. - * If the BigInt is too big to fit in an int, only the low-order 32 bits - * are returned. Note that this conversion can lose information about the - * overall magnitude of the BigInt value as well as return a result with - * the opposite sign. - */ - def intValue = this.bigInteger.intValue - - /** Converts this BigInt to a long. - * If the BigInt is too big to fit in a long, only the low-order 64 bits - * are returned. Note that this conversion can lose information about the - * overall magnitude of the BigInt value as well as return a result with - * the opposite sign. - */ - def longValue = this.bigInteger.longValue - - /** Converts this `BigInt` to a `float`. - * If this `BigInt` has too great a magnitude to represent as a float, - * it will be converted to `Float.NEGATIVE_INFINITY` or - * `Float.POSITIVE_INFINITY` as appropriate. - */ - def floatValue = this.bigInteger.floatValue - - /** Converts this `BigInt` to a `double`. - * if this `BigInt` has too great a magnitude to represent as a double, - * it will be converted to `Double.NEGATIVE_INFINITY` or - * `Double.POSITIVE_INFINITY` as appropriate. - */ - def doubleValue = this.bigInteger.doubleValue - - /** Create a `NumericRange[BigInt]` in range `[start;end)` - * with the specified step, where start is the target BigInt. - * - * @param end the end value of the range (exclusive) - * @param step the distance between elements (defaults to 1) - * @return the range - */ - def until(end: BigInt, step: BigInt = BigInt(1)) = Range.BigInt(this, end, step) - - /** Like until, but inclusive of the end value. - */ - def to(end: BigInt, step: BigInt = BigInt(1)) = Range.BigInt.inclusive(this, end, step) - - /** Returns the decimal String representation of this BigInt. - */ - override def toString(): String = this.bigInteger.toString() - - /** Returns the String representation in the specified radix of this BigInt. - */ - def toString(radix: Int): String = this.bigInteger.toString(radix) - - /** Returns a byte array containing the two's-complement representation of - * this BigInt. The byte array will be in big-endian byte-order: the most - * significant byte is in the zeroth element. The array will contain the - * minimum number of bytes required to represent this BigInt, including at - * least one sign bit. - */ - def toByteArray: Array[Byte] = this.bigInteger.toByteArray() -} diff --git a/tests/scala2-library/src/library/scala/math/Equiv.scala b/tests/scala2-library/src/library/scala/math/Equiv.scala deleted file mode 100644 index 45b2b3629de1..000000000000 --- a/tests/scala2-library/src/library/scala/math/Equiv.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -import java.util.Comparator - -/** A trait for representing equivalence relations. It is important to - * distinguish between a type that can be compared for equality or - * equivalence and a representation of equivalence on some type. This - * trait is for representing the latter. - * - * An [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] - * is a binary relation on a type. 
This relation is exposed as - * the `equiv` method of the `Equiv` trait. The relation must be: - * - * 1. reflexive: `equiv(x, x) == true` for any x of type `T`. - * 1. symmetric: `equiv(x, y) == equiv(y, x)` for any `x` and `y` of type `T`. - * 1. transitive: if `equiv(x, y) == true` and `equiv(y, z) == true`, then - * `equiv(x, z) == true` for any `x`, `y`, and `z` of type `T`. - * - * @author Geoffrey Washburn, Paul Phillips - * @version 1.0, 2008-04-03 - * @since 2.7 - */ - -trait Equiv[T] extends Any with Serializable { - /** Returns `true` iff `x` is equivalent to `y`. - */ - def equiv(x: T, y: T): Boolean -} - -trait LowPriorityEquiv { - self: Equiv.type => - - implicit def universalEquiv[T] : Equiv[T] = universal[T] -} - -object Equiv extends LowPriorityEquiv { - def reference[T <: AnyRef] : Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = x eq y - } - def universal[T] : Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = x == y - } - def fromComparator[T](cmp: Comparator[T]): Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = cmp.compare(x, y) == 0 - } - def fromFunction[T](cmp: (T, T) => Boolean): Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = cmp(x, y) - } - def by[T, S: Equiv](f: T => S): Equiv[T] = - fromFunction((x, y) => implicitly[Equiv[S]].equiv(f(x), f(y))) - - def apply[T: Equiv] : Equiv[T] = implicitly[Equiv[T]] -} diff --git a/tests/scala2-library/src/library/scala/math/Fractional.scala b/tests/scala2-library/src/library/scala/math/Fractional.scala deleted file mode 100644 index 64c3d3b17df8..000000000000 --- a/tests/scala2-library/src/library/scala/math/Fractional.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -import scala.language.implicitConversions - -/** - * @since 2.8 - */ -trait Fractional[T] extends Numeric[T] { - def div(x: T, y: T): T - - class FractionalOps(lhs: T) extends NumericOps(lhs) { - def /(rhs: T) = div(lhs, rhs) - } - override implicit def mkNumericOps(lhs: T): FractionalOps = - new FractionalOps(lhs) -} - -object Fractional { - trait ExtraImplicits { - implicit def infixFractionalOps[T](x: T)(implicit num: Fractional[T]): Fractional[T]#FractionalOps = new num.FractionalOps(x) - } - object Implicits extends ExtraImplicits -} diff --git a/tests/scala2-library/src/library/scala/math/Integral.scala b/tests/scala2-library/src/library/scala/math/Integral.scala deleted file mode 100644 index 53e0de72b765..000000000000 --- a/tests/scala2-library/src/library/scala/math/Integral.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -import scala.language.implicitConversions - -/** - * @since 2.8 - */ -trait Integral[T] extends Numeric[T] { - def quot(x: T, y: T): T - def rem(x: T, y: T): T - - class IntegralOps(lhs: T) extends NumericOps(lhs) { - def /(rhs: T) = quot(lhs, rhs) - def %(rhs: T) = rem(lhs, rhs) - def /%(rhs: T) = (quot(lhs, rhs), rem(lhs, rhs)) - } - override implicit def mkNumericOps(lhs: T): IntegralOps = new IntegralOps(lhs) -} - -object Integral { - trait ExtraImplicits { - /** The regrettable design of Numeric/Integral/Fractional has them all - * bumping into one 
another when searching for this implicit, so they - * are exiled into their own companions. - */ - implicit def infixIntegralOps[T](x: T)(implicit num: Integral[T]): Integral[T]#IntegralOps = new num.IntegralOps(x) - } - object Implicits extends ExtraImplicits -} diff --git a/tests/scala2-library/src/library/scala/math/Numeric.scala b/tests/scala2-library/src/library/scala/math/Numeric.scala deleted file mode 100644 index 40fbba7fa019..000000000000 --- a/tests/scala2-library/src/library/scala/math/Numeric.scala +++ /dev/null @@ -1,227 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -import scala.language.implicitConversions - -/** - * @since 2.8 - */ -object Numeric { - trait ExtraImplicits { - /** These implicits create conversions from a value for which an implicit Numeric - * exists to the inner class which creates infix operations. Once imported, you - * can write methods as follows: - * {{{ - * def plus[T: Numeric](x: T, y: T) = x + y - * }}} - */ - implicit def infixNumericOps[T](x: T)(implicit num: Numeric[T]): Numeric[T]#NumericOps = new num.NumericOps(x) - } - object Implicits extends ExtraImplicits { } - - trait BigIntIsIntegral extends Integral[BigInt] { - def plus(x: BigInt, y: BigInt): BigInt = x + y - def minus(x: BigInt, y: BigInt): BigInt = x - y - def times(x: BigInt, y: BigInt): BigInt = x * y - def quot(x: BigInt, y: BigInt): BigInt = x / y - def rem(x: BigInt, y: BigInt): BigInt = x % y - def negate(x: BigInt): BigInt = -x - def fromInt(x: Int): BigInt = BigInt(x) - def toInt(x: BigInt): Int = x.intValue - def toLong(x: BigInt): Long = x.longValue - def toFloat(x: BigInt): Float = x.floatValue - def toDouble(x: BigInt): Double = x.doubleValue - } - implicit object BigIntIsIntegral extends BigIntIsIntegral with Ordering.BigIntOrdering - - trait IntIsIntegral extends Integral[Int] { - def plus(x: Int, y: Int): Int = x + y - def minus(x: Int, y: Int): Int = x - y - def times(x: Int, y: Int): Int = x * y - def quot(x: Int, y: Int): Int = x / y - def rem(x: Int, y: Int): Int = x % y - def negate(x: Int): Int = -x - def fromInt(x: Int): Int = x - def toInt(x: Int): Int = x - def toLong(x: Int): Long = x.toLong - def toFloat(x: Int): Float = x.toFloat - def toDouble(x: Int): Double = x.toDouble - } - implicit object IntIsIntegral extends IntIsIntegral with Ordering.IntOrdering - - trait ShortIsIntegral extends Integral[Short] { - def plus(x: Short, y: Short): Short = (x + y).toShort - def minus(x: Short, y: Short): Short = (x - y).toShort - def times(x: Short, y: Short): Short = (x * y).toShort - def quot(x: Short, y: Short): Short = (x / y).toShort - def rem(x: Short, y: Short): Short = (x % y).toShort - def negate(x: Short): Short = (-x).toShort - def fromInt(x: Int): Short = x.toShort - def toInt(x: Short): Int = x.toInt - def toLong(x: Short): Long = x.toLong - def toFloat(x: Short): Float = x.toFloat - def toDouble(x: Short): Double = x.toDouble - } - implicit object ShortIsIntegral extends ShortIsIntegral with Ordering.ShortOrdering - - trait ByteIsIntegral extends Integral[Byte] { - def plus(x: Byte, y: Byte): Byte = (x + y).toByte - def minus(x: Byte, y: Byte): Byte = (x - y).toByte - def times(x: Byte, y: Byte): Byte = (x * y).toByte - def quot(x: Byte, y: Byte): Byte = (x / y).toByte - def rem(x: Byte, y: Byte): Byte = (x % y).toByte - def negate(x: Byte): Byte 
= (-x).toByte - def fromInt(x: Int): Byte = x.toByte - def toInt(x: Byte): Int = x.toInt - def toLong(x: Byte): Long = x.toLong - def toFloat(x: Byte): Float = x.toFloat - def toDouble(x: Byte): Double = x.toDouble - } - implicit object ByteIsIntegral extends ByteIsIntegral with Ordering.ByteOrdering - - trait CharIsIntegral extends Integral[Char] { - def plus(x: Char, y: Char): Char = (x + y).toChar - def minus(x: Char, y: Char): Char = (x - y).toChar - def times(x: Char, y: Char): Char = (x * y).toChar - def quot(x: Char, y: Char): Char = (x / y).toChar - def rem(x: Char, y: Char): Char = (x % y).toChar - def negate(x: Char): Char = (-x).toChar - def fromInt(x: Int): Char = x.toChar - def toInt(x: Char): Int = x.toInt - def toLong(x: Char): Long = x.toLong - def toFloat(x: Char): Float = x.toFloat - def toDouble(x: Char): Double = x.toDouble - } - implicit object CharIsIntegral extends CharIsIntegral with Ordering.CharOrdering - - trait LongIsIntegral extends Integral[Long] { - def plus(x: Long, y: Long): Long = x + y - def minus(x: Long, y: Long): Long = x - y - def times(x: Long, y: Long): Long = x * y - def quot(x: Long, y: Long): Long = x / y - def rem(x: Long, y: Long): Long = x % y - def negate(x: Long): Long = -x - def fromInt(x: Int): Long = x.toLong - def toInt(x: Long): Int = x.toInt - def toLong(x: Long): Long = x - def toFloat(x: Long): Float = x.toFloat - def toDouble(x: Long): Double = x.toDouble - } - implicit object LongIsIntegral extends LongIsIntegral with Ordering.LongOrdering - - trait FloatIsConflicted extends Numeric[Float] { - def plus(x: Float, y: Float): Float = x + y - def minus(x: Float, y: Float): Float = x - y - def times(x: Float, y: Float): Float = x * y - def negate(x: Float): Float = -x - def fromInt(x: Int): Float = x.toFloat - def toInt(x: Float): Int = x.toInt - def toLong(x: Float): Long = x.toLong - def toFloat(x: Float): Float = x - def toDouble(x: Float): Double = x.toDouble - // logic in Numeric base trait mishandles abs(-0.0f) - override def abs(x: Float): Float = math.abs(x) - } - trait FloatIsFractional extends FloatIsConflicted with Fractional[Float] { - def div(x: Float, y: Float): Float = x / y - } - trait FloatAsIfIntegral extends FloatIsConflicted with Integral[Float] { - def quot(x: Float, y: Float): Float = (BigDecimal(x) quot BigDecimal(y)).floatValue - def rem(x: Float, y: Float): Float = (BigDecimal(x) remainder BigDecimal(y)).floatValue - } - implicit object FloatIsFractional extends FloatIsFractional with Ordering.FloatOrdering - object FloatAsIfIntegral extends FloatAsIfIntegral with Ordering.FloatOrdering { - } - - trait DoubleIsConflicted extends Numeric[Double] { - def plus(x: Double, y: Double): Double = x + y - def minus(x: Double, y: Double): Double = x - y - def times(x: Double, y: Double): Double = x * y - def negate(x: Double): Double = -x - def fromInt(x: Int): Double = x.toDouble - def toInt(x: Double): Int = x.toInt - def toLong(x: Double): Long = x.toLong - def toFloat(x: Double): Float = x.toFloat - def toDouble(x: Double): Double = x - // logic in Numeric base trait mishandles abs(-0.0) - override def abs(x: Double): Double = math.abs(x) - } - trait DoubleIsFractional extends DoubleIsConflicted with Fractional[Double] { - def div(x: Double, y: Double): Double = x / y - } - trait DoubleAsIfIntegral extends DoubleIsConflicted with Integral[Double] { - def quot(x: Double, y: Double): Double = (BigDecimal(x) quot BigDecimal(y)).doubleValue - def rem(x: Double, y: Double): Double = (BigDecimal(x) remainder 
BigDecimal(y)).doubleValue - } - - trait BigDecimalIsConflicted extends Numeric[BigDecimal] { - def plus(x: BigDecimal, y: BigDecimal): BigDecimal = x + y - def minus(x: BigDecimal, y: BigDecimal): BigDecimal = x - y - def times(x: BigDecimal, y: BigDecimal): BigDecimal = x * y - def negate(x: BigDecimal): BigDecimal = -x - def fromInt(x: Int): BigDecimal = BigDecimal(x) - def toInt(x: BigDecimal): Int = x.intValue - def toLong(x: BigDecimal): Long = x.longValue - def toFloat(x: BigDecimal): Float = x.floatValue - def toDouble(x: BigDecimal): Double = x.doubleValue - } - - trait BigDecimalIsFractional extends BigDecimalIsConflicted with Fractional[BigDecimal] { - def div(x: BigDecimal, y: BigDecimal): BigDecimal = x / y - } - trait BigDecimalAsIfIntegral extends BigDecimalIsConflicted with Integral[BigDecimal] { - def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x quot y - def rem(x: BigDecimal, y: BigDecimal): BigDecimal = x remainder y - } - - // For Double and BigDecimal we offer implicit Fractional objects, but also one - // which acts like an Integral type, which is useful in NumericRange. - implicit object BigDecimalIsFractional extends BigDecimalIsFractional with Ordering.BigDecimalOrdering - object BigDecimalAsIfIntegral extends BigDecimalAsIfIntegral with Ordering.BigDecimalOrdering - - implicit object DoubleIsFractional extends DoubleIsFractional with Ordering.DoubleOrdering - object DoubleAsIfIntegral extends DoubleAsIfIntegral with Ordering.DoubleOrdering -} - -trait Numeric[T] extends Ordering[T] { - def plus(x: T, y: T): T - def minus(x: T, y: T): T - def times(x: T, y: T): T - def negate(x: T): T - def fromInt(x: Int): T - def toInt(x: T): Int - def toLong(x: T): Long - def toFloat(x: T): Float - def toDouble(x: T): Double - - def zero = fromInt(0) - def one = fromInt(1) - - def abs(x: T): T = if (lt(x, zero)) negate(x) else x - def signum(x: T): Int = - if (lt(x, zero)) -1 - else if (gt(x, zero)) 1 - else 0 - - class NumericOps(lhs: T) { - def +(rhs: T) = plus(lhs, rhs) - def -(rhs: T) = minus(lhs, rhs) - def *(rhs: T) = times(lhs, rhs) - def unary_- = negate(lhs) - def abs: T = Numeric.this.abs(lhs) - def signum: Int = Numeric.this.signum(lhs) - def toInt: Int = Numeric.this.toInt(lhs) - def toLong: Long = Numeric.this.toLong(lhs) - def toFloat: Float = Numeric.this.toFloat(lhs) - def toDouble: Double = Numeric.this.toDouble(lhs) - } - implicit def mkNumericOps(lhs: T): NumericOps = new NumericOps(lhs) -} diff --git a/tests/scala2-library/src/library/scala/math/Ordered.scala b/tests/scala2-library/src/library/scala/math/Ordered.scala deleted file mode 100644 index 51f2765a63c2..000000000000 --- a/tests/scala2-library/src/library/scala/math/Ordered.scala +++ /dev/null @@ -1,99 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -import scala.language.implicitConversions - -/** A trait for data that have a single, natural ordering. See - * [[scala.math.Ordering]] before using this trait for - * more information about whether to use [[scala.math.Ordering]] instead. - * - * Classes that implement this trait can be sorted with - * [[scala.util.Sorting]] and can be compared with standard comparison operators - * (e.g. > and <). 
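// --- Editorial sketch, not part of the deleted files: writing code that is generic
// --- over the Numeric / Fractional type classes removed above, using their
// --- documented `Implicits` imports for infix syntax. The helper names sumAll and
// --- average are made up for the example.
import scala.math.{Fractional, Numeric}

object NumericSketch {
  def sumAll[T](xs: List[T])(implicit num: Numeric[T]): T = {
    import Numeric.Implicits._          // adds infix +, -, * for any T with a Numeric
    xs.foldLeft(num.zero)(_ + _)
  }

  def average[T](xs: List[T])(implicit frac: Fractional[T]): T = {
    import Fractional.Implicits._       // additionally adds infix /
    sumAll(xs) / frac.fromInt(xs.size)
  }

  def main(args: Array[String]): Unit = {
    println(sumAll(List(1, 2, 3)))        // 6
    println(average(List(1.0, 2.0, 4.0))) // ≈ 2.33
  }
}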
- * - * Ordered should be used for data with a single, natural ordering (like - * integers) while Ordering allows for multiple ordering implementations. - * An Ordering instance will be implicitly created if necessary. - * - * [[scala.math.Ordering]] is an alternative to this trait that allows multiple orderings to be - * defined for the same type. - * - * [[scala.math.PartiallyOrdered]] is an alternative to this trait for partially ordered data. - * - * For example, create a simple class that implements `Ordered` and then sort it with [[scala.util.Sorting]]: - * {{{ - * case class OrderedClass(n:Int) extends Ordered[OrderedClass] { - * def compare(that: OrderedClass) = this.n - that.n - * } - * - * val x = Array(OrderedClass(1), OrderedClass(5), OrderedClass(3)) - * scala.util.Sorting.quickSort(x) - * x - * }}} - * - * It is important that the `equals` method for an instance of `Ordered[A]` be consistent with the - * compare method. However, due to limitations inherent in the type erasure semantics, there is no - * reasonable way to provide a default implementation of equality for instances of `Ordered[A]`. - * Therefore, if you need to be able to use equality on an instance of `Ordered[A]` you must - * provide it yourself either when inheriting or instantiating. - * - * It is important that the `hashCode` method for an instance of `Ordered[A]` be consistent with - * the `compare` method. However, it is not possible to provide a sensible default implementation. - * Therefore, if you need to be able compute the hash of an instance of `Ordered[A]` you must - * provide it yourself either when inheriting or instantiating. - * - * @see [[scala.math.Ordering]], [[scala.math.PartiallyOrdered]] - * @author Martin Odersky - * @version 1.1, 2006-07-24 - */ -trait Ordered[A] extends Any with java.lang.Comparable[A] { - - /** Result of comparing `this` with operand `that`. - * - * Implement this method to determine how instances of A will be sorted. - * - * Returns `x` where: - * - * - `x < 0` when `this < that` - * - * - `x == 0` when `this == that` - * - * - `x > 0` when `this > that` - * - */ - def compare(that: A): Int - - /** Returns true if `this` is less than `that` - */ - def < (that: A): Boolean = (this compare that) < 0 - - /** Returns true if `this` is greater than `that`. - */ - def > (that: A): Boolean = (this compare that) > 0 - - /** Returns true if `this` is less than or equal to `that`. - */ - def <= (that: A): Boolean = (this compare that) <= 0 - - /** Returns true if `this` is greater than or equal to `that`. - */ - def >= (that: A): Boolean = (this compare that) >= 0 - - /** Result of comparing `this` with operand `that`. 
- */ - def compareTo(that: A): Int = compare(that) -} - -object Ordered { - /** Lens from `Ordering[T]` to `Ordered[T]` */ - implicit def orderingToOrdered[T](x: T)(implicit ord: Ordering[T]): Ordered[T] = - new Ordered[T] { def compare(that: T): Int = ord.compare(x, that) } -} diff --git a/tests/scala2-library/src/library/scala/math/Ordering.scala b/tests/scala2-library/src/library/scala/math/Ordering.scala deleted file mode 100644 index dde3230529ce..000000000000 --- a/tests/scala2-library/src/library/scala/math/Ordering.scala +++ /dev/null @@ -1,502 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -import java.util.Comparator -import scala.language.{implicitConversions, higherKinds} - -/** Ordering is a trait whose instances each represent a strategy for sorting - * instances of a type. - * - * Ordering's companion object defines many implicit objects to deal with - * subtypes of AnyVal (e.g. Int, Double), String, and others. - * - * To sort instances by one or more member variables, you can take advantage - * of these built-in orderings using Ordering.by and Ordering.on: - * - * {{{ - * import scala.util.Sorting - * val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3)) - * - * // sort by 2nd element - * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)) - * - * // sort by the 3rd element, then 1st - * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1))) - * }}} - * - * An Ordering[T] is implemented by specifying compare(a:T, b:T), which - * decides how to order two instances a and b. Instances of Ordering[T] can be - * used by things like scala.util.Sorting to sort collections like Array[T]. - * - * For example: - * - * {{{ - * import scala.util.Sorting - * - * case class Person(name:String, age:Int) - * val people = Array(Person("bob", 30), Person("ann", 32), Person("carl", 19)) - * - * // sort by age - * object AgeOrdering extends Ordering[Person] { - * def compare(a:Person, b:Person) = a.age compare b.age - * } - * Sorting.quickSort(people)(AgeOrdering) - * }}} - * - * This trait and scala.math.Ordered both provide this same functionality, but - * in different ways. A type T can be given a single way to order itself by - * extending Ordered. Using Ordering, this same type may be sorted in many - * other ways. Ordered and Ordering both provide implicits allowing them to be - * used interchangeably. - * - * You can import scala.math.Ordering.Implicits to gain access to other - * implicit orderings. - * - * @author Geoffrey Washburn - * @version 0.9.5, 2008-04-15 - * @since 2.7 - * @see [[scala.math.Ordered]], [[scala.util.Sorting]] - */ -@annotation.implicitNotFound(msg = "No implicit Ordering defined for ${T}.") -trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable { - outer => - - /** Returns whether a comparison between `x` and `y` is defined, and if so - * the result of `compare(x, y)`. - */ - def tryCompare(x: T, y: T) = Some(compare(x, y)) - - /** Returns an integer whose sign communicates how x compares to y. - * - * The result sign has the following meaning: - * - * - negative if x < y - * - positive if x > y - * - zero otherwise (if x == y) - */ - def compare(x: T, y: T): Int - - /** Return true if `x` <= `y` in the ordering. 
*/ - override def lteq(x: T, y: T): Boolean = compare(x, y) <= 0 - - /** Return true if `x` >= `y` in the ordering. */ - override def gteq(x: T, y: T): Boolean = compare(x, y) >= 0 - - /** Return true if `x` < `y` in the ordering. */ - override def lt(x: T, y: T): Boolean = compare(x, y) < 0 - - /** Return true if `x` > `y` in the ordering. */ - override def gt(x: T, y: T): Boolean = compare(x, y) > 0 - - /** Return true if `x` == `y` in the ordering. */ - override def equiv(x: T, y: T): Boolean = compare(x, y) == 0 - - /** Return `x` if `x` >= `y`, otherwise `y`. */ - def max(x: T, y: T): T = if (gteq(x, y)) x else y - - /** Return `x` if `x` <= `y`, otherwise `y`. */ - def min(x: T, y: T): T = if (lteq(x, y)) x else y - - /** Return the opposite ordering of this one. */ - override def reverse: Ordering[T] = new Ordering[T] { - override def reverse = outer - def compare(x: T, y: T) = outer.compare(y, x) - } - - /** Given f, a function from U into T, creates an Ordering[U] whose compare - * function is equivalent to: - * - * {{{ - * def compare(x:U, y:U) = Ordering[T].compare(f(x), f(y)) - * }}} - */ - def on[U](f: U => T): Ordering[U] = new Ordering[U] { - def compare(x: U, y: U) = outer.compare(f(x), f(y)) - } - - /** This inner class defines comparison operators available for `T`. */ - class Ops(lhs: T) { - def <(rhs: T) = lt(lhs, rhs) - def <=(rhs: T) = lteq(lhs, rhs) - def >(rhs: T) = gt(lhs, rhs) - def >=(rhs: T) = gteq(lhs, rhs) - def equiv(rhs: T) = Ordering.this.equiv(lhs, rhs) - def max(rhs: T): T = Ordering.this.max(lhs, rhs) - def min(rhs: T): T = Ordering.this.min(lhs, rhs) - } - - /** This implicit method augments `T` with the comparison operators defined - * in `scala.math.Ordering.Ops`. - */ - implicit def mkOrderingOps(lhs: T): Ops = new Ops(lhs) -} - -trait LowPriorityOrderingImplicits { - /** This would conflict with all the nice implicit Orderings - * available, but thanks to the magic of prioritized implicits - * via subclassing we can make `Ordered[A] => Ordering[A]` only - * turn up if nothing else works. Since `Ordered[A]` extends - * `Comparable[A]` anyway, we can throw in some Java interop too. - */ - implicit def ordered[A <% Comparable[A]]: Ordering[A] = new Ordering[A] { - def compare(x: A, y: A): Int = x compareTo y - } - implicit def comparatorToOrdering[A](implicit cmp: Comparator[A]): Ordering[A] = new Ordering[A] { - def compare(x: A, y: A) = cmp.compare(x, y) - } -} - -/** This is the companion object for the [[scala.math.Ordering]] trait. - * - * It contains many implicit orderings as well as well as methods to construct - * new orderings. - */ -object Ordering extends LowPriorityOrderingImplicits { - def apply[T](implicit ord: Ordering[T]) = ord - - trait ExtraImplicits { - /** Not in the standard scope due to the potential for divergence: - * For instance `implicitly[Ordering[Any]]` diverges in its presence. - */ - implicit def seqDerivedOrdering[CC[X] <: scala.collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] = - new Ordering[CC[T]] { - def compare(x: CC[T], y: CC[T]): Int = { - val xe = x.iterator - val ye = y.iterator - - while (xe.hasNext && ye.hasNext) { - val res = ord.compare(xe.next(), ye.next()) - if (res != 0) return res - } - - Ordering.Boolean.compare(xe.hasNext, ye.hasNext) - } - } - - /** This implicit creates a conversion from any value for which an - * implicit `Ordering` exists to the class which creates infix operations. 
- * With it imported, you can write methods as follows: - * - * {{{ - * def lessThan[T: Ordering](x: T, y: T) = x < y - * }}} - */ - implicit def infixOrderingOps[T](x: T)(implicit ord: Ordering[T]): Ordering[T]#Ops = new ord.Ops(x) - } - - /** An object containing implicits which are not in the default scope. */ - object Implicits extends ExtraImplicits { } - - /** Construct an Ordering[T] given a function `lt`. */ - def fromLessThan[T](cmp: (T, T) => Boolean): Ordering[T] = new Ordering[T] { - def compare(x: T, y: T) = if (cmp(x, y)) -1 else if (cmp(y, x)) 1 else 0 - // overrides to avoid multiple comparisons - override def lt(x: T, y: T): Boolean = cmp(x, y) - override def gt(x: T, y: T): Boolean = cmp(y, x) - override def gteq(x: T, y: T): Boolean = !cmp(x, y) - override def lteq(x: T, y: T): Boolean = !cmp(y, x) - } - - /** Given f, a function from T into S, creates an Ordering[T] whose compare - * function is equivalent to: - * - * {{{ - * def compare(x:T, y:T) = Ordering[S].compare(f(x), f(y)) - * }}} - * - * This function is an analogue to Ordering.on where the Ordering[S] - * parameter is passed implicitly. - */ - def by[T, S](f: T => S)(implicit ord: Ordering[S]): Ordering[T] = new Ordering[T] { - def compare(x: T, y: T) = ord.compare(f(x), f(y)) - override def lt(x: T, y: T): Boolean = ord.lt(f(x), f(y)) - override def gt(x: T, y: T): Boolean = ord.gt(f(x), f(y)) - override def gteq(x: T, y: T): Boolean = ord.gteq(f(x), f(y)) - override def lteq(x: T, y: T): Boolean = ord.lteq(f(x), f(y)) - } - - trait UnitOrdering extends Ordering[Unit] { - def compare(x: Unit, y: Unit) = 0 - } - implicit object Unit extends UnitOrdering - - trait BooleanOrdering extends Ordering[Boolean] { - def compare(x: Boolean, y: Boolean) = java.lang.Boolean.compare(x, y) - } - implicit object Boolean extends BooleanOrdering - - trait ByteOrdering extends Ordering[Byte] { - def compare(x: Byte, y: Byte) = java.lang.Byte.compare(x, y) - } - implicit object Byte extends ByteOrdering - - trait CharOrdering extends Ordering[Char] { - def compare(x: Char, y: Char) = java.lang.Character.compare(x, y) - } - implicit object Char extends CharOrdering - - trait ShortOrdering extends Ordering[Short] { - def compare(x: Short, y: Short) = java.lang.Short.compare(x, y) - } - implicit object Short extends ShortOrdering - - trait IntOrdering extends Ordering[Int] { - def compare(x: Int, y: Int) = java.lang.Integer.compare(x, y) - } - implicit object Int extends IntOrdering - - trait LongOrdering extends Ordering[Long] { - def compare(x: Long, y: Long) = java.lang.Long.compare(x, y) - } - implicit object Long extends LongOrdering - - trait FloatOrdering extends Ordering[Float] { - outer => - - def compare(x: Float, y: Float) = java.lang.Float.compare(x, y) - - override def lteq(x: Float, y: Float): Boolean = x <= y - override def gteq(x: Float, y: Float): Boolean = x >= y - override def lt(x: Float, y: Float): Boolean = x < y - override def gt(x: Float, y: Float): Boolean = x > y - override def equiv(x: Float, y: Float): Boolean = x == y - override def max(x: Float, y: Float): Float = math.max(x, y) - override def min(x: Float, y: Float): Float = math.min(x, y) - - override def reverse: Ordering[Float] = new FloatOrdering { - override def reverse = outer - override def compare(x: Float, y: Float) = outer.compare(y, x) - - override def lteq(x: Float, y: Float): Boolean = outer.lteq(y, x) - override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x) - override def lt(x: Float, y: Float): Boolean = outer.lt(y, x) - 
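A short sketch of `Ordering.fromLessThan` and of the extra implicits that, as the deleted comments above explain, are deliberately kept out of the default scope (`Ordering.Implicits`); names are illustrative:

object FromLessThanSketch {
  def main(args: Array[String]): Unit = {
    // fromLessThan builds a full Ordering from a single predicate.
    val desc: Ordering[Int] = Ordering.fromLessThan[Int](_ > _)
    println(List(1, 3, 2).sorted(desc))        // List(3, 2, 1)

    // Opt in to seqDerivedOrdering and infixOrderingOps explicitly.
    import Ordering.Implicits._
    def lessThan[T: Ordering](x: T, y: T): Boolean = x < y
    println(lessThan(List(1, 2), List(1, 3)))  // true, compared element by element
  }
}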
override def gt(x: Float, y: Float): Boolean = outer.gt(y, x) - override def min(x: Float, y: Float): Float = outer.max(x, y) - override def max(x: Float, y: Float): Float = outer.min(x, y) - - } - } - implicit object Float extends FloatOrdering - - trait DoubleOrdering extends Ordering[Double] { - outer => - - def compare(x: Double, y: Double) = java.lang.Double.compare(x, y) - - override def lteq(x: Double, y: Double): Boolean = x <= y - override def gteq(x: Double, y: Double): Boolean = x >= y - override def lt(x: Double, y: Double): Boolean = x < y - override def gt(x: Double, y: Double): Boolean = x > y - override def equiv(x: Double, y: Double): Boolean = x == y - override def max(x: Double, y: Double): Double = math.max(x, y) - override def min(x: Double, y: Double): Double = math.min(x, y) - - override def reverse: Ordering[Double] = new DoubleOrdering { - override def reverse = outer - override def compare(x: Double, y: Double) = outer.compare(y, x) - - override def lteq(x: Double, y: Double): Boolean = outer.lteq(y, x) - override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x) - override def lt(x: Double, y: Double): Boolean = outer.lt(y, x) - override def gt(x: Double, y: Double): Boolean = outer.gt(y, x) - override def min(x: Double, y: Double): Double = outer.max(x, y) - override def max(x: Double, y: Double): Double = outer.min(x, y) - } - } - implicit object Double extends DoubleOrdering - - trait BigIntOrdering extends Ordering[BigInt] { - def compare(x: BigInt, y: BigInt) = x.compare(y) - } - implicit object BigInt extends BigIntOrdering - - trait BigDecimalOrdering extends Ordering[BigDecimal] { - def compare(x: BigDecimal, y: BigDecimal) = x.compare(y) - } - implicit object BigDecimal extends BigDecimalOrdering - - trait StringOrdering extends Ordering[String] { - def compare(x: String, y: String) = x.compareTo(y) - } - implicit object String extends StringOrdering - - trait OptionOrdering[T] extends Ordering[Option[T]] { - def optionOrdering: Ordering[T] - def compare(x: Option[T], y: Option[T]) = (x, y) match { - case (None, None) => 0 - case (None, _) => -1 - case (_, None) => 1 - case (Some(x), Some(y)) => optionOrdering.compare(x, y) - } - } - implicit def Option[T](implicit ord: Ordering[T]): Ordering[Option[T]] = - new OptionOrdering[T] { val optionOrdering = ord } - - implicit def Iterable[T](implicit ord: Ordering[T]): Ordering[Iterable[T]] = - new Ordering[Iterable[T]] { - def compare(x: Iterable[T], y: Iterable[T]): Int = { - val xe = x.iterator - val ye = y.iterator - - while (xe.hasNext && ye.hasNext) { - val res = ord.compare(xe.next(), ye.next()) - if (res != 0) return res - } - - Boolean.compare(xe.hasNext, ye.hasNext) - } - } - - implicit def Tuple2[T1, T2](implicit ord1: Ordering[T1], ord2: Ordering[T2]): Ordering[(T1, T2)] = - new Ordering[(T1, T2)]{ - def compare(x: (T1, T2), y: (T1, T2)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - 0 - } - } - - implicit def Tuple3[T1, T2, T3](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3]) : Ordering[(T1, T2, T3)] = - new Ordering[(T1, T2, T3)]{ - def compare(x: (T1, T2, T3), y: (T1, T2, T3)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - 0 - } - } - - implicit def 
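The implicit instances deleted in this hunk give `Option` and tuple values a total order with no extra code; a quick illustration of their semantics (`None` sorts first, tuples compare lexicographically):

object BuiltInInstances {
  def main(args: Array[String]): Unit = {
    println(List(Option(2), None, Option(1)).sorted)     // List(None, Some(1), Some(2))
    println(List((1, "b"), (1, "a"), (0, "z")).sorted)   // List((0,z), (1,a), (1,b))
  }
}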
Tuple4[T1, T2, T3, T4](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4]) : Ordering[(T1, T2, T3, T4)] = - new Ordering[(T1, T2, T3, T4)]{ - def compare(x: (T1, T2, T3, T4), y: (T1, T2, T3, T4)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - 0 - } - } - - implicit def Tuple5[T1, T2, T3, T4, T5](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5]): Ordering[(T1, T2, T3, T4, T5)] = - new Ordering[(T1, T2, T3, T4, T5)]{ - def compare(x: (T1, T2, T3, T4, T5), y: (T1, T2, T3, T4, T5)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - 0 - } - } - - implicit def Tuple6[T1, T2, T3, T4, T5, T6](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6]): Ordering[(T1, T2, T3, T4, T5, T6)] = - new Ordering[(T1, T2, T3, T4, T5, T6)]{ - def compare(x: (T1, T2, T3, T4, T5, T6), y: (T1, T2, T3, T4, T5, T6)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - 0 - } - } - - implicit def Tuple7[T1, T2, T3, T4, T5, T6, T7](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7]): Ordering[(T1, T2, T3, T4, T5, T6, T7)] = - new Ordering[(T1, T2, T3, T4, T5, T6, T7)]{ - def compare(x: (T1, T2, T3, T4, T5, T6, T7), y: (T1, T2, T3, T4, T5, T6, T7)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - val compare7 = ord7.compare(x._7, y._7) - if (compare7 != 0) return compare7 - 0 - } - } - - implicit def Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7], ord8: Ordering[T8]): Ordering[(T1, T2, T3, T4, T5, T6, T7, T8)] = - new Ordering[(T1, T2, T3, T4, T5, T6, T7, T8)]{ - def compare(x: (T1, T2, T3, T4, T5, T6, T7, T8), y: (T1, T2, T3, T4, T5, T6, T7, T8)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 
= ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - val compare7 = ord7.compare(x._7, y._7) - if (compare7 != 0) return compare7 - val compare8 = ord8.compare(x._8, y._8) - if (compare8 != 0) return compare8 - 0 - } - } - - implicit def Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7], ord8 : Ordering[T8], ord9: Ordering[T9]): Ordering[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] = - new Ordering[(T1, T2, T3, T4, T5, T6, T7, T8, T9)]{ - def compare(x: (T1, T2, T3, T4, T5, T6, T7, T8, T9), y: (T1, T2, T3, T4, T5, T6, T7, T8, T9)): Int = { - val compare1 = ord1.compare(x._1, y._1) - if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - val compare7 = ord7.compare(x._7, y._7) - if (compare7 != 0) return compare7 - val compare8 = ord8.compare(x._8, y._8) - if (compare8 != 0) return compare8 - val compare9 = ord9.compare(x._9, y._9) - if (compare9 != 0) return compare9 - 0 - } - } - -} diff --git a/tests/scala2-library/src/library/scala/math/PartialOrdering.scala b/tests/scala2-library/src/library/scala/math/PartialOrdering.scala deleted file mode 100644 index 8d7fc3253550..000000000000 --- a/tests/scala2-library/src/library/scala/math/PartialOrdering.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -/** A trait for representing partial orderings. It is important to - * distinguish between a type that has a partial order and a representation - * of partial ordering on some type. This trait is for representing the - * latter. - * - * A [[http://en.wikipedia.org/wiki/Partial_order partial ordering]] is a - * binary relation on a type `T`, exposed as the `lteq` method of this trait. - * This relation must be: - * - * - reflexive: `lteq(x, x) == '''true'''`, for any `x` of type `T`. - * - anti-symmetric: if `lteq(x, y) == '''true'''` and - * `lteq(y, x) == '''true'''` - * then `equiv(x, y) == '''true'''`, for any `x` and `y` of type `T`. - * - transitive: if `lteq(x, y) == '''true'''` and - * `lteq(y, z) == '''true'''` then `lteq(x, z) == '''true'''`, - * for any `x`, `y`, and `z` of type `T`. - * - * Additionally, a partial ordering induces an - * [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] - * on a type `T`: `x` and `y` of type `T` are equivalent if and only if - * `lteq(x, y) && lteq(y, x) == '''true'''`. This equivalence relation is - * exposed as the `equiv` method, inherited from the - * [[scala.math.Equiv Equiv]] trait. 
- * - * @author Geoffrey Washburn - * @version 1.0, 2008-04-0-3 - * @since 2.7 - */ - -trait PartialOrdering[T] extends Equiv[T] { - outer => - - /** Result of comparing `x` with operand `y`. - * Returns `None` if operands are not comparable. - * If operands are comparable, returns `Some(r)` where - * - `r < 0` iff `x < y` - * - `r == 0` iff `x == y` - * - `r > 0` iff `x > y` - */ - def tryCompare(x: T, y: T): Option[Int] - - /** Returns `'''true'''` iff `x` comes before `y` in the ordering. - */ - def lteq(x: T, y: T): Boolean - - /** Returns `'''true'''` iff `y` comes before `x` in the ordering. - */ - def gteq(x: T, y: T): Boolean = lteq(y, x) - - /** Returns `'''true'''` iff `x` comes before `y` in the ordering - * and is not the same as `y`. - */ - def lt(x: T, y: T): Boolean = lteq(x, y) && !equiv(x, y) - - /** Returns `'''true'''` iff `y` comes before `x` in the ordering - * and is not the same as `x`. - */ - def gt(x: T, y: T): Boolean = gteq(x, y) && !equiv(x, y) - - /** Returns `'''true'''` iff `x` is equivalent to `y` in the ordering. - */ - def equiv(x: T, y: T): Boolean = lteq(x,y) && lteq(y,x) - - def reverse : PartialOrdering[T] = new PartialOrdering[T] { - override def reverse = outer - def lteq(x: T, y: T) = outer.lteq(y, x) - def tryCompare(x: T, y: T) = outer.tryCompare(y, x) - } -} diff --git a/tests/scala2-library/src/library/scala/math/PartiallyOrdered.scala b/tests/scala2-library/src/library/scala/math/PartiallyOrdered.scala deleted file mode 100644 index f58210d6a7f7..000000000000 --- a/tests/scala2-library/src/library/scala/math/PartiallyOrdered.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package math - -/** A class for partially ordered data. - * - * @author Martin Odersky - * @version 1.0, 23/04/2004 - */ -trait PartiallyOrdered[+A] { - - /** Result of comparing `'''this'''` with operand `that`. - * Returns `None` if operands are not comparable. 
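A hedged sketch of a lawful `PartialOrdering`, matching the contract spelled out in the deleted Scaladoc above: sets ordered by inclusion, where pairs that are not related by subset are incomparable. The `SubsetOrdering` and `SubsetDemo` names are illustrative:

object SubsetOrdering extends PartialOrdering[Set[Int]] {
  // x <= y iff x is a subset of y; this is reflexive, anti-symmetric, transitive.
  def lteq(x: Set[Int], y: Set[Int]): Boolean = x.subsetOf(y)
  def tryCompare(x: Set[Int], y: Set[Int]): Option[Int] = {
    val le = lteq(x, y)
    val ge = lteq(y, x)
    if (le && ge) Some(0)
    else if (le) Some(-1)
    else if (ge) Some(1)
    else None                       // incomparable: neither contains the other
  }
}

object SubsetDemo {
  def main(args: Array[String]): Unit = {
    println(SubsetOrdering.tryCompare(Set(1), Set(1, 2)))  // Some(-1): proper subset
    println(SubsetOrdering.tryCompare(Set(1), Set(2)))     // None: incomparable
    println(SubsetOrdering.equiv(Set(1, 2), Set(2, 1)))    // true, via inherited equiv
  }
}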
- * If operands are comparable, returns `Some(x)` where - * - `x < 0` iff `'''this''' < that` - * - `x == 0` iff `'''this''' == that` - * - `x > 0` iff `'''this''' > that` - */ - def tryCompareTo [B >: A <% PartiallyOrdered[B]](that: B): Option[Int] - - def < [B >: A <% PartiallyOrdered[B]](that: B): Boolean = - (this tryCompareTo that) match { - case Some(x) if x < 0 => true - case _ => false - } - def > [B >: A <% PartiallyOrdered[B]](that: B): Boolean = - (this tryCompareTo that) match { - case Some(x) if x > 0 => true - case _ => false - } - def <= [B >: A <% PartiallyOrdered[B]](that: B): Boolean = - (this tryCompareTo that) match { - case Some(x) if x <= 0 => true - case _ => false - } - def >= [B >: A <% PartiallyOrdered[B]](that: B): Boolean = - (this tryCompareTo that) match { - case Some(x) if x >= 0 => true - case _ => false - } -} diff --git a/tests/scala2-library/src/library/scala/math/ScalaNumber.java b/tests/scala2-library/src/library/scala/math/ScalaNumber.java deleted file mode 100644 index f03ba7bf081a..000000000000 --- a/tests/scala2-library/src/library/scala/math/ScalaNumber.java +++ /dev/null @@ -1,19 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.math; - -/** A marker class for Number types introduced by Scala - * @author Martin Odersky, Paul Phillips - * @version 2.8 - * @since 2.8 - */ -public abstract class ScalaNumber extends java.lang.Number { - protected abstract boolean isWhole(); - public abstract Object underlying(); -} diff --git a/tests/scala2-library/src/library/scala/math/ScalaNumericConversions.scala b/tests/scala2-library/src/library/scala/math/ScalaNumericConversions.scala deleted file mode 100644 index 0006133b13a8..000000000000 --- a/tests/scala2-library/src/library/scala/math/ScalaNumericConversions.scala +++ /dev/null @@ -1,119 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package math - -/** A slightly more specific conversion trait for classes which - * extend ScalaNumber (which excludes value classes.) - */ -trait ScalaNumericConversions extends ScalaNumber with ScalaNumericAnyConversions { - def underlying(): Object -} - -/** Conversions which present a consistent conversion interface - * across all the numeric types, suitable for use in value classes. - */ -trait ScalaNumericAnyConversions extends Any { - /** @return `'''true'''` if this number has no decimal component, `'''false'''` otherwise. */ - def isWhole(): Boolean - def underlying(): Any - - def byteValue(): Byte - def shortValue(): Short - def intValue(): Int - def longValue(): Long - def floatValue(): Float - def doubleValue(): Double - - /** Returns the value of this as a [[scala.Char]]. This may involve - * rounding or truncation. - */ - def toChar = intValue().toChar - - /** Returns the value of this as a [[scala.Byte]]. This may involve - * rounding or truncation. - */ - def toByte = byteValue() - - /** Returns the value of this as a [[scala.Short]]. This may involve - * rounding or truncation. - */ - def toShort = shortValue() - - /** Returns the value of this as an [[scala.Int]]. This may involve - * rounding or truncation. 
- */ - def toInt = intValue() - - /** Returns the value of this as a [[scala.Long]]. This may involve - * rounding or truncation. - */ - def toLong = longValue() - - /** Returns the value of this as a [[scala.Float]]. This may involve - * rounding or truncation. - */ - def toFloat = floatValue() - - /** Returns the value of this as a [[scala.Double]]. This may involve - * rounding or truncation. - */ - def toDouble = doubleValue() - - /** Returns `true` iff this has a zero fractional part, and is within the - * range of [[scala.Byte]] MinValue and MaxValue; otherwise returns `false`. - */ - def isValidByte = isWhole && (toInt == toByte) - - /** Returns `true` iff this has a zero fractional part, and is within the - * range of [[scala.Short]] MinValue and MaxValue; otherwise returns `false`. - */ - def isValidShort = isWhole && (toInt == toShort) - - /** Returns `true` iff this has a zero fractional part, and is within the - * range of [[scala.Int]] MinValue and MaxValue; otherwise returns `false`. - */ - def isValidInt = isWhole && (toLong == toInt) - - /** Returns `true` iff this has a zero fractional part, and is within the - * range of [[scala.Char]] MinValue and MaxValue; otherwise returns `false`. - */ - def isValidChar = isWhole && (toInt >= Char.MinValue && toInt <= Char.MaxValue) - - protected def unifiedPrimitiveHashcode() = { - val lv = toLong - if (lv >= Int.MinValue && lv <= Int.MaxValue) lv.toInt - else lv.## - } - - /** Should only be called after all known non-primitive - * types have been excluded. This method won't dispatch - * anywhere else after checking against the primitives - * to avoid infinite recursion between equals and this on - * unknown "Number" variants. - * - * Additionally, this should only be called if the numeric - * type is happy to be converted to Long, Float, and Double. - * If for instance a BigInt much larger than the Long range is - * sent here, it will claim equality with whatever Long is left - * in its lower 64 bits. Or a BigDecimal with more precision - * than Double can hold: same thing. There's no way given the - * interface available here to prevent this error. - */ - protected def unifiedPrimitiveEquals(x: Any) = x match { - case x: Char => isValidChar && (toInt == x.toInt) - case x: Byte => isValidByte && (toByte == x) - case x: Short => isValidShort && (toShort == x) - case x: Int => isValidInt && (toInt == x) - case x: Long => toLong == x - case x: Float => toFloat == x - case x: Double => toDouble == x - case _ => false - } -} diff --git a/tests/scala2-library/src/library/scala/math/package.scala b/tests/scala2-library/src/library/scala/math/package.scala deleted file mode 100644 index 546efef114f1..000000000000 --- a/tests/scala2-library/src/library/scala/math/package.scala +++ /dev/null @@ -1,327 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** The package object `scala.math` contains methods for performing basic - * numeric operations such as elementary exponential, logarithmic, root and - * trigonometric functions. - * - * All methods forward to [[java.lang.Math]] unless otherwise noted. - * - * @see [[java.lang.Math]] - * - * @groupname math-const Mathematical Constants - * @groupprio math-const 10 - * - * @groupname minmax Minimum and Maximum - * @groupdesc minmax Find the min or max of two numbers. 
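The `isValid*` range checks defined in the hunk above are easiest to see through `BigInt`, which mixes in `ScalaNumericConversions` in the standard library; a brief illustration:

object ValidityChecks {
  def main(args: Array[String]): Unit = {
    println(BigInt(100).isValidByte)           // true:  fits in -128..127
    println(BigInt(1000).isValidByte)          // false: outside Byte range
    println(BigInt(Long.MaxValue).isValidInt)  // false: outside Int range
    println(BigInt(65).isValidChar)            // true:  within 0..0xFFFF
  }
}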
Note: [[scala.collection.TraversableOnce]] has - * min and max methods which determine the min or max of a collection. - * @groupprio minmax 20 - * - * @groupname rounding Rounding - * @groupprio rounding 30 - * - * @groupname explog Exponential and Logarithmic - * @groupprio explog 40 - * - * @groupname trig Trigonometric - * @groupdesc trig Arguments in radians - * @groupprio trig 50 - * - * @groupname angle-conversion Angular Measurement Conversion - * @groupprio angle-conversion 60 - * - * @groupname hyperbolic Hyperbolic - * @groupprio hyperbolic 70 - * - * @groupname abs Absolute Values - * @groupdesc abs Determine the magnitude of a value by discarding the sign. Results are >= 0. - * @groupprio abs 80 - * - * @groupname signum Signs - * @groupdesc signum Extract the sign of a value. Results are -1, 0 or 1. - * Note that these are not pure forwarders to the java versions. - * In particular, the return type of java.lang.Long.signum is Int, - * but here it is widened to Long so that each overloaded variant - * will return the same numeric type it is passed. - * @groupprio signum 90 - * - * @groupname root-extraction Root Extraction - * @groupprio root-extraction 100 - * - * @groupname polar-coords Polar Coordinates - * @groupprio polar-coords 110 - * - * @groupname ulp Unit of Least Precision - * @groupprio ulp 120 - * - * @groupname randomisation Pseudo Random Number Generation - * @groupprio randomisation 130 - */ -package object math { - /** The `Double` value that is closer than any other to `e`, the base of - * the natural logarithms. - * @group math-const - */ - @inline final val E = java.lang.Math.E - - /** The `Double` value that is closer than any other to `pi`, the ratio of - * the circumference of a circle to its diameter. - * @group math-const - */ - @inline final val Pi = java.lang.Math.PI - - /** Returns a `Double` value with a positive sign, greater than or equal - * to `0.0` and less than `1.0`. - * - * @group randomisation - */ - def random(): Double = java.lang.Math.random() - - /** @group trig */ - def sin(x: Double): Double = java.lang.Math.sin(x) - /** @group trig */ - def cos(x: Double): Double = java.lang.Math.cos(x) - /** @group trig */ - def tan(x: Double): Double = java.lang.Math.tan(x) - /** @group trig */ - def asin(x: Double): Double = java.lang.Math.asin(x) - /** @group trig */ - def acos(x: Double): Double = java.lang.Math.acos(x) - /** @group trig */ - def atan(x: Double): Double = java.lang.Math.atan(x) - - /** Converts an angle measured in degrees to an approximately equivalent - * angle measured in radians. - * - * @param x an angle, in degrees - * @return the measurement of the angle `x` in radians. - * @group angle-conversion - */ - def toRadians(x: Double): Double = java.lang.Math.toRadians(x) - - /** Converts an angle measured in radians to an approximately equivalent - * angle measured in degrees. - * - * @param x angle, in radians - * @return the measurement of the angle `x` in degrees. - * @group angle-conversion - */ - def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x) - - /** Converts rectangular coordinates `(x, y)` to polar `(r, theta)`. - * - * @param x the ordinate coordinate - * @param y the abscissa coordinate - * @return the ''theta'' component of the point `(r, theta)` in polar - * coordinates that corresponds to the point `(x, y)` in - * Cartesian coordinates. 
- * @group polar-coords - */ - def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x) - - /** Returns the square root of the sum of the squares of both given `Double` - * values without intermediate underflow or overflow. - * - * The ''r'' component of the point `(r, theta)` in polar - * coordinates that corresponds to the point `(x, y)` in - * Cartesian coordinates. - * @group polar-coords - */ - def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y) - - // ----------------------------------------------------------------------- - // rounding functions - // ----------------------------------------------------------------------- - - /** @group rounding */ - def ceil(x: Double): Double = java.lang.Math.ceil(x) - /** @group rounding */ - def floor(x: Double): Double = java.lang.Math.floor(x) - - /** Returns the `Double` value that is closest in value to the - * argument and is equal to a mathematical integer. - * - * @param x a `Double` value - * @return the closest floating-point value to a that is equal to a - * mathematical integer. - * @group rounding - */ - def rint(x: Double): Double = java.lang.Math.rint(x) - - /** There is no reason to round a `Long`, but this method prevents unintended conversion to `Float` followed by rounding to `Int`. - * - * @note Does not forward to [[java.lang.Math]] - * @group rounding - */ - @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?", "2.11.0") - def round(x: Long): Long = x - - /** Returns the closest `Int` to the argument. - * - * @param x a floating-point value to be rounded to a `Int`. - * @return the value of the argument rounded to the nearest `Int` value. - * @group rounding - */ - def round(x: Float): Int = java.lang.Math.round(x) - - /** Returns the closest `Long` to the argument. - * - * @param x a floating-point value to be rounded to a `Long`. - * @return the value of the argument rounded to the nearest`long` value. 
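A small demonstration of the rounding and polar helpers in this hunk: `rint` rounds half to even while `round` rounds half up, and `hypot` avoids the intermediate overflow of the naive formula. Printed values are approximate where floating-point rounding applies:

object RoundingAndPolar {
  def main(args: Array[String]): Unit = {
    println(math.rint(2.5))              // 2.0: half rounds to the even neighbour
    println(math.round(2.5f))            // 3:   half rounds up
    println(math.hypot(3e200, 4e200))    // 5.0E200, no intermediate overflow
    println(math.sqrt(3e200 * 3e200 + 4e200 * 4e200))  // Infinity: x*x overflows first
  }
}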
- * @group rounding - */ - def round(x: Double): Long = java.lang.Math.round(x) - - /** @group abs */ - def abs(x: Int): Int = java.lang.Math.abs(x) - /** @group abs */ - def abs(x: Long): Long = java.lang.Math.abs(x) - /** @group abs */ - def abs(x: Float): Float = java.lang.Math.abs(x) - /** @group abs */ - def abs(x: Double): Double = java.lang.Math.abs(x) - - /** @group minmax */ - def max(x: Int, y: Int): Int = java.lang.Math.max(x, y) - /** @group minmax */ - def max(x: Long, y: Long): Long = java.lang.Math.max(x, y) - /** @group minmax */ - def max(x: Float, y: Float): Float = java.lang.Math.max(x, y) - /** @group minmax */ - def max(x: Double, y: Double): Double = java.lang.Math.max(x, y) - - /** @group minmax */ - def min(x: Int, y: Int): Int = java.lang.Math.min(x, y) - /** @group minmax */ - def min(x: Long, y: Long): Long = java.lang.Math.min(x, y) - /** @group minmax */ - def min(x: Float, y: Float): Float = java.lang.Math.min(x, y) - /** @group minmax */ - def min(x: Double, y: Double): Double = java.lang.Math.min(x, y) - - /** @group signum - * @note Forwards to [[java.lang.Integer]] - */ - def signum(x: Int): Int = java.lang.Integer.signum(x) - /** @group signum - * @note Forwards to [[java.lang.Long]] - */ - def signum(x: Long): Long = java.lang.Long.signum(x) - /** @group signum */ - def signum(x: Float): Float = java.lang.Math.signum(x) - /** @group signum */ - def signum(x: Double): Double = java.lang.Math.signum(x) - - // ----------------------------------------------------------------------- - // root functions - // ----------------------------------------------------------------------- - - /** Returns the square root of a `Double` value. - * - * @param x the number to take the square root of - * @return the value √x - * @group root-extraction - */ - def sqrt(x: Double): Double = java.lang.Math.sqrt(x) - - /** Returns the cube root of the given `Double` value. - * - * @param x the number to take the cube root of - * @return the value ∛x - * @group root-extraction - */ - def cbrt(x: Double): Double = java.lang.Math.cbrt(x) - - // ----------------------------------------------------------------------- - // exponential functions - // ----------------------------------------------------------------------- - - /** Returns the value of the first argument raised to the power of the - * second argument. - * - * @param x the base. - * @param y the exponent. - * @return the value `x^y^`. - * @group explog - */ - def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y) - - /** Returns Euler's number `e` raised to the power of a `Double` value. - * - * @param x the exponent to raise `e` to. - * @return the value `e^a^`, where `e` is the base of the natural - * logarithms. - * @group explog - */ - def exp(x: Double): Double = java.lang.Math.exp(x) - - /** Returns `exp(x) - 1`. - * @group explog - */ - def expm1(x: Double): Double = java.lang.Math.expm1(x) - - // ----------------------------------------------------------------------- - // logarithmic functions - // ----------------------------------------------------------------------- - - /** Returns the natural logarithm of a `Double` value. - * - * @param x the number to take the natural logarithm of - * @return the value `logₑ(x)` where `e` is Eulers number - * @group explog - */ - def log(x: Double): Double = java.lang.Math.log(x) - - /** Returns the natural logarithm of the sum of the given `Double` value and 1. 
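Two details from this hunk are worth illustrating: `signum` returns the same numeric type it is given (widened relative to the `java.lang` forwardees, as the group description above notes), and `expm1` keeps precision for small arguments where `exp(x) - 1` cancels. Values in the comments are approximate:

object SignumAndPrecision {
  def main(args: Array[String]): Unit = {
    val s: Long = math.signum(-5L)   // Long result; java.lang.Long.signum returns Int
    println(s)                       // -1

    val x = 1e-15
    println(math.exp(x) - 1.0)       // ~1.11e-15: catastrophic cancellation
    println(math.expm1(x))           // ~1.00e-15: accurate
  }
}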
- * @group explog - */ - def log1p(x: Double): Double = java.lang.Math.log1p(x) - - /** Returns the base 10 logarithm of the given `Double` value. - * @group explog - */ - def log10(x: Double): Double = java.lang.Math.log10(x) - - // ----------------------------------------------------------------------- - // trigonometric functions - // ----------------------------------------------------------------------- - - /** Returns the hyperbolic sine of the given `Double` value. - * @group hyperbolic - */ - def sinh(x: Double): Double = java.lang.Math.sinh(x) - - /** Returns the hyperbolic cosine of the given `Double` value. - * @group hyperbolic - */ - def cosh(x: Double): Double = java.lang.Math.cosh(x) - - /** Returns the hyperbolic tangent of the given `Double` value. - * @group hyperbolic - */ - def tanh(x: Double):Double = java.lang.Math.tanh(x) - - // ----------------------------------------------------------------------- - // miscellaneous functions - // ----------------------------------------------------------------------- - - /** Returns the size of an ulp of the given `Double` value. - * @group ulp - */ - def ulp(x: Double): Double = java.lang.Math.ulp(x) - - /** Returns the size of an ulp of the given `Float` value. - * @group ulp - */ - def ulp(x: Float): Float = java.lang.Math.ulp(x) - - /** @group rounding */ - def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y) -} diff --git a/tests/scala2-library/src/library/scala/native.scala b/tests/scala2-library/src/library/scala/native.scala deleted file mode 100644 index 49d3ced805dd..000000000000 --- a/tests/scala2-library/src/library/scala/native.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** Marker for native methods. - * - * {{{ - * @native def f(x: Int, y: List[Long]): String = ... - * }}} - * - * A `@native` method is compiled to the platform's native method, - * while discarding the method's body (if any). The body will be type checked if present. - * - * A method marked @native must be a member of a class, not a trait (since 2.12). - * - * @since 2.6 - */ -class native extends scala.annotation.StaticAnnotation {} diff --git a/tests/scala2-library/src/library/scala/noinline.scala b/tests/scala2-library/src/library/scala/noinline.scala deleted file mode 100644 index 6c21ed667d35..000000000000 --- a/tests/scala2-library/src/library/scala/noinline.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala - -/** - * An annotation on methods that forbids the compiler to inline the method, no matter how safe the - * inlining appears to be. The annotation can be used at definition site or at callsite. 
- * - * {{{ - * @inline final def f1(x: Int) = x - * @noinline final def f2(x: Int) = x - * final def f3(x: Int) = x - * - * def t1 = f1(1) // inlined if possible - * def t2 = f2(1) // not inlined - * def t3 = f3(1) // may be inlined (heuristics) - * def t4 = f1(1): @noinline // not inlined (override at callsite) - * def t5 = f2(1): @inline // inlined if possible (override at callsite) - * def t6 = f3(1): @inline // inlined if possible - * def t7 = f3(1): @noinline // not inlined - * } - * }}} - * - * Note: parentheses are required when annotating a callsite within a larger expression. - * - * {{{ - * def t1 = f1(1) + f1(1): @noinline // equivalent to (f1(1) + f1(1)): @noinline - * def t2 = f1(1) + (f1(1): @noinline) // the second call to f1 is not inlined - * }}} - * - * @author Lex Spoon - * @version 1.0, 2007-5-21 - * @since 2.5 - */ -class noinline extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/package.scala b/tests/scala2-library/src/library/scala/package.scala deleted file mode 100644 index 224112c11cdb..000000000000 --- a/tests/scala2-library/src/library/scala/package.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -/** - * Core Scala types. They are always available without an explicit import. - * @contentDiagram hideNodes "scala.Serializable" - */ -package object scala { - type Throwable = java.lang.Throwable - type Exception = java.lang.Exception - type Error = java.lang.Error - - type RuntimeException = java.lang.RuntimeException - type NullPointerException = java.lang.NullPointerException - type ClassCastException = java.lang.ClassCastException - type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException - type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException - type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException - type UnsupportedOperationException = java.lang.UnsupportedOperationException - type IllegalArgumentException = java.lang.IllegalArgumentException - type NoSuchElementException = java.util.NoSuchElementException - type NumberFormatException = java.lang.NumberFormatException - type AbstractMethodError = java.lang.AbstractMethodError - type InterruptedException = java.lang.InterruptedException - - // A dummy used by the specialization annotation. 
- val AnyRef = new Specializable { - override def toString = "object AnyRef" - } - - type TraversableOnce[+A] = scala.collection.TraversableOnce[A] - - type Traversable[+A] = scala.collection.Traversable[A] - val Traversable = scala.collection.Traversable - - type Iterable[+A] = scala.collection.Iterable[A] - val Iterable = scala.collection.Iterable - - type Seq[+A] = scala.collection.Seq[A] - val Seq = scala.collection.Seq - - type IndexedSeq[+A] = scala.collection.IndexedSeq[A] - val IndexedSeq = scala.collection.IndexedSeq - - type Iterator[+A] = scala.collection.Iterator[A] - val Iterator = scala.collection.Iterator - - type BufferedIterator[+A] = scala.collection.BufferedIterator[A] - - type List[+A] = scala.collection.immutable.List[A] - val List = scala.collection.immutable.List - - val Nil = scala.collection.immutable.Nil - - type ::[A] = scala.collection.immutable.::[A] - val :: = scala.collection.immutable.:: - - val +: = scala.collection.+: - val :+ = scala.collection.:+ - - type Stream[+A] = scala.collection.immutable.Stream[A] - val Stream = scala.collection.immutable.Stream - val #:: = scala.collection.immutable.Stream.#:: - - type Vector[+A] = scala.collection.immutable.Vector[A] - val Vector = scala.collection.immutable.Vector - - type StringBuilder = scala.collection.mutable.StringBuilder - val StringBuilder = scala.collection.mutable.StringBuilder - - type Range = scala.collection.immutable.Range - val Range = scala.collection.immutable.Range - - // Numeric types which were moved into scala.math.* - - type BigDecimal = scala.math.BigDecimal - val BigDecimal = scala.math.BigDecimal - - type BigInt = scala.math.BigInt - val BigInt = scala.math.BigInt - - type Equiv[T] = scala.math.Equiv[T] - val Equiv = scala.math.Equiv - - type Fractional[T] = scala.math.Fractional[T] - val Fractional = scala.math.Fractional - - type Integral[T] = scala.math.Integral[T] - val Integral = scala.math.Integral - - type Numeric[T] = scala.math.Numeric[T] - val Numeric = scala.math.Numeric - - type Ordered[T] = scala.math.Ordered[T] - val Ordered = scala.math.Ordered - - type Ordering[T] = scala.math.Ordering[T] - val Ordering = scala.math.Ordering - - type PartialOrdering[T] = scala.math.PartialOrdering[T] - type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T] - - type Either[+A, +B] = scala.util.Either[A, B] - val Either = scala.util.Either - - type Left[+A, +B] = scala.util.Left[A, B] - val Left = scala.util.Left - - type Right[+A, +B] = scala.util.Right[A, B] - val Right = scala.util.Right - - // Annotations which we might move to annotation.* -/* - type SerialVersionUID = annotation.SerialVersionUID - type deprecated = annotation.deprecated - type deprecatedName = annotation.deprecatedName - type inline = annotation.inline - type native = annotation.native - type noinline = annotation.noinline - type remote = annotation.remote - type specialized = annotation.specialized - type transient = annotation.transient - type throws = annotation.throws - type unchecked = annotation.unchecked.unchecked - type volatile = annotation.volatile - */ -} diff --git a/tests/scala2-library/src/library/scala/ref/PhantomReference.scala b/tests/scala2-library/src/library/scala/ref/PhantomReference.scala deleted file mode 100644 index 80e77bd9d5a6..000000000000 --- a/tests/scala2-library/src/library/scala/ref/PhantomReference.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | 
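The aliases in the deleted package object above are the reason these names resolve with no imports in ordinary Scala code; a minimal check (object name illustrative):

object NoImportsNeeded {
  def main(args: Array[String]): Unit = {
    val xs: List[Int] = 1 :: 2 :: Nil        // immutable List, ::, Nil via the aliases
    val n: BigInt = BigInt(2).pow(64)        // scala.math.BigInt via its alias
    val e: Either[String, Int] = Right(42)   // scala.util.Either via its alias
    println((xs, n, e))
  }
}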
http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.ref - -/** - * @author Sean McDirmid - */ -class PhantomReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] { - val underlying: java.lang.ref.PhantomReference[_ <: T] = - new PhantomReferenceWithWrapper[T](value, queue, this) -} - -/** - * @author Philipp Haller - */ -private class PhantomReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: PhantomReference[T]) - extends java.lang.ref.PhantomReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/tests/scala2-library/src/library/scala/ref/Reference.scala b/tests/scala2-library/src/library/scala/ref/Reference.scala deleted file mode 100644 index 6377dddcd3ca..000000000000 --- a/tests/scala2-library/src/library/scala/ref/Reference.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.ref - -/** - * @see `java.lang.ref.Reference` - * @author Sean McDirmid - */ -trait Reference[+T <: AnyRef] extends Function0[T] { - /** return the underlying value */ - def apply(): T - /** return `Some` underlying if it hasn't been collected, otherwise `None` */ - def get: Option[T] - override def toString = get.map(_.toString).getOrElse("") - def clear(): Unit - def enqueue(): Boolean - def isEnqueued(): Boolean -} diff --git a/tests/scala2-library/src/library/scala/ref/ReferenceQueue.scala b/tests/scala2-library/src/library/scala/ref/ReferenceQueue.scala deleted file mode 100644 index 89215ef35d43..000000000000 --- a/tests/scala2-library/src/library/scala/ref/ReferenceQueue.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.ref - -/** - * @author Sean McDirmid - * @author Philipp Haller - */ -class ReferenceQueue[+T <: AnyRef] { - - private[ref] val underlying: java.lang.ref.ReferenceQueue[_ <: T] = new java.lang.ref.ReferenceQueue[T] - override def toString = underlying.toString - - protected def Wrapper(jref: java.lang.ref.Reference[_]): Option[Reference[T]] = - jref match { - case null => None - case ref => Some(ref.asInstanceOf[ReferenceWithWrapper[T]].wrapper) - } - - def poll: Option[Reference[T]] = Wrapper(underlying.poll) - def remove: Option[Reference[T]] = Wrapper(underlying.remove) - def remove(timeout: Long): Option[Reference[T]] = Wrapper(underlying.remove(timeout)) - -} diff --git a/tests/scala2-library/src/library/scala/ref/ReferenceWrapper.scala b/tests/scala2-library/src/library/scala/ref/ReferenceWrapper.scala deleted file mode 100644 index 3da1f2ea7c9c..000000000000 --- a/tests/scala2-library/src/library/scala/ref/ReferenceWrapper.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.ref - -/** - * @author Sean McDirmid - */ -trait ReferenceWrapper[+T <: AnyRef] extends Reference[T] with Proxy { - val underlying: java.lang.ref.Reference[_ <: T] - override def get = 
Option(underlying.get) - def apply() = { - val ret = underlying.get - if (ret eq null) throw new NoSuchElementException - ret - } - def clear() = underlying.clear() - def enqueue = underlying.enqueue - def isEnqueued = underlying.isEnqueued - def self = underlying -} - -/** - * @author Philipp Haller - */ -private trait ReferenceWithWrapper[T <: AnyRef] { - val wrapper: ReferenceWrapper[T] -} diff --git a/tests/scala2-library/src/library/scala/ref/SoftReference.scala b/tests/scala2-library/src/library/scala/ref/SoftReference.scala deleted file mode 100644 index 5e60f0078892..000000000000 --- a/tests/scala2-library/src/library/scala/ref/SoftReference.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.ref - -/** - * @author Sean McDirmid - */ -class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] { - def this(value : T) = this(value, null) - - val underlying: java.lang.ref.SoftReference[_ <: T] = - new SoftReferenceWithWrapper[T](value, queue, this) -} - -/** - * A companion object that implements an extractor for `SoftReference` values - * @author Rebecca Claire Murphy - */ -object SoftReference { - - /** Creates a `SoftReference` pointing to `value` */ - def apply[T <: AnyRef](value: T) = new SoftReference(value) - - /** Optionally returns the referenced value, or `None` if that value no longer exists */ - def unapply[T <: AnyRef](sr: SoftReference[T]): Option[T] = Option(sr.underlying.get) -} - -/** - * @author Philipp Haller - */ -private class SoftReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: SoftReference[T]) - extends java.lang.ref.SoftReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/tests/scala2-library/src/library/scala/ref/WeakReference.scala b/tests/scala2-library/src/library/scala/ref/WeakReference.scala deleted file mode 100644 index 9dcc0bbe5f97..000000000000 --- a/tests/scala2-library/src/library/scala/ref/WeakReference.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.ref - -/** - * A wrapper class for java.lang.ref.WeakReference - * The new functionality is (1) results are Option values, instead of using null. - * (2) There is an extractor that maps the weak reference itself into an option. 
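A small sketch of how the `scala.ref` wrappers in these hunks are used: `get` returns an `Option` rather than `null`, and the companion objects double as extractors. The `RefSketch` name is illustrative, and whether a referent has already been collected depends on the garbage collector:

import scala.ref.WeakReference

object RefSketch {
  def main(args: Array[String]): Unit = {
    val strong = new Object
    val wr = WeakReference(strong)
    println(wr.get.isDefined)            // true: `strong` below still holds the referent
    wr match {
      case WeakReference(v) => println("still reachable: " + v)
      case _                => println("already collected")
    }
    println(strong ne null)              // keep a strong reference live past the checks
  }
}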
- * @author Sean McDirmid - */ -class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] { - def this(value: T) = this(value, null) - val underlying: java.lang.ref.WeakReference[_ <: T] = - new WeakReferenceWithWrapper[T](value, queue, this) -} - -/** An extractor for weak reference values */ -object WeakReference { - - /** Creates a weak reference pointing to `value` */ - def apply[T <: AnyRef](value: T) = new WeakReference(value) - - /** Optionally returns the referenced value, or `None` if that value no longer exists */ - def unapply[T <: AnyRef](wr: WeakReference[T]): Option[T] = Option(wr.underlying.get) -} - -/** - * @author Philipp Haller - */ -private class WeakReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: WeakReference[T]) - extends java.lang.ref.WeakReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/tests/scala2-library/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/tests/scala2-library/src/library/scala/reflect/ClassManifestDeprecatedApis.scala deleted file mode 100644 index d2ae10747d7b..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/ClassManifestDeprecatedApis.scala +++ /dev/null @@ -1,242 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package reflect - -import scala.collection.mutable.{ WrappedArray, ArrayBuilder } -import java.lang.{ Class => jClass } - -@deprecated("use scala.reflect.ClassTag instead", "2.10.0") -trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { - self: ClassManifest[T] => - - // Still in use in target test.junit.comp. - @deprecated("use runtimeClass instead", "2.10.0") - def erasure: jClass[_] = runtimeClass - - private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = { - def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = { - left.nonEmpty && { - val next = left.head - val supers = next.getInterfaces.toSet ++ Option(next.getSuperclass) - supers(sup) || { - val xs = left ++ supers filterNot seen - loop(xs - next, seen + next) - } - } - } - loop(Set(sub), Set()) - } - - private def subargs(args1: List[OptManifest[_]], args2: List[OptManifest[_]]) = (args1 corresponds args2) { - // !!! [Martin] this is wrong, need to take variance into account - case (x: ClassManifest[_], y: ClassManifest[_]) => x <:< y - case (x, y) => (x eq NoManifest) && (y eq NoManifest) - } - - /** Tests whether the type represented by this manifest is a subtype - * of the type represented by `that` manifest, subject to the limitations - * described in the header. - */ - @deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") - def <:<(that: ClassManifest[_]): Boolean = { - // All types which could conform to these types will override <:<. 
- def cannotMatch = { - import Manifest._ - that.isInstanceOf[AnyValManifest[_]] || (that eq AnyVal) || (that eq Nothing) || (that eq Null) - } - - // This is wrong, and I don't know how it can be made right - // without more development of Manifests, due to arity-defying - // relationships like: - // - // List[String] <: AnyRef - // Map[Int, Int] <: Iterable[(Int, Int)] - // - // Given the manifest for Map[A, B] how do I determine that a - // supertype has single type argument (A, B) ? I don't see how we - // can say whether X <:< Y when type arguments are involved except - // when the erasure is the same, even before considering variance. - !cannotMatch && { - // this part is wrong for not considering variance - if (this.runtimeClass == that.runtimeClass) - subargs(this.typeArguments, that.typeArguments) - // this part is wrong for punting unless the rhs has no type - // arguments, but it's better than a blindfolded pinata swing. - else - that.typeArguments.isEmpty && subtype(this.runtimeClass, that.runtimeClass) - } - } - - /** Tests whether the type represented by this manifest is a supertype - * of the type represented by `that` manifest, subject to the limitations - * described in the header. - */ - @deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") - def >:>(that: ClassManifest[_]): Boolean = - that <:< this - - override def canEqual(other: Any) = other match { - case _: ClassManifest[_] => true - case _ => false - } - - protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] = - java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]] - - @deprecated("use wrap instead", "2.10.0") - def arrayManifest: ClassManifest[Array[T]] = - ClassManifest.classType[Array[T]](arrayClass[T](runtimeClass), this) - - override def newArray(len: Int): Array[T] = - java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - - @deprecated("use wrap.newArray instead", "2.10.0") - def newArray2(len: Int): Array[Array[T]] = - java.lang.reflect.Array.newInstance(arrayClass[T](runtimeClass), len) - .asInstanceOf[Array[Array[T]]] - - @deprecated("use wrap.wrap.newArray instead", "2.10.0") - def newArray3(len: Int): Array[Array[Array[T]]] = - java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](runtimeClass)), len) - .asInstanceOf[Array[Array[Array[T]]]] - - @deprecated("use wrap.wrap.wrap.newArray instead", "2.10.0") - def newArray4(len: Int): Array[Array[Array[Array[T]]]] = - java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass))), len) - .asInstanceOf[Array[Array[Array[Array[T]]]]] - - @deprecated("use wrap.wrap.wrap.wrap.newArray instead", "2.10.0") - def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] = - java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass)))), len) - .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]] - - @deprecated("create WrappedArray directly instead", "2.10.0") - def newWrappedArray(len: Int): WrappedArray[T] = - // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests - new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]] - - @deprecated("use ArrayBuilder.make(this) instead", "2.10.0") - def newArrayBuilder(): ArrayBuilder[T] = - // it's safe to assume T <: AnyRef here because the method is 
overridden for all value type manifests - new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] - - @deprecated("use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0") - def typeArguments: List[OptManifest[_]] = List() - - protected def argString = - if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]") - else if (runtimeClass.isArray) "["+ClassManifest.fromClass(runtimeClass.getComponentType)+"]" - else "" -} - -/** `ClassManifestFactory` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. - * - * Unlike `ClassManifest`, this factory isn't annotated with a deprecation warning. - * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. - * - * In a perfect world, we would just remove the @deprecated annotation from `ClassManifest` the object - * and then delete it in 2.11. After all, that object is explicitly marked as internal, so no one should use it. - * However a lot of existing libraries disregarded the Scaladoc that comes with `ClassManifest`, - * so we need to somehow nudge them into migrating prior to removing stuff out of the blue. - * Hence we've introduced this design decision as the lesser of two evils. - */ -object ClassManifestFactory { - val Byte = ManifestFactory.Byte - val Short = ManifestFactory.Short - val Char = ManifestFactory.Char - val Int = ManifestFactory.Int - val Long = ManifestFactory.Long - val Float = ManifestFactory.Float - val Double = ManifestFactory.Double - val Boolean = ManifestFactory.Boolean - val Unit = ManifestFactory.Unit - val Any = ManifestFactory.Any - val Object = ManifestFactory.Object - val AnyVal = ManifestFactory.AnyVal - val Nothing = ManifestFactory.Nothing - val Null = ManifestFactory.Null - - def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match { - case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]] - case java.lang.Short.TYPE => Short.asInstanceOf[ClassManifest[T]] - case java.lang.Character.TYPE => Char.asInstanceOf[ClassManifest[T]] - case java.lang.Integer.TYPE => Int.asInstanceOf[ClassManifest[T]] - case java.lang.Long.TYPE => Long.asInstanceOf[ClassManifest[T]] - case java.lang.Float.TYPE => Float.asInstanceOf[ClassManifest[T]] - case java.lang.Double.TYPE => Double.asInstanceOf[ClassManifest[T]] - case java.lang.Boolean.TYPE => Boolean.asInstanceOf[ClassManifest[T]] - case java.lang.Void.TYPE => Unit.asInstanceOf[ClassManifest[T]] - case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]] - } - - def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value) - - /** ClassManifest for the class type `clazz`, where `clazz` is - * a top-level or static class. - * @note This no-prefix, no-arguments case is separate because we - * it's called from ScalaRunTime.boxArray itself. If we - * pass varargs as arrays into this, we get an infinitely recursive call - * to boxArray. 
(Besides, having a separate case is more efficient) - */ - def classType[T](clazz: jClass[_]): ClassManifest[T] = - new ClassTypeManifest[T](None, clazz, Nil) - - /** ClassManifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class and `args` are its type arguments */ - def classType[T](clazz: jClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] = - new ClassTypeManifest[T](None, clazz, arg1 :: args.toList) - - /** ClassManifest for the class type `clazz[args]`, where `clazz` is - * a class with non-package prefix type `prefix` and type arguments `args`. - */ - def classType[T](prefix: OptManifest[_], clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] = - new ClassTypeManifest[T](Some(prefix), clazz, args.toList) - - def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match { - case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]] - case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest - } - - @SerialVersionUID(1L) - private class AbstractTypeClassManifest[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*) extends ClassManifest[T] { - override def runtimeClass = clazz - override val typeArguments = args.toList - override def toString = prefix.toString+"#"+name+argString - } - - /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not - * strictly necessary as it could be obtained by reflection. It was - * added so that erasure can be calculated without reflection. */ - def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] = - new AbstractTypeClassManifest(prefix, name, clazz) - - /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not - * strictly necessary as it could be obtained by reflection. It was - * added so that erasure can be calculated without reflection. - * todo: remove after next bootstrap - */ - def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] = - new AbstractTypeClassManifest(prefix, name, upperbound.runtimeClass) -} - -/** Manifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class */ -@SerialVersionUID(1L) -private class ClassTypeManifest[T]( - prefix: Option[OptManifest[_]], - val runtimeClass: jClass[_], - override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T] -{ - override def toString = - (if (prefix.isEmpty) "" else prefix.get.toString+"#") + - (if (runtimeClass.isArray) "Array" else runtimeClass.getName) + - argString -} diff --git a/tests/scala2-library/src/library/scala/reflect/ClassTag.scala b/tests/scala2-library/src/library/scala/reflect/ClassTag.scala deleted file mode 100644 index 30ceadceeb59..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/ClassTag.scala +++ /dev/null @@ -1,143 +0,0 @@ -package scala -package reflect - -import java.lang.{ Class => jClass } - -/** - * - * A `ClassTag[T]` stores the erased class of a given type `T`, accessible via the `runtimeClass` - * field. This is particularly useful for instantiating `Array`s whose element types are unknown - * at compile time. - * - * `ClassTag`s are a weaker special case of [[scala.reflect.api.TypeTags#TypeTag]]s, in that they - * wrap only the runtime class of a given type, whereas a `TypeTag` contains all static type - * information. 
That is, `ClassTag`s are constructed from knowing only the top-level class of a - * type, without necessarily knowing all of its argument types. This runtime information is enough - * for runtime `Array` creation. - * - * For example: - * {{{ - * scala> def mkArray[T : ClassTag](elems: T*) = Array[T](elems: _*) - * mkArray: [T](elems: T*)(implicit evidence$1: scala.reflect.ClassTag[T])Array[T] - * - * scala> mkArray(42, 13) - * res0: Array[Int] = Array(42, 13) - * - * scala> mkArray("Japan","Brazil","Germany") - * res1: Array[String] = Array(Japan, Brazil, Germany) - * }}} - * - * See [[scala.reflect.api.TypeTags]] for more examples, or the - * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] - * for more details. - * - */ -@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}") -trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable { - // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder` - // class tags, and all tags in general, should be as minimalistic as possible - - /** A class representing the type `U` to which `T` would be erased. - * Note that there is no subtyping relationship between `T` and `U`. - */ - def runtimeClass: jClass[_] - - /** Produces a `ClassTag` that knows how to instantiate an `Array[Array[T]]` */ - def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass)) - - /** Produces a new array with element type `T` and length `len` */ - override def newArray(len: Int): Array[T] = - runtimeClass match { - case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]] - case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]] - case java.lang.Character.TYPE => new Array[Char](len).asInstanceOf[Array[T]] - case java.lang.Integer.TYPE => new Array[Int](len).asInstanceOf[Array[T]] - case java.lang.Long.TYPE => new Array[Long](len).asInstanceOf[Array[T]] - case java.lang.Float.TYPE => new Array[Float](len).asInstanceOf[Array[T]] - case java.lang.Double.TYPE => new Array[Double](len).asInstanceOf[Array[T]] - case java.lang.Boolean.TYPE => new Array[Boolean](len).asInstanceOf[Array[T]] - case java.lang.Void.TYPE => new Array[Unit](len).asInstanceOf[Array[T]] - case _ => java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - } - - /** A ClassTag[T] can serve as an extractor that matches only objects of type T. - * - * The compiler tries to turn unchecked type tests in pattern matches into checked ones - * by wrapping a `(_: T)` type pattern as `ct(_: T)`, where `ct` is the `ClassTag[T]` instance. - * Type tests necessary before calling other extractors are treated similarly. - * `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)` - * is uncheckable, but we have an instance of `ClassTag[T]`. 
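To make the two roles described above concrete, a minimal sketch that uses a ClassTag both to build arrays and as the compiler-inserted extractor in a pattern match. `ClassTagDemo`, `mkArray` and `firstOfType` are names invented for the example:

{{{
import scala.reflect.ClassTag

object ClassTagDemo {
  // Array creation: the erased class carried by the tag picks the element type.
  def mkArray[T: ClassTag](elems: T*): Array[T] = Array[T](elems: _*)

  // Extractor role: with a ClassTag[T] in scope, the `x: T` pattern below is
  // compiled into a checked test against the tag's runtime class.
  def firstOfType[T: ClassTag](xs: List[Any]): Option[T] =
    xs.collectFirst { case x: T => x }

  def main(args: Array[String]): Unit = {
    println(mkArray(42, 13).getClass)                    // class [I
    println(mkArray("Japan", "Brazil").toList)           // List(Japan, Brazil)
    println(firstOfType[String](List(1, 2.0, "three")))  // Some(three)
    println(firstOfType[String](List(1, 2.0)))           // None
  }
}
}}}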
- */ - def unapply(x: Any): Option[T] = - if (null != x && ( - (runtimeClass.isInstance(x)) - || (x.isInstanceOf[Byte] && runtimeClass.isAssignableFrom(classOf[Byte])) - || (x.isInstanceOf[Short] && runtimeClass.isAssignableFrom(classOf[Short])) - || (x.isInstanceOf[Char] && runtimeClass.isAssignableFrom(classOf[Char])) - || (x.isInstanceOf[Int] && runtimeClass.isAssignableFrom(classOf[Int])) - || (x.isInstanceOf[Long] && runtimeClass.isAssignableFrom(classOf[Long])) - || (x.isInstanceOf[Float] && runtimeClass.isAssignableFrom(classOf[Float])) - || (x.isInstanceOf[Double] && runtimeClass.isAssignableFrom(classOf[Double])) - || (x.isInstanceOf[Boolean] && runtimeClass.isAssignableFrom(classOf[Boolean])) - || (x.isInstanceOf[Unit] && runtimeClass.isAssignableFrom(classOf[Unit]))) - ) Some(x.asInstanceOf[T]) - else None - - // case class accessories - override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]] - override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass - override def hashCode = runtimeClass.## - override def toString = { - def prettyprint(clazz: jClass[_]): String = - if (clazz.isArray) s"Array[${prettyprint(clazz.getComponentType)}]" else - clazz.getName - prettyprint(runtimeClass) - } -} - -/** - * Class tags corresponding to primitive types and constructor/extractor for ClassTags. - */ -object ClassTag { - private val ObjectTYPE = classOf[java.lang.Object] - private val NothingTYPE = classOf[scala.runtime.Nothing$] - private val NullTYPE = classOf[scala.runtime.Null$] - - val Byte : ClassTag[scala.Byte] = Manifest.Byte - val Short : ClassTag[scala.Short] = Manifest.Short - val Char : ClassTag[scala.Char] = Manifest.Char - val Int : ClassTag[scala.Int] = Manifest.Int - val Long : ClassTag[scala.Long] = Manifest.Long - val Float : ClassTag[scala.Float] = Manifest.Float - val Double : ClassTag[scala.Double] = Manifest.Double - val Boolean : ClassTag[scala.Boolean] = Manifest.Boolean - val Unit : ClassTag[scala.Unit] = Manifest.Unit - val Any : ClassTag[scala.Any] = Manifest.Any - val Object : ClassTag[java.lang.Object] = Manifest.Object - val AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal - val AnyRef : ClassTag[scala.AnyRef] = Manifest.AnyRef - val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing - val Null : ClassTag[scala.Null] = Manifest.Null - - @SerialVersionUID(1L) - private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] - - def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = - runtimeClass1 match { - case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]] - case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]] - case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]] - case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]] - case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]] - case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]] - case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]] - case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]] - case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]] - case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]] - case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]] - case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]] - case _ => new GenericClassTag[T](runtimeClass1) - } - - def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass) -} diff --git 
a/tests/scala2-library/src/library/scala/reflect/Manifest.scala b/tests/scala2-library/src/library/scala/reflect/Manifest.scala deleted file mode 100644 index 8e5ba6376eea..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/Manifest.scala +++ /dev/null @@ -1,320 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package reflect - -import scala.collection.mutable.{ArrayBuilder, WrappedArray} - -/** A `Manifest[T]` is an opaque descriptor for type T. Its supported use - * is to give access to the erasure of the type as a `Class` instance, as - * is necessary for the creation of native `Arrays` if the class is not - * known at compile time. - * - * The type-relation operators `<:<` and `=:=` should be considered - * approximations only, as there are numerous aspects of type conformance - * which are not yet adequately represented in manifests. - * - * Example usages: - * {{{ - * def arr[T] = new Array[T](0) // does not compile - * def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles - * def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding - * - * // Methods manifest, classManifest, and optManifest are in [[scala.Predef]]. - * def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U] - * isApproxSubType[List[String], List[AnyRef]] // true - * isApproxSubType[List[String], List[Int]] // false - * - * def methods[T: ClassManifest] = classManifest[T].erasure.getMethods - * def retType[T: ClassManifest](name: String) = - * methods[T] find (_.getName == name) map (_.getGenericReturnType) - * - * retType[Map[_, _]]("values") // Some(scala.collection.Iterable) - * }}} - */ -@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") -// TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") -trait Manifest[T] extends ClassManifest[T] with Equals { - override def typeArguments: List[Manifest[_]] = Nil - - override def arrayManifest: Manifest[Array[T]] = - Manifest.classType[Array[T]](arrayClass[T](runtimeClass), this) - - override def canEqual(that: Any): Boolean = that match { - case _: Manifest[_] => true - case _ => false - } - /** Note: testing for erasure here is important, as it is many times - * faster than <:< and rules out most comparisons. 
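A runnable version of the scaladoc example above, plus one extra check that shows the arity limitation discussed in ClassManifestDeprecatedApis. The object name is invented, and `<:<` on manifests emits deprecation warnings since TypeTag is the recommended replacement:

{{{
object ManifestSubtyping {
  def isApproxSubType[T: Manifest, U: Manifest]: Boolean = manifest[T] <:< manifest[U]

  def main(args: Array[String]): Unit = {
    println(isApproxSubType[List[String], List[AnyRef]])          // true
    println(isApproxSubType[List[String], List[Int]])             // false
    // True at the language level, but the manifest check gives up when the erasures
    // differ and the right-hand side still carries type arguments:
    println(isApproxSubType[Map[Int, Int], Iterable[(Int, Int)]]) // false
  }
}
}}}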
- */ - override def equals(that: Any): Boolean = that match { - case m: Manifest[_] => (m canEqual this) && (this.runtimeClass == m.runtimeClass) && (this <:< m) && (m <:< this) - case _ => false - } - override def hashCode = this.runtimeClass.## -} - -// TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("use type tags and manually check the corresponding class or type instead", "2.10.0") -@SerialVersionUID(1L) -abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals { - override def <:<(that: ClassManifest[_]): Boolean = - (that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal) - override def canEqual(other: Any) = other match { - case _: AnyValManifest[_] => true - case _ => false - } - override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - @transient - override val hashCode = System.identityHashCode(this) -} - -/** `ManifestFactory` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. - * - * Unlike `Manifest`, this factory isn't annotated with a deprecation warning. - * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. - * Why so complicated? Read up the comments for `ClassManifestFactory`. - */ -object ManifestFactory { - def valueManifests: List[AnyValManifest[_]] = - List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit) - - @SerialVersionUID(1L) - private class ByteManifest extends AnyValManifest[scala.Byte]("Byte") { - def runtimeClass = java.lang.Byte.TYPE - override def newArray(len: Int): Array[Byte] = new Array[Byte](len) - override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len)) - override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte() - private def readResolve(): Any = Manifest.Byte - } - val Byte: AnyValManifest[Byte] = new ByteManifest - - @SerialVersionUID(1L) - private class ShortManifest extends AnyValManifest[scala.Short]("Short") { - def runtimeClass = java.lang.Short.TYPE - override def newArray(len: Int): Array[Short] = new Array[Short](len) - override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len)) - override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort() - private def readResolve(): Any = Manifest.Short - } - val Short: AnyValManifest[Short] = new ShortManifest - - @SerialVersionUID(1L) - private class CharManifest extends AnyValManifest[scala.Char]("Char") { - def runtimeClass = java.lang.Character.TYPE - override def newArray(len: Int): Array[Char] = new Array[Char](len) - override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len)) - override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar() - private def readResolve(): Any = Manifest.Char - } - val Char: AnyValManifest[Char] = new CharManifest - - @SerialVersionUID(1L) - private class IntManifest extends AnyValManifest[scala.Int]("Int") { - def runtimeClass = java.lang.Integer.TYPE - override def newArray(len: Int): Array[Int] = new Array[Int](len) - override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len)) - override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt() - private def readResolve(): Any = Manifest.Int - } - val Int: AnyValManifest[Int] = new IntManifest - - 
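A quick sketch of what the value manifests above guarantee: arrays created through them are unboxed primitive arrays, not arrays of boxed wrappers (the object name is invented for the example):

{{{
object PrimitiveManifests {
  def main(args: Array[String]): Unit = {
    val ints: Array[Int] = scala.reflect.Manifest.Int.newArray(3)
    println(ints.getClass.getComponentType)                    // int, i.e. a JVM int[]
    println(scala.reflect.Manifest.Boolean.newArray(2).toList) // List(false, false)
  }
}
}}}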
@SerialVersionUID(1L) - private class LongManifest extends AnyValManifest[scala.Long]("Long") { - def runtimeClass = java.lang.Long.TYPE - override def newArray(len: Int): Array[Long] = new Array[Long](len) - override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len)) - override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong() - private def readResolve(): Any = Manifest.Long - } - val Long: AnyValManifest[Long] = new LongManifest - - @SerialVersionUID(1L) - private class FloatManifest extends AnyValManifest[scala.Float]("Float") { - def runtimeClass = java.lang.Float.TYPE - override def newArray(len: Int): Array[Float] = new Array[Float](len) - override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len)) - override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat() - private def readResolve(): Any = Manifest.Float - } - val Float: AnyValManifest[Float] = new FloatManifest - - @SerialVersionUID(1L) - private class DoubleManifest extends AnyValManifest[scala.Double]("Double") { - def runtimeClass = java.lang.Double.TYPE - override def newArray(len: Int): Array[Double] = new Array[Double](len) - override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len)) - override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble() - private def readResolve(): Any = Manifest.Double - } - val Double: AnyValManifest[Double] = new DoubleManifest - - @SerialVersionUID(1L) - private class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { - def runtimeClass = java.lang.Boolean.TYPE - override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) - override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len)) - override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean() - private def readResolve(): Any = Manifest.Boolean - } - val Boolean: AnyValManifest[Boolean] = new BooleanManifest - - @SerialVersionUID(1L) - private class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { - def runtimeClass = java.lang.Void.TYPE - override def newArray(len: Int): Array[Unit] = new Array[Unit](len) - override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len)) - override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit() - override protected def arrayClass[T](tp: Class[_]): Class[Array[T]] = - if (tp eq runtimeClass) classOf[Array[scala.runtime.BoxedUnit]].asInstanceOf[Class[Array[T]]] - else super.arrayClass(tp) - private def readResolve(): Any = Manifest.Unit - } - val Unit: AnyValManifest[Unit] = new UnitManifest - - private val ObjectTYPE = classOf[java.lang.Object] - private val NothingTYPE = classOf[scala.runtime.Nothing$] - private val NullTYPE = classOf[scala.runtime.Null$] - - @SerialVersionUID(1L) - private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") { - override def newArray(len: Int) = new Array[scala.Any](len) - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) - private def readResolve(): Any = Manifest.Any - } - val Any: Manifest[scala.Any] = new AnyManifest - - @SerialVersionUID(1L) - private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { - override def newArray(len: Int) = new Array[java.lang.Object](len) - override def <:<(that: 
ClassManifest[_]): Boolean = (that eq this) || (that eq Any) - private def readResolve(): Any = Manifest.Object - } - val Object: Manifest[java.lang.Object] = new ObjectManifest - - val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] - - @SerialVersionUID(1L) - private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { - override def newArray(len: Int) = new Array[scala.AnyVal](len) - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) - private def readResolve(): Any = Manifest.AnyVal - } - val AnyVal: Manifest[scala.AnyVal] = new AnyValPhantomManifest - - @SerialVersionUID(1L) - private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") { - override def newArray(len: Int) = new Array[scala.Null](len) - override def <:<(that: ClassManifest[_]): Boolean = - (that ne null) && (that ne Nothing) && !(that <:< AnyVal) - private def readResolve(): Any = Manifest.Null - } - val Null: Manifest[scala.Null] = new NullManifest - - @SerialVersionUID(1L) - private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { - override def newArray(len: Int) = new Array[scala.Nothing](len) - override def <:<(that: ClassManifest[_]): Boolean = (that ne null) - private def readResolve(): Any = Manifest.Nothing - } - val Nothing: Manifest[scala.Nothing] = new NothingManifest - - @SerialVersionUID(1L) - private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { - lazy val runtimeClass = value.getClass - override lazy val toString = value.toString + ".type" - } - - /** Manifest for the singleton type `value.type`. */ - def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = - new SingletonTypeManifest[T](value) - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class. - * @note This no-prefix, no-arguments case is separate because we - * it's called from ScalaRunTime.boxArray itself. If we - * pass varargs as arrays into this, we get an infinitely recursive call - * to boxArray. (Besides, having a separate case is more efficient) - */ - def classType[T](clazz: Predef.Class[_]): Manifest[T] = - new ClassTypeManifest[T](None, clazz, Nil) - - /** Manifest for the class type `clazz`, where `clazz` is - * a top-level or static class and args are its type arguments. */ - def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = - new ClassTypeManifest[T](None, clazz, arg1 :: args.toList) - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a class with non-package prefix type `prefix` and type arguments `args`. - */ - def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = - new ClassTypeManifest[T](Some(prefix), clazz, args.toList) - - @SerialVersionUID(1L) - private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_], - override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) { - override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - @transient - override val hashCode = System.identityHashCode(this) - } - - /** Manifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class. 
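A small sketch of the conformance rules encoded by the phantom manifests above, together with a singleton-type manifest. The object name is invented, and the `<:<` calls emit deprecation warnings:

{{{
import scala.reflect.Manifest

object PhantomManifests {
  def main(args: Array[String]): Unit = {
    // Nothing conforms to everything; Null conforms to reference types but not to AnyVal.
    println(Manifest.Nothing <:< Manifest.Int)    // true
    println(Manifest.Null <:< manifest[String])   // true
    println(Manifest.Null <:< Manifest.AnyVal)    // false
    // A singleton-type manifest remembers the value it was created from.
    println(Manifest.singleType("hello"))         // hello.type
  }
}
}}}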
*/ - @SerialVersionUID(1L) - private class ClassTypeManifest[T](prefix: Option[Manifest[_]], - val runtimeClass: Predef.Class[_], - override val typeArguments: List[Manifest[_]]) extends Manifest[T] { - override def toString = - (if (prefix.isEmpty) "" else prefix.get.toString+"#") + - (if (runtimeClass.isArray) "Array" else runtimeClass.getName) + - argString - } - - def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = - arg.asInstanceOf[Manifest[T]].arrayManifest - - @SerialVersionUID(1L) - private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Seq[Manifest[_]]) extends Manifest[T] { - def runtimeClass = upperBound - override val typeArguments = args.toList - override def toString = prefix.toString+"#"+name+argString - } - - /** Manifest for the abstract type `prefix # name`. `upperBound` is not - * strictly necessary as it could be obtained by reflection. It was - * added so that erasure can be calculated without reflection. */ - def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = - new AbstractTypeManifest[T](prefix, name, upperBound, args) - - @SerialVersionUID(1L) - private class WildcardManifest[T](lowerBound: Manifest[_], upperBound: Manifest[_]) extends Manifest[T] { - def runtimeClass = upperBound.runtimeClass - override def toString = - "_" + - (if (lowerBound eq Nothing) "" else " >: "+lowerBound) + - (if (upperBound eq Nothing) "" else " <: "+upperBound) - } - - /** Manifest for the unknown type `_ >: L <: U` in an existential. - */ - def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = - new WildcardManifest[T](lowerBound, upperBound) - - @SerialVersionUID(1L) - private class IntersectionTypeManifest[T](parents: Seq[Manifest[_]]) extends Manifest[T] { - def runtimeClass = parents.head.runtimeClass - override def toString = parents.mkString(" with ") - } - - /** Manifest for the intersection type `parents_0 with ... with parents_n`. */ - def intersectionType[T](parents: Manifest[_]*): Manifest[T] = - new IntersectionTypeManifest[T](parents) -} diff --git a/tests/scala2-library/src/library/scala/reflect/NameTransformer.scala b/tests/scala2-library/src/library/scala/reflect/NameTransformer.scala deleted file mode 100644 index bdf5165df5a8..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/NameTransformer.scala +++ /dev/null @@ -1,163 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package reflect - -/** Provides functions to encode and decode Scala symbolic names. - * Also provides some constants. - */ -object NameTransformer { - // TODO: reduce duplication with and in StdNames - // I made these constants because we cannot change them without bumping our major version anyway. 
- final val NAME_JOIN_STRING = "$" - final val MODULE_SUFFIX_STRING = "$" - final val MODULE_INSTANCE_NAME = "MODULE$" - final val LOCAL_SUFFIX_STRING = " " - final val LAZY_LOCAL_SUFFIX_STRING = "$lzy" - final val MODULE_VAR_SUFFIX_STRING = "$module" - final val SETTER_SUFFIX_STRING = "_$eq" - final val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$" - - private val nops = 128 - private val ncodes = 26 * 26 - - private class OpCodes(val op: Char, val code: String, val next: OpCodes) - - private val op2code = new Array[String](nops) - private val code2op = new Array[OpCodes](ncodes) - private def enterOp(op: Char, code: String) = { - op2code(op.toInt) = code - val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a' - code2op(c.toInt) = new OpCodes(op, code, code2op(c)) - } - - /* Note: decoding assumes opcodes are only ever lowercase. */ - enterOp('~', "$tilde") - enterOp('=', "$eq") - enterOp('<', "$less") - enterOp('>', "$greater") - enterOp('!', "$bang") - enterOp('#', "$hash") - enterOp('%', "$percent") - enterOp('^', "$up") - enterOp('&', "$amp") - enterOp('|', "$bar") - enterOp('*', "$times") - enterOp('/', "$div") - enterOp('+', "$plus") - enterOp('-', "$minus") - enterOp(':', "$colon") - enterOp('\\', "$bslash") - enterOp('?', "$qmark") - enterOp('@', "$at") - - /** Replace operator symbols by corresponding `\$opname`. - * - * @param name the string to encode - * @return the string with all recognized opchars replaced with their encoding - */ - def encode(name: String): String = { - var buf: StringBuilder = null - val len = name.length() - var i = 0 - while (i < len) { - val c = name charAt i - if (c < nops && (op2code(c.toInt) ne null)) { - if (buf eq null) { - buf = new StringBuilder() - buf.append(name.substring(0, i)) - } - buf.append(op2code(c.toInt)) - /* Handle glyphs that are not valid Java/JVM identifiers */ - } - else if (!Character.isJavaIdentifierPart(c)) { - if (buf eq null) { - buf = new StringBuilder() - buf.append(name.substring(0, i)) - } - buf.append("$u%04X".format(c.toInt)) - } - else if (buf ne null) { - buf.append(c) - } - i += 1 - } - if (buf eq null) name else buf.toString() - } - - /** Replace `\$opname` by corresponding operator symbol. 
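A round-trip sketch of `encode` and `decode`; the object name is invented, and the expected outputs follow directly from the operator table above:

{{{
import scala.reflect.NameTransformer

object NameRoundTrip {
  def main(args: Array[String]): Unit = {
    val enc = NameTransformer.encode("::")          // JVM-safe name of the cons operator
    println(enc)                                    // $colon$colon
    println(NameTransformer.decode(enc))            // ::
    println(NameTransformer.encode("+="))           // $plus$eq
    println(NameTransformer.decode("$qmark$bang"))  // ?!
  }
}
}}}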
- * - * @param name0 the string to decode - * @return the string with all recognized operator symbol encodings replaced with their name - */ - def decode(name0: String): String = { - //System.out.println("decode: " + name);//DEBUG - val name = if (name0.endsWith("<init>")) name0.stripSuffix("<init>") + "this" - else name0 - var buf: StringBuilder = null - val len = name.length() - var i = 0 - while (i < len) { - var ops: OpCodes = null - var unicode = false - val c = name charAt i - if (c == '$' && i + 2 < len) { - val ch1 = name.charAt(i+1) - if ('a' <= ch1 && ch1 <= 'z') { - val ch2 = name.charAt(i+2) - if ('a' <= ch2 && ch2 <= 'z') { - ops = code2op((ch1 - 'a') * 26 + ch2 - 'a') - while ((ops ne null) && !name.startsWith(ops.code, i)) ops = ops.next - if (ops ne null) { - if (buf eq null) { - buf = new StringBuilder() - buf.append(name.substring(0, i)) - } - buf.append(ops.op) - i += ops.code.length() - } - /* Handle the decoding of Unicode glyphs that are - * not valid Java/JVM identifiers */ - } else if ((len - i) >= 6 && // Check that there are enough characters left - ch1 == 'u' && - ((Character.isDigit(ch2)) || - ('A' <= ch2 && ch2 <= 'F'))) { - /* Skip past "$u", next four should be hexadecimal */ - val hex = name.substring(i+2, i+6) - try { - val str = Integer.parseInt(hex, 16).toChar - if (buf eq null) { - buf = new StringBuilder() - buf.append(name.substring(0, i)) - } - buf.append(str) - /* 2 for "$u", 4 for hexadecimal number */ - i += 6 - unicode = true - } catch { - case _:NumberFormatException => - /* `hex` did not decode to a hexadecimal number, so - * do nothing. */ - } - } - } - } - /* If we didn't see an opcode or encoded Unicode glyph, and the - buffer is non-empty, write the current character and advance - one */ - if ((ops eq null) && !unicode) { - if (buf ne null) - buf.append(c) - i += 1 - } - } - //System.out.println("= " + (if (buf == null) name else buf.toString()));//DEBUG - if (buf eq null) name else buf.toString() - } -} diff --git a/tests/scala2-library/src/library/scala/reflect/NoManifest.scala b/tests/scala2-library/src/library/scala/reflect/NoManifest.scala deleted file mode 100644 index 2ef946c80cd9..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/NoManifest.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package reflect - -/** One of the branches of an [[scala.reflect.OptManifest]]. - */ -// TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types.
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") -object NoManifest extends OptManifest[Nothing] with Serializable { - override def toString = "<?>" -} \ No newline at end of file diff --git a/tests/scala2-library/src/library/scala/reflect/OptManifest.scala b/tests/scala2-library/src/library/scala/reflect/OptManifest.scala deleted file mode 100644 index b69f55483cd9..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/OptManifest.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package reflect - -/** A `OptManifest[T]` is an optional [[scala.reflect.Manifest]]. - * - * It is either a `Manifest` or the value `NoManifest`. - * - * @author Martin Odersky - */ -// TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") -trait OptManifest[+T] extends Serializable \ No newline at end of file diff --git a/tests/scala2-library/src/library/scala/reflect/ScalaLongSignature.java b/tests/scala2-library/src/library/scala/reflect/ScalaLongSignature.java deleted file mode 100644 index 94cf504aa45d..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/ScalaLongSignature.java +++ /dev/null @@ -1,12 +0,0 @@ -package scala.reflect; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface ScalaLongSignature { - String[] bytes(); -} diff --git a/tests/scala2-library/src/library/scala/reflect/ScalaSignature.java b/tests/scala2-library/src/library/scala/reflect/ScalaSignature.java deleted file mode 100644 index 217ae8e8f72e..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/ScalaSignature.java +++ /dev/null @@ -1,12 +0,0 @@ -package scala.reflect; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface ScalaSignature { - String bytes(); -} diff --git a/tests/scala2-library/src/library/scala/reflect/macros/internal/macroImpl.scala b/tests/scala2-library/src/library/scala/reflect/macros/internal/macroImpl.scala deleted file mode 100644 index b281fb7d12b3..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/macros/internal/macroImpl.scala +++ /dev/null @@ -1,18 +0,0 @@ -package scala.reflect.macros -package internal - -/** Links macro definitions with their implementation. - * This is necessary to preserve macro def -> macro impl links between compilation runs. - * - * More precisely, after typechecking right-hand side of a macro def - * `typedMacroBody` slaps `macroImpl` annotation onto the macro def - * with the result of typechecking as a sole parameter.
- * - * As an unfortunate consequence, this annotation must be defined in scala-library.jar, - * because anyone (even those programmers who compile their programs with only scala-library on classpath) - * must be able to define macros. - * - * To lessen the weirdness we define this annotation as `private[scala]`. - * It will not prevent pickling, but it will prevent application developers (and scaladocs) from seeing the annotation. - */ -private[scala] class macroImpl(val referenceToMacroImpl: Any) extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/reflect/package.scala b/tests/scala2-library/src/library/scala/reflect/package.scala deleted file mode 100644 index 6c7070fa4999..000000000000 --- a/tests/scala2-library/src/library/scala/reflect/package.scala +++ /dev/null @@ -1,67 +0,0 @@ -package scala - -import java.lang.reflect.{ AccessibleObject => jAccessibleObject } - -package object reflect { - - // in the new scheme of things ClassManifests are aliased to ClassTags - // this is done because we want `toArray` in collections work with ClassTags - // but changing it to use the ClassTag context bound without aliasing ClassManifest - // will break everyone who subclasses and overrides `toArray` - // luckily for us, aliasing doesn't hamper backward compatibility, so it's ideal in this situation - // I wish we could do the same for Manifests and TypeTags though - - // note, by the way, that we don't touch ClassManifest the object - // because its Byte, Short and so on factory fields are incompatible with ClassTag's - - /** A `ClassManifest[T]` is an opaque descriptor for type `T`. - * It is used by the compiler to preserve information necessary - * for instantiating `Arrays` in those cases where the element type - * is unknown at compile time. - * - * The type-relation operators make an effort to present a more accurate - * picture than can be realized with erased types, but they should not be - * relied upon to give correct answers. In particular they are likely to - * be wrong when variance is involved or when a subtype has a different - * number of type arguments than a supertype. - */ - @deprecated("use scala.reflect.ClassTag instead", "2.10.0") - @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") - type ClassManifest[T] = scala.reflect.ClassTag[T] - - /** The object `ClassManifest` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. - */ - @deprecated("use scala.reflect.ClassTag instead", "2.10.0") - val ClassManifest = ClassManifestFactory - - /** The object `Manifest` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. - */ - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - val Manifest = ManifestFactory - - def classTag[T](implicit ctag: ClassTag[T]) = ctag - - /** Make a java reflection object accessible, if it is not already - * and it is possible to do so. If a SecurityException is thrown in the - * attempt, it is caught and discarded. 
- */ - def ensureAccessible[T <: jAccessibleObject](m: T): T = { - if (!m.isAccessible) { - try m setAccessible true - catch { case _: SecurityException => } // does nothing - } - m - } - - // anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala - // implementation is hardwired into `scala.reflect.reify.Taggers` - // using the mechanism implemented in `scala.tools.reflect.FastTrack` - // todo. once we have implicit macros for tag generation, we can remove this anchor - private[scala] def materializeClassTag[T](): ClassTag[T] = /*macro*/ ??? -} - -/** An exception that indicates an error during Scala reflection */ -case class ScalaReflectionException(msg: String) extends Exception(msg) diff --git a/tests/scala2-library/src/library/scala/remote.scala b/tests/scala2-library/src/library/scala/remote.scala deleted file mode 100644 index 7265a1519454..000000000000 --- a/tests/scala2-library/src/library/scala/remote.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** - * An annotation that designates the class to which it is applied as remotable. - * - * For instance, the Scala code - * {{{ - * @remote trait Hello { - * def sayHello(): String - * } - * }}} - * is equivalent to the following Java code: - * {{{ - * public interface Hello extends java.rmi.Remote { - * String sayHello() throws java.rmi.RemoteException; - * } - * }}} - */ -@deprecated("extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods", "2.12.0") -class remote extends scala.annotation.StaticAnnotation {} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction0.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction0.scala deleted file mode 100644 index 1e677e800877..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction0.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction0[@specialized(Specializable.Primitives) +R] extends Function0[R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction1.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction1.scala deleted file mode 100644 index 178280cb4600..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction1.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
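A minimal sketch of the `classTag` helper defined in the package object above; the surrounding object and method are invented names for the example:

{{{
import scala.reflect.{classTag, ClassTag}

object RuntimeClasses {
  // classTag[T] just summons the implicit ClassTag, giving access to the erased class.
  def runtimeClassOf[T: ClassTag]: Class[_] = classTag[T].runtimeClass

  def main(args: Array[String]): Unit = {
    println(runtimeClassOf[String])      // class java.lang.String
    println(runtimeClassOf[Int])         // int
    println(runtimeClassOf[Array[Int]])  // class [I
  }
}
}}}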
- -package scala.runtime - -abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction10.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction10.scala deleted file mode 100644 index 776f52238d20..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction10.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends Function10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction11.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction11.scala deleted file mode 100644 index 76cd8fbb3c92..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction11.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] extends Function11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction12.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction12.scala deleted file mode 100644 index 10066ed4b356..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction12.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, +R] extends Function12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction13.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction13.scala deleted file mode 100644 index 6c3a45734ce3..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction13.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
- -package scala.runtime - -abstract class AbstractFunction13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, +R] extends Function13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction14.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction14.scala deleted file mode 100644 index bf2b6736f405..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction14.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, +R] extends Function14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction15.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction15.scala deleted file mode 100644 index 5136f666c827..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction15.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, +R] extends Function15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction16.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction16.scala deleted file mode 100644 index dbafab83015c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction16.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, +R] extends Function16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction17.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction17.scala deleted file mode 100644 index 9c36dbf5d8d9..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction17.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
- -package scala.runtime - -abstract class AbstractFunction17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, +R] extends Function17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction18.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction18.scala deleted file mode 100644 index 30eee9586fbe..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction18.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, +R] extends Function18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction19.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction19.scala deleted file mode 100644 index 14baf5f1eba3..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction19.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, +R] extends Function19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction2.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction2.scala deleted file mode 100644 index 223ade99838f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction2.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function2[T1, T2, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction20.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction20.scala deleted file mode 100644 index f5c29571bf97..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction20.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. 
See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, +R] extends Function20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction21.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction21.scala deleted file mode 100644 index 15feea3a669a..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction21.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, +R] extends Function21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction22.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction22.scala deleted file mode 100644 index d77369ff0125..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction22.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, +R] extends Function22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction3.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction3.scala deleted file mode 100644 index f8635092140e..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction3.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction3[-T1, -T2, -T3, +R] extends Function3[T1, T2, T3, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction4.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction4.scala deleted file mode 100644 index 5927015ef8b1..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction4.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
- -package scala.runtime - -abstract class AbstractFunction4[-T1, -T2, -T3, -T4, +R] extends Function4[T1, T2, T3, T4, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction5.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction5.scala deleted file mode 100644 index 411e1e14bf63..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction5.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction5[-T1, -T2, -T3, -T4, -T5, +R] extends Function5[T1, T2, T3, T4, T5, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction6.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction6.scala deleted file mode 100644 index 411c30d480d3..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction6.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends Function6[T1, T2, T3, T4, T5, T6, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction7.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction7.scala deleted file mode 100644 index 498f98633ae7..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction7.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends Function7[T1, T2, T3, T4, T5, T6, T7, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction8.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction8.scala deleted file mode 100644 index c6d320b8874d..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction8.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
- -package scala.runtime - -abstract class AbstractFunction8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends Function8[T1, T2, T3, T4, T5, T6, T7, T8, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractFunction9.scala b/tests/scala2-library/src/library/scala/runtime/AbstractFunction9.scala deleted file mode 100644 index 34bd9d710721..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractFunction9.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ -// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. - -package scala.runtime - -abstract class AbstractFunction9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends Function9[T1, T2, T3, T4, T5, T6, T7, T8, T9, R] { - -} diff --git a/tests/scala2-library/src/library/scala/runtime/AbstractPartialFunction.scala b/tests/scala2-library/src/library/scala/runtime/AbstractPartialFunction.scala deleted file mode 100644 index 630966d0d41d..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/AbstractPartialFunction.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction` - * in terms of `isDefinedAt` and `applyOrElse`. - * - * This allows more efficient implementations in many cases: - * - optimized `orElse` method supports chained `orElse` in linear time, - * and with no slow-down if the `orElse` part is not needed. - * - optimized `lift` method helps to avoid double evaluation of pattern matchers & guards - * of partial function literals. - * - * This trait is used as a basis for implementation of all partial function literals. - * - * @author Pavel Pavlov - * @since 2.10 - */ -abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self => - // this method must be overridden for better performance, - // for backwards compatibility, fall back to the one inherited from PartialFunction - // this assumes the old-school partial functions override the apply method, though - // override def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = ??? 
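For reference, a minimal hand-written subclass illustrates the shape described in the comment above: one `applyOrElse` body decides both definedness and result, so chained `orElse`/`lift` avoid matching twice. Only `scala.runtime.AbstractPartialFunction` itself comes from the deleted source; the class name, method bodies, and demo object below are invented for illustration.

import scala.runtime.AbstractPartialFunction

// Sketch only: roughly what the compiler emits for `{ case n if n > 0 => n.toString }`.
class PositiveToString extends AbstractPartialFunction[Int, String] {
  // One pattern evaluation answers both "is it defined?" and "what is the result?".
  override def applyOrElse[A1 <: Int, B1 >: String](x: A1, default: A1 => B1): B1 =
    if (x > 0) x.toString else default(x)

  def isDefinedAt(x: Int): Boolean = x > 0
}

object PositiveToStringDemo {
  def main(args: Array[String]): Unit = {
    val pf: PartialFunction[Int, String] = new PositiveToString
    println(pf.applyOrElse(3, (_: Int) => "undefined"))   // prints "3"
    println(pf.applyOrElse(-1, (_: Int) => "undefined"))  // prints "undefined"
    println(pf.lift(5))                                    // prints "Some(5)"
  }
}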
- - // probably okay to make final since classes compiled before have overridden against the old version of AbstractPartialFunction - // let's not make it final so as not to confuse anyone - /*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty) -} diff --git a/tests/scala2-library/src/library/scala/runtime/BooleanRef.java b/tests/scala2-library/src/library/scala/runtime/BooleanRef.java deleted file mode 100644 index 92e8055351f7..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/BooleanRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class BooleanRef implements java.io.Serializable { - private static final long serialVersionUID = -5730524563015615974L; - - public boolean elem; - public BooleanRef(boolean elem) { this.elem = elem; } - public String toString() { return String.valueOf(elem); } - - public static BooleanRef create(boolean e) { return new BooleanRef(e); } - public static BooleanRef zero() { return new BooleanRef(false); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/BoxedUnit.java b/tests/scala2-library/src/library/scala/runtime/BoxedUnit.java deleted file mode 100644 index f436b7c2094a..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/BoxedUnit.java +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public final class BoxedUnit implements java.io.Serializable { - private static final long serialVersionUID = 8405543498931817370L; - - public final static BoxedUnit UNIT = new BoxedUnit(); - - public final static Class TYPE = java.lang.Void.TYPE; - - private Object readResolve() { return UNIT; } - - private BoxedUnit() { } - - public boolean equals(java.lang.Object other) { - return this == other; - } - - public int hashCode() { - return 0; - } - - public String toString() { - return "()"; - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/BoxesRunTime.java b/tests/scala2-library/src/library/scala/runtime/BoxesRunTime.java deleted file mode 100644 index 6b3874fc1f6f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/BoxesRunTime.java +++ /dev/null @@ -1,762 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - -import scala.math.ScalaNumber; - -/** An object (static class) that defines methods used for creating, - * reverting, and calculating with, boxed values. There are four classes - * of methods in this object: - * - Convenience boxing methods which call the static valueOf method - * on the boxed class, thus utilizing the JVM boxing cache. - * - Convenience unboxing methods returning default value on null. - * - The generalised comparison method to be used when an object may - * be a boxed value. - * - Standard value operators for boxed number and quasi-number values. 
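A short sketch of the cooperative equality these methods implement: numeric comparisons widen both operands to the larger type, and numbers compare equal to characters with the same code point. The direct calls into `scala.runtime.BoxesRunTime` are for demonstration only; ordinary Scala code reaches them through `==` on boxed values, and the demo object name is invented.

import scala.runtime.BoxesRunTime

object BoxesRunTimeDemo {
  def main(args: Array[String]): Unit = {
    val boxedInt: AnyRef  = BoxesRunTime.boxToInteger(1)
    val boxedLong: AnyRef = BoxesRunTime.boxToLong(1L)
    val boxedChar: AnyRef = BoxesRunTime.boxToCharacter('a')

    // equalsNumNum widens both sides to the larger numeric type (LONG here).
    println(BoxesRunTime.equals(boxedInt, boxedLong))                       // true
    // equalsNumChar compares the number against the character's code point.
    println(BoxesRunTime.equals(BoxesRunTime.boxToInteger(97), boxedChar))  // true
    // Plain java.lang.Object#equals never equates different box classes.
    println(boxedInt.equals(boxedLong))                                     // false
  }
}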
- * - * @author Gilles Dubochet - * @author Martin Odersky - * @contributor Stepan Koltsov - * @version 2.0 */ -public final class BoxesRunTime -{ - private static final int CHAR = 0, /* BYTE = 1, SHORT = 2, */ INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7; - - /** We don't need to return BYTE and SHORT, as everything which might - * care widens to INT. - */ - private static int typeCode(Object a) { - if (a instanceof java.lang.Integer) return INT; - if (a instanceof java.lang.Double) return DOUBLE; - if (a instanceof java.lang.Long) return LONG; - if (a instanceof java.lang.Character) return CHAR; - if (a instanceof java.lang.Float) return FLOAT; - if ((a instanceof java.lang.Byte) || (a instanceof java.lang.Short)) return INT; - return OTHER; - } - -/* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING */ - - public static java.lang.Boolean boxToBoolean(boolean b) { - return java.lang.Boolean.valueOf(b); - } - - public static java.lang.Character boxToCharacter(char c) { - return java.lang.Character.valueOf(c); - } - - public static java.lang.Byte boxToByte(byte b) { - return java.lang.Byte.valueOf(b); - } - - public static java.lang.Short boxToShort(short s) { - return java.lang.Short.valueOf(s); - } - - public static java.lang.Integer boxToInteger(int i) { - return java.lang.Integer.valueOf(i); - } - - public static java.lang.Long boxToLong(long l) { - return java.lang.Long.valueOf(l); - } - - public static java.lang.Float boxToFloat(float f) { - return java.lang.Float.valueOf(f); - } - - public static java.lang.Double boxToDouble(double d) { - // System.out.println("box " + d); - // (new Throwable()).printStackTrace(); - return java.lang.Double.valueOf(d); - } - -/* UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING */ - - public static boolean unboxToBoolean(Object b) { - return b == null ? false : ((java.lang.Boolean)b).booleanValue(); - } - - public static char unboxToChar(Object c) { - return c == null ? 0 : ((java.lang.Character)c).charValue(); - } - - public static byte unboxToByte(Object b) { - return b == null ? 0 : ((java.lang.Byte)b).byteValue(); - } - - public static short unboxToShort(Object s) { - return s == null ? 0 : ((java.lang.Short)s).shortValue(); - } - - public static int unboxToInt(Object i) { - return i == null ? 0 : ((java.lang.Integer)i).intValue(); - } - - public static long unboxToLong(Object l) { - return l == null ? 0 : ((java.lang.Long)l).longValue(); - } - - public static float unboxToFloat(Object f) { - return f == null ? 0.0f : ((java.lang.Float)f).floatValue(); - } - - public static double unboxToDouble(Object d) { - // System.out.println("unbox " + d); - return d == null ? 0.0d : ((java.lang.Double)d).doubleValue(); - } - - /* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON */ - - public static boolean equals(Object x, Object y) { - if (x == y) return true; - return equals2(x, y); - } - - /** Since all applicable logic has to be present in the equals method of a ScalaNumber - * in any case, we dispatch to it as soon as we spot one on either side. 
- */ - public static boolean equals2(Object x, Object y) { - if (x instanceof java.lang.Number) - return equalsNumObject((java.lang.Number)x, y); - if (x instanceof java.lang.Character) - return equalsCharObject((java.lang.Character)x, y); - if (x == null) - return y == null; - - return x.equals(y); - } - - public static boolean equalsNumObject(java.lang.Number xn, Object y) { - if (y instanceof java.lang.Number) - return equalsNumNum(xn, (java.lang.Number)y); - if (y instanceof java.lang.Character) - return equalsNumChar(xn, (java.lang.Character)y); - if (xn == null) - return y == null; - - return xn.equals(y); - } - - public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) { - int xcode = typeCode(xn); - int ycode = typeCode(yn); - switch (ycode > xcode ? ycode : xcode) { - case INT: - return xn.intValue() == yn.intValue(); - case LONG: - return xn.longValue() == yn.longValue(); - case FLOAT: - return xn.floatValue() == yn.floatValue(); - case DOUBLE: - return xn.doubleValue() == yn.doubleValue(); - default: - if ((yn instanceof ScalaNumber) && !(xn instanceof ScalaNumber)) - return yn.equals(xn); - } - if (xn == null) - return yn == null; - - return xn.equals(yn); - } - - public static boolean equalsCharObject(java.lang.Character xc, Object y) { - if (y instanceof java.lang.Character) - return xc.charValue() == ((java.lang.Character)y).charValue(); - if (y instanceof java.lang.Number) - return equalsNumChar((java.lang.Number)y, xc); - if (xc == null) - return y == null; - - return xc.equals(y); - } - - public static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) { - if (yc == null) - return xn == null; - - char ch = yc.charValue(); - switch (typeCode(xn)) { - case INT: - return xn.intValue() == ch; - case LONG: - return xn.longValue() == ch; - case FLOAT: - return xn.floatValue() == ch; - case DOUBLE: - return xn.doubleValue() == ch; - default: - return xn.equals(yc); - } - } - - private static int unboxCharOrInt(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).intValue(); - } - private static long unboxCharOrLong(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).longValue(); - } - private static float unboxCharOrFloat(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).floatValue(); - } - private static double unboxCharOrDouble(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).doubleValue(); - } - -/* OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS */ - - /** arg1 + arg2 */ - public static Object add(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - if (maxcode <= INT) { - return boxToInteger(unboxCharOrInt(arg1, code1) + unboxCharOrInt(arg2, code2)); - } - if (maxcode <= LONG) { - return boxToLong(unboxCharOrLong(arg1, code1) + unboxCharOrLong(arg2, code2)); - } - if (maxcode <= FLOAT) { - return boxToFloat(unboxCharOrFloat(arg1, code1) + unboxCharOrFloat(arg2, code2)); - } - if (maxcode <= DOUBLE) { - return boxToDouble(unboxCharOrDouble(arg1, code1) + unboxCharOrDouble(arg2, code2)); - } - throw new NoSuchMethodException(); - } - - /** arg1 - arg2 */ - public static Object subtract(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - return boxToInteger(unboxCharOrInt(arg1, code1) - unboxCharOrInt(arg2, code2)); - } - if (maxcode <= LONG) { - return boxToLong(unboxCharOrLong(arg1, code1) - unboxCharOrLong(arg2, code2)); - } - if (maxcode <= FLOAT) { - return boxToFloat(unboxCharOrFloat(arg1, code1) - unboxCharOrFloat(arg2, code2)); - } - if (maxcode <= DOUBLE) { - return boxToDouble(unboxCharOrDouble(arg1, code1) - unboxCharOrDouble(arg2, code2)); - } - throw new NoSuchMethodException(); - } - - /** arg1 * arg2 */ - public static Object multiply(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - return boxToInteger(unboxCharOrInt(arg1, code1) * unboxCharOrInt(arg2, code2)); - } - if (maxcode <= LONG) { - return boxToLong(unboxCharOrLong(arg1, code1) * unboxCharOrLong(arg2, code2)); - } - if (maxcode <= FLOAT) { - return boxToFloat(unboxCharOrFloat(arg1, code1) * unboxCharOrFloat(arg2, code2)); - } - if (maxcode <= DOUBLE) { - return boxToDouble(unboxCharOrDouble(arg1, code1) * unboxCharOrDouble(arg2, code2)); - } - throw new NoSuchMethodException(); - } - - /** arg1 / arg2 */ - public static Object divide(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) / unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) / unboxCharOrLong(arg2, code2)); - if (maxcode <= FLOAT) - return boxToFloat(unboxCharOrFloat(arg1, code1) / unboxCharOrFloat(arg2, code2)); - if (maxcode <= DOUBLE) - return boxToDouble(unboxCharOrDouble(arg1, code1) / unboxCharOrDouble(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 % arg2 */ - public static Object takeModulo(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) % unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) % unboxCharOrLong(arg2, code2)); - if (maxcode <= FLOAT) - return boxToFloat(unboxCharOrFloat(arg1, code1) % unboxCharOrFloat(arg2, code2)); - if (maxcode <= DOUBLE) - return boxToDouble(unboxCharOrDouble(arg1, code1) % unboxCharOrDouble(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 >> arg2 */ - public static Object shiftSignedRight(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - if (code1 <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToInteger(val1 >> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToInteger(val1 >> val2); - } - } - if (code1 <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToLong(val1 >> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToLong(val1 >> val2); - } - } - throw new NoSuchMethodException(); - } - - /** arg1 << arg2 */ - public static Object shiftSignedLeft(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - if (code1 <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToInteger(val1 << val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToInteger(val1 << val2); - } - } - if (code1 <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToLong(val1 << val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToLong(val1 << val2); - } - } - throw new NoSuchMethodException(); - } - - /** arg1 >>> arg2 */ - public static Object shiftLogicalRight(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - if (code1 <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToInteger(val1 >>> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToInteger(val1 >>> val2); - } - } - if (code1 <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToLong(val1 >>> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToLong(val1 >>> val2); - } - } - throw new NoSuchMethodException(); - } - - /** -arg */ - public static Object negate(Object arg) throws NoSuchMethodException { - int code = typeCode(arg); - if (code <= INT) { - int val = unboxCharOrInt(arg, code); - return boxToInteger(-val); - } - if (code <= LONG) { - long val = unboxCharOrLong(arg, code); - return boxToLong(-val); - } - if (code <= FLOAT) { - float val = unboxCharOrFloat(arg, code); - return boxToFloat(-val); - } - if (code <= DOUBLE) { - double val = unboxCharOrDouble(arg, code); - return boxToDouble(-val); - } - throw new NoSuchMethodException(); - } - - /** +arg */ - public static Object positive(Object arg) throws NoSuchMethodException { - int code = typeCode(arg); - if (code <= INT) { - return 
boxToInteger(+unboxCharOrInt(arg, code)); - } - if (code <= LONG) { - return boxToLong(+unboxCharOrLong(arg, code)); - } - if (code <= FLOAT) { - return boxToFloat(+unboxCharOrFloat(arg, code)); - } - if (code <= DOUBLE) { - return boxToDouble(+unboxCharOrDouble(arg, code)); - } - throw new NoSuchMethodException(); - } - - /** arg1 & arg2 */ - public static Object takeAnd(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue()); - else - throw new NoSuchMethodException(); - } - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) & unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) & unboxCharOrLong(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 | arg2 */ - public static Object takeOr(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() | ((java.lang.Boolean) arg2).booleanValue()); - else - throw new NoSuchMethodException(); - } - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) | unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) | unboxCharOrLong(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 ^ arg2 */ - public static Object takeXor(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() ^ ((java.lang.Boolean) arg2).booleanValue()); - else - throw new NoSuchMethodException(); - } - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) ^ unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) ^ unboxCharOrLong(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 && arg2 */ - public static Object takeConditionalAnd(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) { - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() && ((java.lang.Boolean) arg2).booleanValue()); - } - throw new NoSuchMethodException(); - } - - /** arg1 || arg2 */ - public static Object takeConditionalOr(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) { - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() || ((java.lang.Boolean) arg2).booleanValue()); - } - throw new NoSuchMethodException(); - } - - /** ~arg */ - public static Object complement(Object arg) throws NoSuchMethodException { - int code = typeCode(arg); - if (code <= INT) { - return boxToInteger(~unboxCharOrInt(arg, code)); - } - if (code <= LONG) { - return boxToLong(~unboxCharOrLong(arg, code)); - } - throw new NoSuchMethodException(); - } - - /** !arg */ - public static Object takeNot(Object arg) throws NoSuchMethodException { - if (arg instanceof Boolean) { - return boxToBoolean(!((java.lang.Boolean) arg).booleanValue()); - } - throw new NoSuchMethodException(); - } - - public static Object testEqual(Object arg1, Object arg2) throws NoSuchMethodException { - return boxToBoolean(arg1 == arg2); - } - - public static Object testNotEqual(Object arg1, Object arg2) throws NoSuchMethodException { - return boxToBoolean(arg1 != arg2); - } - - public static Object testLessThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 < val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 < val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 < val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 < val2); - } - throw new NoSuchMethodException(); - } - - public static Object testLessOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 <= val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 <= val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 <= val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 <= val2); - } - throw new NoSuchMethodException(); - } - - public static Object testGreaterOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 >= val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 >= val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 >= val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 >= val2); - } - throw new NoSuchMethodException(); - } - - public static Object testGreaterThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 > val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 > val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 > val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 > val2); - } - throw new NoSuchMethodException(); - } - - public static boolean isBoxedNumberOrBoolean(Object arg) { - return (arg instanceof java.lang.Boolean) || isBoxedNumber(arg); - } - public static boolean isBoxedNumber(Object arg) { - return ( - (arg instanceof java.lang.Integer) - || (arg instanceof java.lang.Long) - || (arg instanceof java.lang.Double) - || (arg instanceof java.lang.Float) - || (arg instanceof java.lang.Short) - || (arg instanceof java.lang.Character) - || (arg instanceof java.lang.Byte) - ); - } - - /** arg.toChar */ - public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg)); - if (arg instanceof java.lang.Short) return boxToCharacter((char)unboxToShort(arg)); - if (arg instanceof java.lang.Character) return (java.lang.Character)arg; - if (arg instanceof java.lang.Long) return boxToCharacter((char)unboxToLong(arg)); - if (arg instanceof java.lang.Byte) return boxToCharacter((char)unboxToByte(arg)); - if (arg instanceof java.lang.Float) return boxToCharacter((char)unboxToFloat(arg)); - if (arg instanceof 
java.lang.Double) return boxToCharacter((char)unboxToDouble(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toByte */ - public static java.lang.Byte toByte(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToByte((byte)unboxToInt(arg)); - if (arg instanceof java.lang.Character) return boxToByte((byte)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return (java.lang.Byte)arg; - if (arg instanceof java.lang.Long) return boxToByte((byte)unboxToLong(arg)); - if (arg instanceof java.lang.Short) return boxToByte((byte)unboxToShort(arg)); - if (arg instanceof java.lang.Float) return boxToByte((byte)unboxToFloat(arg)); - if (arg instanceof java.lang.Double) return boxToByte((byte)unboxToDouble(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toShort */ - public static java.lang.Short toShort(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToShort((short)unboxToInt(arg)); - if (arg instanceof java.lang.Long) return boxToShort((short)unboxToLong(arg)); - if (arg instanceof java.lang.Character) return boxToShort((short)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToShort((short)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return (java.lang.Short)arg; - if (arg instanceof java.lang.Float) return boxToShort((short)unboxToFloat(arg)); - if (arg instanceof java.lang.Double) return boxToShort((short)unboxToDouble(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toInt */ - public static java.lang.Integer toInteger(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return (java.lang.Integer)arg; - if (arg instanceof java.lang.Long) return boxToInteger((int)unboxToLong(arg)); - if (arg instanceof java.lang.Double) return boxToInteger((int)unboxToDouble(arg)); - if (arg instanceof java.lang.Float) return boxToInteger((int)unboxToFloat(arg)); - if (arg instanceof java.lang.Character) return boxToInteger((int)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToInteger((int)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return boxToInteger((int)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toLong */ - public static java.lang.Long toLong(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToLong((long)unboxToInt(arg)); - if (arg instanceof java.lang.Double) return boxToLong((long)unboxToDouble(arg)); - if (arg instanceof java.lang.Float) return boxToLong((long)unboxToFloat(arg)); - if (arg instanceof java.lang.Long) return (java.lang.Long)arg; - if (arg instanceof java.lang.Character) return boxToLong((long)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToLong((long)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return boxToLong((long)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toFloat */ - public static java.lang.Float toFloat(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToFloat((float)unboxToInt(arg)); - if (arg instanceof java.lang.Long) return boxToFloat((float)unboxToLong(arg)); - if (arg instanceof java.lang.Float) return (java.lang.Float)arg; - if (arg instanceof java.lang.Double) return boxToFloat((float)unboxToDouble(arg)); - if (arg instanceof java.lang.Character) return boxToFloat((float)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToFloat((float)unboxToByte(arg)); - if (arg instanceof 
java.lang.Short) return boxToFloat((float)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toDouble */ - public static java.lang.Double toDouble(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToDouble((double)unboxToInt(arg)); - if (arg instanceof java.lang.Float) return boxToDouble((double)unboxToFloat(arg)); - if (arg instanceof java.lang.Double) return (java.lang.Double)arg; - if (arg instanceof java.lang.Long) return boxToDouble((double)unboxToLong(arg)); - if (arg instanceof java.lang.Character) return boxToDouble((double)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToDouble((double)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return boxToDouble((double)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - -} diff --git a/tests/scala2-library/src/library/scala/runtime/ByteRef.java b/tests/scala2-library/src/library/scala/runtime/ByteRef.java deleted file mode 100644 index 27d3259db370..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/ByteRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class ByteRef implements java.io.Serializable { - private static final long serialVersionUID = -100666928446877072L; - - public byte elem; - public ByteRef(byte elem) { this.elem = elem; } - public String toString() { return java.lang.Byte.toString(elem); } - - public static ByteRef create(byte e) { return new ByteRef(e); } - public static ByteRef zero() { return new ByteRef((byte)0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/CharRef.java b/tests/scala2-library/src/library/scala/runtime/CharRef.java deleted file mode 100644 index 31956f5b5576..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/CharRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class CharRef implements java.io.Serializable { - private static final long serialVersionUID = 6537214938268005702L; - - public char elem; - public CharRef(char elem) { this.elem = elem; } - public String toString() { return java.lang.Character.toString(elem); } - - public static CharRef create(char e) { return new CharRef(e); } - public static CharRef zero() { return new CharRef((char)0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/DoubleRef.java b/tests/scala2-library/src/library/scala/runtime/DoubleRef.java deleted file mode 100644 index 0c7d9156d6b8..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/DoubleRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class DoubleRef implements java.io.Serializable { - private static final long serialVersionUID = 8304402127373655534L; - - public double elem; - public DoubleRef(double elem) { this.elem = elem; } - public String toString() { return java.lang.Double.toString(elem); } - - public 
static DoubleRef create(double e) { return new DoubleRef(e); } - public static DoubleRef zero() { return new DoubleRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/FloatRef.java b/tests/scala2-library/src/library/scala/runtime/FloatRef.java deleted file mode 100644 index f0e1d5f8f36c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/FloatRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class FloatRef implements java.io.Serializable { - private static final long serialVersionUID = -5793980990371366933L; - - public float elem; - public FloatRef(float elem) { this.elem = elem; } - public String toString() { return java.lang.Float.toString(elem); } - - public static FloatRef create(float e) { return new FloatRef(e); } - public static FloatRef zero() { return new FloatRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/IntRef.java b/tests/scala2-library/src/library/scala/runtime/IntRef.java deleted file mode 100644 index adcf474aaea4..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/IntRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class IntRef implements java.io.Serializable { - private static final long serialVersionUID = 1488197132022872888L; - - public int elem; - public IntRef(int elem) { this.elem = elem; } - public String toString() { return java.lang.Integer.toString(elem); } - - public static IntRef create(int e) { return new IntRef(e); } - public static IntRef zero() { return new IntRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/LambdaDeserialize.java b/tests/scala2-library/src/library/scala/runtime/LambdaDeserialize.java deleted file mode 100644 index 4c5198cc483c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/LambdaDeserialize.java +++ /dev/null @@ -1,38 +0,0 @@ -package scala.runtime; - - -import java.lang.invoke.*; -import java.util.HashMap; - -public final class LambdaDeserialize { - public static final MethodType DESERIALIZE_LAMBDA_MT = MethodType.fromMethodDescriptorString("(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", LambdaDeserialize.class.getClassLoader()); - - private MethodHandles.Lookup lookup; - private final HashMap cache = new HashMap<>(); - private final LambdaDeserializer$ l = LambdaDeserializer$.MODULE$; - private final HashMap targetMethodMap; - - private LambdaDeserialize(MethodHandles.Lookup lookup, MethodHandle[] targetMethods) { - this.lookup = lookup; - targetMethodMap = new HashMap<>(targetMethods.length); - for (MethodHandle targetMethod : targetMethods) { - MethodHandleInfo info = lookup.revealDirect(targetMethod); - String key = nameAndDescriptorKey(info.getName(), info.getMethodType().toMethodDescriptorString()); - targetMethodMap.put(key, targetMethod); - } - } - - public Object deserializeLambda(SerializedLambda serialized) { - return l.deserializeLambda(lookup, cache, targetMethodMap, serialized); - } - - public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, - MethodType invokedType, MethodHandle... 
targetMethods) throws Throwable { - MethodHandle deserializeLambda = lookup.findVirtual(LambdaDeserialize.class, "deserializeLambda", DESERIALIZE_LAMBDA_MT); - MethodHandle exact = deserializeLambda.bindTo(new LambdaDeserialize(lookup, targetMethods)).asType(invokedType); - return new ConstantCallSite(exact); - } - public static String nameAndDescriptorKey(String name, String descriptor) { - return name + descriptor; - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/LambdaDeserializer.scala b/tests/scala2-library/src/library/scala/runtime/LambdaDeserializer.scala deleted file mode 100644 index 3c36f30cf8fd..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/LambdaDeserializer.scala +++ /dev/null @@ -1,122 +0,0 @@ -package scala.runtime - -import java.lang.invoke._ - -/** - * This class is only intended to be called by synthetic `$deserializeLambda$` method that the Scala 2.12 - * compiler will add to classes hosting lambdas. - * - * It is not intended to be consumed directly. - */ -object LambdaDeserializer { - /** - * Deserialize a lambda by calling `LambdaMetafactory.altMetafactory` to spin up a lambda class - * and instantiating this class with the captured arguments. - * - * A cache may be provided to ensure that subsequent deserialization of the same lambda expression - * is cheap, it amounts to a reflective call to the constructor of the previously created class. - * However, deserialization of the same lambda expression is not guaranteed to use the same class, - * concurrent deserialization of the same lambda expression may spin up more than one class. - * - * Assumptions: - * - No additional marker interfaces are required beyond `{java.io,scala.}Serializable`. These are - * not stored in `SerializedLambda`, so we can't reconstitute them. - * - No additional bridge methods are passed to `altMetafactory`. Again, these are not stored. - * - * @param lookup The factory for method handles. Must have access to the implementation method, the - * functional interface class, and `java.io.Serializable` or `scala.Serializable` as - * required. - * @param cache A cache used to avoid spinning up a class for each deserialization of a given lambda. May be `null` - * @param serialized The lambda to deserialize. Note that this is typically created by the `readResolve` - * member of the anonymous class created by `LambdaMetaFactory`. - * @return An instance of the functional interface - */ - def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], - targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { - assert(targetMethodMap != null) - def slashDot(name: String) = name.replaceAll("/", ".") - val loader = lookup.lookupClass().getClassLoader - val implClass = loader.loadClass(slashDot(serialized.getImplClass)) - val key = LambdaDeserialize.nameAndDescriptorKey(serialized.getImplMethodName, serialized.getImplMethodSignature) - - def makeCallSite: CallSite = { - import serialized._ - def parseDescriptor(s: String) = - MethodType.fromMethodDescriptorString(s, loader) - - val funcInterfaceSignature = parseDescriptor(getFunctionalInterfaceMethodSignature) - val instantiated = parseDescriptor(getInstantiatedMethodType) - val functionalInterfaceClass = loader.loadClass(slashDot(getFunctionalInterfaceClass)) - - val implMethodSig = parseDescriptor(getImplMethodSignature) - // Construct the invoked type from the impl method type. 
This is the type of a factory - // that will be generated by the meta-factory. It is a method type, with param types - // coming form the types of the captures, and return type being the functional interface. - val invokedType: MethodType = { - // 1. Add receiver for non-static impl methods - val withReceiver = getImplMethodKind match { - case MethodHandleInfo.REF_invokeStatic | MethodHandleInfo.REF_newInvokeSpecial => - implMethodSig - case _ => - implMethodSig.insertParameterTypes(0, implClass) - } - // 2. Remove lambda parameters, leaving only captures. Note: the receiver may be a lambda parameter, - // such as in `Function s = Object::toString` - val lambdaArity = funcInterfaceSignature.parameterCount() - val from = withReceiver.parameterCount() - lambdaArity - val to = withReceiver.parameterCount() - - // 3. Drop the lambda return type and replace with the functional interface. - withReceiver.dropParameterTypes(from, to).changeReturnType(functionalInterfaceClass) - } - - // Lookup the implementation method - val implMethod: MethodHandle = if (targetMethodMap.containsKey(key)) { - targetMethodMap.get(key) - } else { - throw new IllegalArgumentException("Illegal lambda deserialization") - } - - val flags: Int = LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS - val isScalaFunction = functionalInterfaceClass.getName.startsWith("scala.Function") - val markerInterface: Class[_] = loader.loadClass(if (isScalaFunction) ScalaSerializable else JavaIOSerializable) - - LambdaMetafactory.altMetafactory( - lookup, getFunctionalInterfaceMethodName, invokedType, - - /* samMethodType = */ funcInterfaceSignature, - /* implMethod = */ implMethod, - /* instantiatedMethodType = */ instantiated, - /* flags = */ flags.asInstanceOf[AnyRef], - /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef], - /* markerInterfaces[0] = */ markerInterface, - /* bridgeCount = */ 0.asInstanceOf[AnyRef] - ) - } - - val factory: MethodHandle = if (cache == null) { - makeCallSite.getTarget - } else cache.synchronized{ - cache.get(key) match { - case null => - val callSite = makeCallSite - val temp = callSite.getTarget - cache.put(key, temp) - temp - case target => target - } - } - - val captures = Array.tabulate(serialized.getCapturedArgCount)(n => serialized.getCapturedArg(n)) - factory.invokeWithArguments(captures: _*) - } - - private val ScalaSerializable = "scala.Serializable" - - private val JavaIOSerializable = { - // We could actually omit this marker interface as LambdaMetaFactory will add it if - // the FLAG_SERIALIZABLE is set and of the provided markers extend it. But the code - // is cleaner if we uniformly add a single marker, so I'm leaving it in place. - "java.io.Serializable" - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/LazyRef.scala b/tests/scala2-library/src/library/scala/runtime/LazyRef.scala deleted file mode 100644 index 5a0bd5442c64..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/LazyRef.scala +++ /dev/null @@ -1,157 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL and Lightbend, Inc ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.runtime - -/** Classes used as holders for lazy vals defined in methods. 
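Roughly the expansion a method-local `lazy val` receives in terms of the holder classes defined below: the holder carries the `initialized` flag and the value, and initialization is guarded by double-checked locking on the holder. The variable and helper names here are invented, and the real compiler-generated code differs in detail.

import scala.runtime.LazyInt

object LazyHolderDemo {
  // Sketch of `def use() = { lazy val n = compute(); n + n }` after lowering.
  def use(): Int = {
    val nHolder = new LazyInt
    def compute(): Int = { println("computing n"); 21 }
    def n(): Int =
      if (nHolder.initialized) nHolder.value
      else nHolder.synchronized {
        if (nHolder.initialized) nHolder.value
        else nHolder.initialize(compute())
      }
    n() + n()   // "computing n" is printed only once
  }

  def main(args: Array[String]): Unit =
    println(use())  // prints 42
}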
*/ - -class LazyRef[T] { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: T = _ - def value: T = _value - def initialize(value: T): T = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyRef ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyBoolean { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Boolean = _ - def value: Boolean = _value - def initialize(value: Boolean): Boolean = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyBoolean ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyByte { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Byte = _ - - def value: Byte = _value - - def initialize(value: Byte): Byte = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyByte ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyChar { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Char = _ - def value: Char = _value - def initialize(value: Char): Char = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyChar ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyShort { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Short = _ - def value: Short = _value - def initialize(value: Short): Short = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyShort ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyInt { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Int = _ - def value: Int = _value - def initialize(value: Int): Int = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyInt ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyLong { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Long = _ - def value: Long = _value - def initialize(value: Long): Long = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyLong ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyFloat { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Float = _ - def value: Float = _value - def initialize(value: Float): Float = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyFloat ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyDouble { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - private[this] var _value: Double = _ - def value: Double = _value - def initialize(value: Double): Double = { - _value = value - _initialized = true - value - } - - override def toString = s"LazyDouble ${if (_initialized) s"of: ${_value}" else "thunk"}" -} - -class LazyUnit { - @volatile private[this] var _initialized: Boolean = _ - def initialized = _initialized - - def initialize(): Unit = _initialized = true - - override def toString = s"LazyUnit${if (_initialized) "" else " thunk"}" -} diff --git 
a/tests/scala2-library/src/library/scala/runtime/LongRef.java b/tests/scala2-library/src/library/scala/runtime/LongRef.java deleted file mode 100644 index 51426ab8f693..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/LongRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class LongRef implements java.io.Serializable { - private static final long serialVersionUID = -3567869820105829499L; - - public long elem; - public LongRef(long elem) { this.elem = elem; } - public String toString() { return java.lang.Long.toString(elem); } - - public static LongRef create(long e) { return new LongRef(e); } - public static LongRef zero() { return new LongRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/MethodCache.scala b/tests/scala2-library/src/library/scala/runtime/MethodCache.scala deleted file mode 100644 index a8fdfc10595d..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/MethodCache.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -import java.lang.reflect.{ Method => JMethod } -import java.lang.{ Class => JClass } - -import scala.annotation.tailrec - -/** An element of a polymorphic object cache. - * This class is referred to by the `CleanUp` phase. Each `PolyMethodCache` chain - * must only relate to one method as `PolyMethodCache` does not identify - * the method name and argument types. In practice, one variable will be - * generated per call point, and will uniquely relate to the method called - * at that point, making the method name and argument types irrelevant. */ -/* TODO: if performance is acceptable, PolyMethodCache should be made generic on the method type */ -private[scala] sealed abstract class MethodCache { - /** Searches for a cached method in the `MethodCache` chain that - * is compatible with receiver class `forReceiver`. If none is cached, - * `null` is returned. If `null` is returned, find's caller should look- - * up the right method using whichever means it prefers, and add it to - * the cache for later use. 
*/ - def find(forReceiver: JClass[_]): JMethod - def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache -} - -private[scala] final class EmptyMethodCache extends MethodCache { - - def find(forReceiver: JClass[_]): JMethod = null - - def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache = - new PolyMethodCache(this, forReceiver, forMethod, 1) - -} - -private[scala] final class MegaMethodCache( - private[this] val forName: String, - private[this] val forParameterTypes: Array[JClass[_]] -) extends MethodCache { - - def find(forReceiver: JClass[_]): JMethod = - forReceiver.getMethod(forName, forParameterTypes:_*) - - def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache = this - -} - -private[scala] final class PolyMethodCache( - private[this] val next: MethodCache, - private[this] val receiver: JClass[_], - private[this] val method: JMethod, - private[this] val complexity: Int -) extends MethodCache { - - /** To achieve tail recursion this must be a separate method - * from `find`, because the type of next is not `PolyMethodCache`. - */ - @tailrec private def findInternal(forReceiver: JClass[_]): JMethod = - if (forReceiver eq receiver) method - else next match { - case x: PolyMethodCache => x findInternal forReceiver - case _ => next find forReceiver - } - - def find(forReceiver: JClass[_]): JMethod = findInternal(forReceiver) - - // TODO: come up with a more realistic number - final private val MaxComplexity = 160 - - def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache = - if (complexity < MaxComplexity) - new PolyMethodCache(this, forReceiver, forMethod, complexity + 1) - else - new MegaMethodCache(forMethod.getName, forMethod.getParameterTypes) -} diff --git a/tests/scala2-library/src/library/scala/runtime/NonLocalReturnControl.scala b/tests/scala2-library/src/library/scala/runtime/NonLocalReturnControl.scala deleted file mode 100644 index a926956acf8b..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/NonLocalReturnControl.scala +++ /dev/null @@ -1,16 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -import scala.util.control.ControlThrowable - -class NonLocalReturnControl[@specialized T](val key: AnyRef, val value: T) extends ControlThrowable { - final override def fillInStackTrace(): Throwable = this -} diff --git a/tests/scala2-library/src/library/scala/runtime/Nothing$.scala b/tests/scala2-library/src/library/scala/runtime/Nothing$.scala deleted file mode 100644 index 4ecc5362231f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/Nothing$.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -/** - * Dummy class which exist only to satisfy the JVM. It corresponds - * to `scala.Nothing`. If such type appears in method - * signatures, it is erased to this one. 
- */ -sealed abstract class Nothing$ extends Throwable diff --git a/tests/scala2-library/src/library/scala/runtime/Null$.scala b/tests/scala2-library/src/library/scala/runtime/Null$.scala deleted file mode 100644 index 87ce0a249831..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/Null$.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -/** - * Dummy class which exist only to satisfy the JVM. It corresponds to - * `scala.Null`. If such type appears in method signatures, it is erased - * to this one. A private constructor ensures that Java code can't create - * subclasses. The only value of type Null$ should be null - */ -sealed abstract class Null$ private () diff --git a/tests/scala2-library/src/library/scala/runtime/ObjectRef.java b/tests/scala2-library/src/library/scala/runtime/ObjectRef.java deleted file mode 100644 index b34f81c9c89c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/ObjectRef.java +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class ObjectRef implements java.io.Serializable { - private static final long serialVersionUID = -9055728157600312291L; - - public T elem; - public ObjectRef(T elem) { this.elem = elem; } - @Override - public String toString() { return String.valueOf(elem); } - - public static ObjectRef create(U e) { return new ObjectRef(e); } - public static ObjectRef zero() { return new ObjectRef(null); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichBoolean.scala b/tests/scala2-library/src/library/scala/runtime/RichBoolean.scala deleted file mode 100644 index 4f867960a095..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichBoolean.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] { - protected def ord = scala.math.Ordering.Boolean -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichByte.scala b/tests/scala2-library/src/library/scala/runtime/RichByte.scala deleted file mode 100644 index ce658d2277ee..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichByte.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] { - protected def num = scala.math.Numeric.ByteIsIntegral - protected def ord = scala.math.Ordering.Byte - - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self - override def shortValue() = 
self.toShort - - override def isValidByte = true - - override def abs: Byte = math.abs(self).toByte - override def max(that: Byte): Byte = math.max(self, that).toByte - override def min(that: Byte): Byte = math.min(self, that).toByte - override def signum: Int = math.signum(self.toInt) -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichChar.scala b/tests/scala2-library/src/library/scala/runtime/RichChar.scala deleted file mode 100644 index 71ea3a21e1a7..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichChar.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -import java.lang.Character - -final class RichChar(val self: Char) extends AnyVal with IntegralProxy[Char] { - protected def num = scala.math.Numeric.CharIsIntegral - protected def ord = scala.math.Ordering.Char - - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort - - override def isValidChar = true - - override def abs: Char = self - override def max(that: Char): Char = math.max(self.toInt, that.toInt).toChar - override def min(that: Char): Char = math.min(self.toInt, that.toInt).toChar - override def signum: Int = math.signum(self.toInt) - - def asDigit: Int = Character.digit(self, Character.MAX_RADIX) - - def isControl: Boolean = Character.isISOControl(self) - def isDigit: Boolean = Character.isDigit(self) - def isLetter: Boolean = Character.isLetter(self) - def isLetterOrDigit: Boolean = Character.isLetterOrDigit(self) - def isWhitespace: Boolean = Character.isWhitespace(self) - def isSpaceChar: Boolean = Character.isSpaceChar(self) - def isHighSurrogate: Boolean = Character.isHighSurrogate(self) - def isLowSurrogate: Boolean = Character.isLowSurrogate(self) - def isSurrogate: Boolean = isHighSurrogate || isLowSurrogate - def isUnicodeIdentifierStart: Boolean = Character.isUnicodeIdentifierStart(self) - def isUnicodeIdentifierPart: Boolean = Character.isUnicodeIdentifierPart(self) - def isIdentifierIgnorable: Boolean = Character.isIdentifierIgnorable(self) - def isMirrored: Boolean = Character.isMirrored(self) - - def isLower: Boolean = Character.isLowerCase(self) - def isUpper: Boolean = Character.isUpperCase(self) - def isTitleCase: Boolean = Character.isTitleCase(self) - - def toLower: Char = Character.toLowerCase(self) - def toUpper: Char = Character.toUpperCase(self) - def toTitleCase: Char = Character.toTitleCase(self) - - def getType: Int = Character.getType(self) - def getNumericValue: Int = Character.getNumericValue(self) - def getDirectionality: Byte = Character.getDirectionality(self) - def reverseBytes: Char = Character.reverseBytes(self) - - // Java 5 Character methods not added: - // - // public static boolean isDefined(char ch) - // public static boolean isJavaIdentifierStart(char ch) - // public static boolean isJavaIdentifierPart(char ch) -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichDouble.scala b/tests/scala2-library/src/library/scala/runtime/RichDouble.scala deleted file mode 100644 index 9d7a55d5cd7f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichDouble.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* __ *\ 
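
RichChar above is a thin value-class wrapper that forwards almost everything to java.lang.Character; the small snippet below simply calls a few of those delegations directly to show what the wrapper methods compute (plain JDK calls, nothing library-specific assumed):

    object CharOps {
      val digit = Character.digit('f', Character.MAX_RADIX) // 15 -- what asDigit computes for 'f'
      val alnum = Character.isLetterOrDigit('7')            // true
      val upper = Character.toUpperCase('a')                // 'A'
      val space = Character.isWhitespace('\t')              // true
    }
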
-** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] { - protected def num = scala.math.Numeric.DoubleIsFractional - protected def ord = scala.math.Ordering.Double - protected def integralNum = scala.math.Numeric.DoubleAsIfIntegral - - override def doubleValue() = self - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort - - override def isWhole = { - val l = self.toLong - l.toDouble == self || l == Long.MaxValue && self < Double.PositiveInfinity || l == Long.MinValue && self > Double.NegativeInfinity - } - override def isValidByte = self.toByte.toDouble == self - override def isValidShort = self.toShort.toDouble == self - override def isValidChar = self.toChar.toDouble == self - override def isValidInt = self.toInt.toDouble == self - // override def isValidLong = { val l = self.toLong; l.toDouble == self && l != Long.MaxValue } - // override def isValidFloat = self.toFloat.toDouble == self - // override def isValidDouble = !java.lang.Double.isNaN(self) - - def isNaN: Boolean = java.lang.Double.isNaN(self) - def isInfinity: Boolean = java.lang.Double.isInfinite(self) - def isPosInfinity: Boolean = Double.PositiveInfinity == self - def isNegInfinity: Boolean = Double.NegativeInfinity == self - - override def abs: Double = math.abs(self) - override def max(that: Double): Double = math.max(self, that) - override def min(that: Double): Double = math.min(self, that) - override def signum: Int = math.signum(self).toInt // !!! NaN - - def round: Long = math.round(self) - def ceil: Double = math.ceil(self) - def floor: Double = math.floor(self) - - /** Converts an angle measured in degrees to an approximately equivalent - * angle measured in radians. - * - * @return the measurement of the angle x in radians. - */ - def toRadians: Double = math.toRadians(self) - - /** Converts an angle measured in radians to an approximately equivalent - * angle measured in degrees. - * @return the measurement of the angle x in degrees. 
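
The isValid*/isWhole checks above all rely on the same round-trip idea: narrow to the smaller type, widen back, and compare with the original (with extra guards for infinities in isWhole). A small self-contained illustration of that check, not library code:

    object DoubleChecks {
      // Round-trip check: a Double holds a valid Int iff narrowing and widening loses nothing.
      def isValidInt(d: Double): Boolean = d.toInt.toDouble == d

      val ok   = isValidInt(3.0)    // true
      val frac = isValidInt(3.5)    // false: 3.5.toInt is 3
      val big  = isValidInt(1e10)   // false: larger than Int.MaxValue
    }
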
- */ - def toDegrees: Double = math.toDegrees(self) -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichException.scala b/tests/scala2-library/src/library/scala/runtime/RichException.scala deleted file mode 100644 index 2863fb6d7ce2..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichException.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -import scala.compat.Platform.EOL - -@deprecated("use Throwable#getStackTrace", "2.11.0") -final class RichException(exc: Throwable) { - def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL) -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichFloat.scala b/tests/scala2-library/src/library/scala/runtime/RichFloat.scala deleted file mode 100644 index 93777f2405fa..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichFloat.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] { - protected def num = scala.math.Numeric.FloatIsFractional - protected def ord = scala.math.Ordering.Float - protected def integralNum = scala.math.Numeric.FloatAsIfIntegral - - override def doubleValue() = self.toDouble - override def floatValue() = self - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort - - override def isWhole = { - val l = self.toLong - l.toFloat == self || l == Long.MaxValue && self < Float.PositiveInfinity || l == Long.MinValue && self > Float.NegativeInfinity - } - override def isValidByte = self.toByte.toFloat == self - override def isValidShort = self.toShort.toFloat == self - override def isValidChar = self.toChar.toFloat == self - override def isValidInt = { val i = self.toInt; i.toFloat == self && i != Int.MaxValue } - // override def isValidLong = { val l = self.toLong; l.toFloat == self && l != Long.MaxValue } - // override def isValidFloat = !java.lang.Float.isNaN(self) - // override def isValidDouble = !java.lang.Float.isNaN(self) - - def isNaN: Boolean = java.lang.Float.isNaN(self) - def isInfinity: Boolean = java.lang.Float.isInfinite(self) - def isPosInfinity: Boolean = Float.PositiveInfinity == self - def isNegInfinity: Boolean = Float.NegativeInfinity == self - - override def abs: Float = math.abs(self) - override def max(that: Float): Float = math.max(self, that) - override def min(that: Float): Float = math.min(self, that) - override def signum: Int = math.signum(self).toInt // !!! NaN - - def round: Int = math.round(self) - def ceil: Float = math.ceil(self.toDouble).toFloat - def floor: Float = math.floor(self.toDouble).toFloat - - /** Converts an angle measured in degrees to an approximately equivalent - * angle measured in radians. - * - * @return the measurement of the angle `x` in radians. - */ - def toRadians: Float = math.toRadians(self.toDouble).toFloat - - /** Converts an angle measured in radians to an approximately equivalent - * angle measured in degrees. 
- * - * @return the measurement of the angle `x` in degrees. - */ - def toDegrees: Float = math.toDegrees(self.toDouble).toFloat -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichInt.scala b/tests/scala2-library/src/library/scala/runtime/RichInt.scala deleted file mode 100644 index 157d5a92b7fd..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichInt.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -import scala.collection.immutable.Range - -// Note that this does not implement IntegralProxy[Int] so that it can return -// the Int-specific Range class from until/to. -final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] with RangedProxy[Int] { - protected def num = scala.math.Numeric.IntIsIntegral - protected def ord = scala.math.Ordering.Int - - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self - override def byteValue() = self.toByte - override def shortValue() = self.toShort - - /** Returns `'''true'''` if this number has no decimal component. - * Always `'''true'''` for `RichInt`. - */ - def isWhole = true - - override def isValidInt = true - def isValidLong = true - - override def abs: Int = math.abs(self) - override def max(that: Int): Int = math.max(self, that) - override def min(that: Int): Int = math.min(self, that) - override def signum: Int = math.signum(self) - - /** There is no reason to round an `Int`, but this method is provided to avoid accidental loss of precision from a detour through `Float`. */ - @deprecated("this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0") - def round: Int = self - - def toBinaryString: String = java.lang.Integer.toBinaryString(self) - def toHexString: String = java.lang.Integer.toHexString(self) - def toOctalString: String = java.lang.Integer.toOctalString(self) - - type ResultWithoutStep = Range - - /** - * @param end The final bound of the range to make. - * @return A [[scala.collection.immutable.Range]] from `this` up to but - * not including `end`. - */ - def until(end: Int): Range = Range(self, end) - - /** - * @param end The final bound of the range to make. - * @param step The number to increase by for each step of the range. - * @return A [[scala.collection.immutable.Range]] from `this` up to but - * not including `end`. - */ - def until(end: Int, step: Int): Range = Range(self, end, step) - - /** like `until`, but includes the last index */ - /** - * @param end The final bound of the range to make. - * @return A [[scala.collection.immutable.Range]] from `'''this'''` up to - * and including `end`. - */ - def to(end: Int): Range.Inclusive = Range.inclusive(self, end) - - /** - * @param end The final bound of the range to make. - * @param step The number to increase by for each step of the range. - * @return A [[scala.collection.immutable.Range]] from `'''this'''` up to - * and including `end`. 
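
The until/to methods above are what give Int its Range syntax; the difference is an exclusive versus inclusive upper bound, with an optional step. For example:

    object Ranges {
      val r1  = Range(1, 5)             // 1, 2, 3, 4        (what `1 until 5` builds)
      val r2  = Range.inclusive(1, 5)   // 1, 2, 3, 4, 5     (what `1 to 5` builds)
      val r3  = Range(0, 10, 3)         // 0, 3, 6, 9        (`0 until 10 by 3`)
      val sum = r2.sum                  // 15
    }
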
- */ - def to(end: Int, step: Int): Range.Inclusive = Range.inclusive(self, end, step) -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichLong.scala b/tests/scala2-library/src/library/scala/runtime/RichLong.scala deleted file mode 100644 index 233ce231b4c8..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichLong.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] { - protected def num = scala.math.Numeric.LongIsIntegral - protected def ord = scala.math.Ordering.Long - - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort - - override def isValidByte = self.toByte.toLong == self - override def isValidShort = self.toShort.toLong == self - override def isValidChar = self.toChar.toLong == self - override def isValidInt = self.toInt.toLong == self - def isValidLong = true - // override def isValidFloat = self.toFloat.toLong == self && self != Long.MaxValue - // override def isValidDouble = self.toDouble.toLong == self && self != Long.MaxValue - - override def abs: Long = math.abs(self) - override def max(that: Long): Long = math.max(self, that) - override def min(that: Long): Long = math.min(self, that) - override def signum: Int = math.signum(self).toInt - - /** There is no reason to round a `Long`, but this method is provided to avoid accidental conversion to `Int` through `Float`. */ - @deprecated("this is an integer type; there is no reason to round it. 
Perhaps you meant to call this on a floating-point value?", "2.11.0") - def round: Long = self - - def toBinaryString: String = java.lang.Long.toBinaryString(self) - def toHexString: String = java.lang.Long.toHexString(self) - def toOctalString: String = java.lang.Long.toOctalString(self) -} diff --git a/tests/scala2-library/src/library/scala/runtime/RichShort.scala b/tests/scala2-library/src/library/scala/runtime/RichShort.scala deleted file mode 100644 index b35beff7eb4c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/RichShort.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] { - protected def num = scala.math.Numeric.ShortIsIntegral - protected def ord = scala.math.Ordering.Short - - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self - - override def isValidShort = true - - override def abs: Short = math.abs(self.toInt).toShort - override def max(that: Short): Short = math.max(self.toInt, that.toInt).toShort - override def min(that: Short): Short = math.min(self.toInt, that.toInt).toShort - override def signum: Int = math.signum(self.toInt) -} diff --git a/tests/scala2-library/src/library/scala/runtime/ScalaNumberProxy.scala b/tests/scala2-library/src/library/scala/runtime/ScalaNumberProxy.scala deleted file mode 100644 index e7345c014d85..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/ScalaNumberProxy.scala +++ /dev/null @@ -1,86 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -import scala.collection.immutable -import scala.math.ScalaNumericAnyConversions -import immutable.NumericRange -import Proxy.Typed - -/** Base classes for the Rich* wrappers of the primitive types. - * As with all classes in scala.runtime.*, this is not a supported API. - * - * @author Paul Phillips - * @version 2.9 - * @since 2.9 - */ -trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed[T] with OrderedProxy[T] { - protected implicit def num: Numeric[T] - - def underlying() = self.asInstanceOf[AnyRef] - def doubleValue() = num.toDouble(self) - def floatValue() = num.toFloat(self) - def longValue() = num.toLong(self) - def intValue() = num.toInt(self) - def byteValue() = intValue().toByte - def shortValue() = intValue().toShort - - /** Returns `'''this'''` if `'''this''' < that` or `that` otherwise. */ - def min(that: T): T = num.min(self, that) - /** Returns `'''this'''` if `'''this''' > that` or `that` otherwise. */ - def max(that: T): T = num.max(self, that) - /** Returns the absolute value of `'''this'''`. */ - def abs = num.abs(self) - /** Returns the signum of `'''this'''`. 
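
ScalaNumberProxy above shows the underlying pattern: keep a Numeric[T] (which is also an Ordering[T]) around and implement min/max/abs/signum once for every primitive wrapper. The same trick works in ordinary user code, as in this sketch (not part of the library):

    object NumericOps {
      // One implementation serves Int, Double, BigDecimal, ... via the Numeric type class.
      def clamp[T](x: T, lo: T, hi: T)(implicit num: Numeric[T]): T =
        num.min(num.max(x, lo), hi)

      val a = clamp(42, 0, 10)                                   // 10
      val b = clamp(-3.5, 0.0, 1.0)                              // 0.0
      val c = clamp(BigDecimal(2), BigDecimal(0), BigDecimal(5)) // 2
    }
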
*/ - def signum = num.signum(self) -} -trait ScalaWholeNumberProxy[T] extends Any with ScalaNumberProxy[T] { - def isWhole = true -} -trait IntegralProxy[T] extends Any with ScalaWholeNumberProxy[T] with RangedProxy[T] { - protected implicit def num: Integral[T] - type ResultWithoutStep = NumericRange[T] - - def until(end: T): NumericRange.Exclusive[T] = NumericRange(self, end, num.one) - def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step) - def to(end: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, num.one) - def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step) -} -trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] with RangedProxy[T] { - protected implicit def num: Fractional[T] - protected implicit def integralNum: Integral[T] - - /** In order to supply predictable ranges, we require an Integral[T] which provides - * us with discrete operations on the (otherwise fractional) T. See Numeric.DoubleAsIfIntegral - * for an example. - */ - type ResultWithoutStep = Range.Partial[T, NumericRange[T]] - - def isWhole = false - def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _)) - def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step) - def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _)) - def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step) -} - -trait OrderedProxy[T] extends Any with Ordered[T] with Typed[T] { - protected def ord: Ordering[T] - - def compare(y: T) = ord.compare(self, y) -} -trait RangedProxy[T] extends Any with Typed[T] { - type ResultWithoutStep - - def until(end: T): ResultWithoutStep - def until(end: T, step: T): immutable.IndexedSeq[T] - def to(end: T): ResultWithoutStep - def to(end: T, step: T): immutable.IndexedSeq[T] -} - diff --git a/tests/scala2-library/src/library/scala/runtime/ScalaRunTime.scala b/tests/scala2-library/src/library/scala/runtime/ScalaRunTime.scala deleted file mode 100644 index ca9f98fa9fb0..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/ScalaRunTime.scala +++ /dev/null @@ -1,268 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -import scala.collection.{ TraversableView, AbstractIterator, GenIterable } -import scala.collection.mutable.WrappedArray -import scala.collection.immutable.{ StringLike, NumericRange } -import scala.collection.generic.{ Sorted, IsTraversableLike } -import scala.reflect.{ ClassTag, classTag } -import java.lang.{ Class => jClass } - -import java.lang.reflect.{ Method => JMethod } - -/** The object ScalaRunTime provides support methods required by - * the scala runtime. All these methods should be considered - * outside the API and subject to change or removal without notice. - */ -object ScalaRunTime { - def isArray(x: Any, atLevel: Int = 1): Boolean = - x != null && isArrayClass(x.getClass, atLevel) - - private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = - clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) - - // A helper method to make my life in the pattern matcher a lot easier. 
- def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr = - traversable conversion coll drop num - - /** Return the class object representing an array with element class `clazz`. - */ - def arrayClass(clazz: jClass[_]): jClass[_] = { - // newInstance throws an exception if the erasure is Void.TYPE. see scala/bug#5680 - if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]] - else java.lang.reflect.Array.newInstance(clazz, 0).getClass - } - - /** Return the class object representing an unboxed value type, - * e.g., classOf[int], not classOf[java.lang.Integer]. The compiler - * rewrites expressions like 5.getClass to come here. - */ - def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = - classTag[T].runtimeClass.asInstanceOf[jClass[T]] - - /** Retrieve generic array element */ - def array_apply(xs: AnyRef, idx: Int): Any = { - xs match { - case x: Array[AnyRef] => x(idx).asInstanceOf[Any] - case x: Array[Int] => x(idx).asInstanceOf[Any] - case x: Array[Double] => x(idx).asInstanceOf[Any] - case x: Array[Long] => x(idx).asInstanceOf[Any] - case x: Array[Float] => x(idx).asInstanceOf[Any] - case x: Array[Char] => x(idx).asInstanceOf[Any] - case x: Array[Byte] => x(idx).asInstanceOf[Any] - case x: Array[Short] => x(idx).asInstanceOf[Any] - case x: Array[Boolean] => x(idx).asInstanceOf[Any] - case x: Array[Unit] => x(idx).asInstanceOf[Any] - case null => throw new NullPointerException - } - } - - /** update generic array element */ - def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { - xs match { - case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] - case x: Array[Int] => x(idx) = value.asInstanceOf[Int] - case x: Array[Double] => x(idx) = value.asInstanceOf[Double] - case x: Array[Long] => x(idx) = value.asInstanceOf[Long] - case x: Array[Float] => x(idx) = value.asInstanceOf[Float] - case x: Array[Char] => x(idx) = value.asInstanceOf[Char] - case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte] - case x: Array[Short] => x(idx) = value.asInstanceOf[Short] - case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean] - case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit] - case null => throw new NullPointerException - } - } - - /** Get generic array length */ - def array_length(xs: AnyRef): Int = xs match { - case x: Array[AnyRef] => x.length - case x: Array[Int] => x.length - case x: Array[Double] => x.length - case x: Array[Long] => x.length - case x: Array[Float] => x.length - case x: Array[Char] => x.length - case x: Array[Byte] => x.length - case x: Array[Short] => x.length - case x: Array[Boolean] => x.length - case x: Array[Unit] => x.length - case null => throw new NullPointerException - } - - def array_clone(xs: AnyRef): AnyRef = xs match { - case x: Array[AnyRef] => x.clone() - case x: Array[Int] => x.clone() - case x: Array[Double] => x.clone() - case x: Array[Long] => x.clone() - case x: Array[Float] => x.clone() - case x: Array[Char] => x.clone() - case x: Array[Byte] => x.clone() - case x: Array[Short] => x.clone() - case x: Array[Boolean] => x.clone() - case x: Array[Unit] => x - case null => throw new NullPointerException - } - - /** Convert an array to an object array. - * Needed to deal with vararg arguments of primitive types that are passed - * to a generic Java vararg parameter T ... 
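
array_apply/array_update/array_length above exist because Array[T] erases to a different JVM array type per primitive, so each helper enumerates every case. The same effect can be had (more slowly) through java.lang.reflect.Array, which is a handy way to see what these helpers do; illustrative only:

    import java.lang.reflect.{ Array => JArray }

    object GenericArrays {
      // Works uniformly on Array[Int], Array[String], Array[Double], ... at the cost of reflection.
      def length(xs: AnyRef): Int        = JArray.getLength(xs)
      def element(xs: AnyRef, i: Int): Any = JArray.get(xs, i)

      val ints  = Array(1, 2, 3)
      val words = Array("a", "b")
      val n1 = length(ints)        // 3
      val n2 = length(words)       // 2
      val x  = element(ints, 0)    // 1 (boxed to java.lang.Integer)
    }
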
- */ - def toObjectArray(src: AnyRef): Array[Object] = src match { - case x: Array[AnyRef] => x - case _ => - val length = array_length(src) - val dest = new Array[Object](length) - for (i <- 0 until length) - array_update(dest, i, array_apply(src, i)) - dest - } - - def toArray[T](xs: scala.collection.Seq[T]) = { - val arr = new Array[AnyRef](xs.length) - var i = 0 - for (x <- xs) { - arr(i) = x.asInstanceOf[AnyRef] - i += 1 - } - arr - } - - // Java bug: http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4071957 - // More background at ticket #2318. - def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) - - def _toString(x: Product): String = - x.productIterator.mkString(x.productPrefix + "(", ",", ")") - - def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x) - - /** A helper for case classes. */ - def typedProductIterator[T](x: Product): Iterator[T] = { - new AbstractIterator[T] { - private var c: Int = 0 - private val cmax = x.productArity - def hasNext = c < cmax - def next() = { - val result = x.productElement(c) - c += 1 - result.asInstanceOf[T] - } - } - } - - /** Old implementation of `##`. */ - @deprecated("Use scala.runtime.Statics.anyHash instead.", "2.12.0") - def hash(x: Any): Int = Statics.anyHash(x.asInstanceOf[Object]) - - /** Given any Scala value, convert it to a String. - * - * The primary motivation for this method is to provide a means for - * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naively calling toString on said value. - * In particular, it addresses the fact that (a) toString cannot be - * called on null and (b) depending on the apparent type of an - * array, toString may or may not print it in a human-readable form. - * - * @param arg the value to stringify - * @return a string representation of arg. - */ - def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue) - def stringOf(arg: Any, maxElements: Int): String = { - def packageOf(x: AnyRef) = x.getClass.getPackage match { - case null => "" - case p => p.getName - } - def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala." - def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc." 
- - // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22) - def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple") - - // We use reflection because the scala.xml package might not be available - def isSubClassOf(potentialSubClass: Class[_], ofClass: String) = - try { - val classLoader = potentialSubClass.getClassLoader - val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader) - clazz.isAssignableFrom(potentialSubClass) - } catch { - case cnfe: ClassNotFoundException => false - } - def isXmlNode(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.Node") - def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData") - - // When doing our own iteration is dangerous - def useOwnToString(x: Any) = x match { - // Range/NumericRange have a custom toString to avoid walking a gazillion elements - case _: Range | _: NumericRange[_] => true - // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 - case _: Sorted[_, _] => true - // StringBuilder(a, b, c) and similar not so attractive - case _: StringLike[_] => true - // Don't want to evaluate any elements in a view - case _: TraversableView[_, _] => true - // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] - // -> catch those by isXmlNode and isXmlMetaData. - // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom - // collections which may have useful toString methods - ticket #3710 - // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s. - case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass) - // Otherwise, nothing could possibly go wrong - case _ => false - } - - // A variation on inner for maps so they print -> instead of bare tuples - def mapInner(arg: Any): String = arg match { - case (k, v) => inner(k) + " -> " + inner(v) - case _ => inner(arg) - } - - // Special casing Unit arrays, the value class which uses a reference array type. - def arrayToString(x: AnyRef) = { - if (x.getClass.getComponentType == classOf[BoxedUnit]) - 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")") - else - WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")") - } - - // The recursively applied attempt to prettify Array printing. - // Note that iterator is used if possible and foreach is used as a - // last resort, because the parallel collections "foreach" in a - // random order even on sequences. 
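
The careful case analysis above (own toString for ranges, sorted collections and views; special handling for arrays, maps and tuples) all serves one goal: render a value without crashing on null and without the useless default Array toString. A stripped-down cousin of the same idea:

    object SafeShow {
      // Minimal version of the stringOf idea: handle null and arrays, defer to toString otherwise.
      def show(x: Any): String = x match {
        case null        => "null"
        case a: Array[_] => a.map(show).mkString("Array(", ", ", ")")
        case other       => other.toString
      }

      val s1 = show(null)            // "null"
      val s2 = show(Array(1, 2, 3))  // "Array(1, 2, 3)"
      val s3 = show(List(1, 2, 3))   // "List(1, 2, 3)"
    }
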
- def inner(arg: Any): String = arg match { - case null => "null" - case "" => "\"\"" - case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x - case x if useOwnToString(x) => x.toString - case x: AnyRef if isArray(x) => arrayToString(x) - case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") - case x: GenIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma - case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") - case x => x.toString - } - - // The try/catch is defense against iterables which aren't actually designed - // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes. - try inner(arg) - catch { - case _: UnsupportedOperationException | _: AssertionError => "" + arg - } - } - - /** stringOf formatted for use in a repl result. */ - def replStringOf(arg: Any, maxElements: Int): String = { - val s = stringOf(arg, maxElements) - val nl = if (s contains "\n") "\n" else "" - - nl + s + "\n" - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/SeqCharSequence.scala b/tests/scala2-library/src/library/scala/runtime/SeqCharSequence.scala deleted file mode 100644 index 7751bf815c8a..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/SeqCharSequence.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - -@deprecated("use Predef.SeqCharSequence", "2.11.0") -final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence { - def length: Int = xs.length - def charAt(index: Int): Char = xs(index) - def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(xs.slice(start, end)) - override def toString = xs.mkString("") -} - -// Still need this one since the implicit class ArrayCharSequence only converts -// a single argument. 
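
Both wrappers above adapt Scala character sequences to java.lang.CharSequence so they can be handed to Java APIs such as regex or Appendable. A minimal adapter of the same shape over a Vector[Char] (an illustrative class, not the deleted ones):

    final class VectorCharSequence(cs: Vector[Char]) extends CharSequence {
      def length: Int = cs.length
      def charAt(index: Int): Char = cs(index)
      def subSequence(start: Int, end: Int): CharSequence =
        new VectorCharSequence(cs.slice(start, end))
      override def toString: String = cs.mkString
    }

    object VectorCharSequenceDemo {
      val seq = new VectorCharSequence("hello".toVector)
      val ok  = java.util.regex.Pattern.matches("h.*o", seq) // true: usable wherever Java expects a CharSequence
    }
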
-final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence { - // yikes - // java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: signature: ([C)V) - // Constructor must call super() or this() - // - // def this(xs: Array[Char]) = this(xs, 0, xs.length) - - def length: Int = math.max(0, end - start) - def charAt(index: Int): Char = { - if (0 <= index && index < length) - xs(start + index) - else throw new ArrayIndexOutOfBoundsException(index) - } - def subSequence(start0: Int, end0: Int): CharSequence = { - if (start0 < 0) throw new ArrayIndexOutOfBoundsException(start0) - else if (end0 > length) throw new ArrayIndexOutOfBoundsException(end0) - else if (end0 <= start0) new ArrayCharSequence(xs, 0, 0) - else { - val newlen = end0 - start0 - val start1 = start + start0 - new ArrayCharSequence(xs, start1, start1 + newlen) - } - } - override def toString = { - val start = math.max(this.start, 0) - val end = math.min(xs.length, start + length) - - if (start >= end) "" else new String(xs, start, end - start) - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/ShortRef.java b/tests/scala2-library/src/library/scala/runtime/ShortRef.java deleted file mode 100644 index e5e8de3d8b82..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/ShortRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class ShortRef implements java.io.Serializable { - private static final long serialVersionUID = 4218441291229072313L; - - public short elem; - public ShortRef(short elem) { this.elem = elem; } - public String toString() { return java.lang.Short.toString(elem); } - - public static ShortRef create(short e) { return new ShortRef(e); } - public static ShortRef zero() { return new ShortRef((short)0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/Statics.java b/tests/scala2-library/src/library/scala/runtime/Statics.java deleted file mode 100644 index 62390cb9d030..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/Statics.java +++ /dev/null @@ -1,117 +0,0 @@ -package scala.runtime; - -/** Not for public consumption. Usage by the runtime only. - */ - -public final class Statics { - public static int mix(int hash, int data) { - int h = mixLast(hash, data); - h = Integer.rotateLeft(h, 13); - return h * 5 + 0xe6546b64; - } - - public static int mixLast(int hash, int data) { - int k = data; - - k *= 0xcc9e2d51; - k = Integer.rotateLeft(k, 15); - k *= 0x1b873593; - - return hash ^ k; - } - - public static int finalizeHash(int hash, int length) { - return avalanche(hash ^ length); - } - - /** Force all bits of the hash to avalanche. Used for finalizing the hash. 
*/ - public static int avalanche(int h) { - h ^= h >>> 16; - h *= 0x85ebca6b; - h ^= h >>> 13; - h *= 0xc2b2ae35; - h ^= h >>> 16; - - return h; - } - - public static int longHash(long lv) { - int iv = (int)lv; - if (iv == lv) - return iv; - - return java.lang.Long.hashCode(lv); - } - - public static int doubleHash(double dv) { - int iv = (int)dv; - if (iv == dv) - return iv; - - long lv = (long)dv; - if (lv == dv) - return java.lang.Long.hashCode(lv); - - float fv = (float)dv; - if (fv == dv) - return java.lang.Float.hashCode(fv); - - return java.lang.Double.hashCode(dv); - } - - public static int floatHash(float fv) { - int iv = (int)fv; - if (iv == fv) - return iv; - - long lv = (long)fv; - if (lv == fv) - return java.lang.Long.hashCode(lv); - - return java.lang.Float.hashCode(fv); - } - - /** - * Hashcode algorithm is driven by the requirements imposed - * by primitive equality semantics, namely that equal objects - * have equal hashCodes. The first priority are the integral/char - * types, which already have the same hashCodes for the same - * values except for Long. So Long's hashCode is altered to - * conform to Int's for all values in Int's range. - * - * Float is problematic because it's far too small to hold - * all the Ints, so for instance Int.MaxValue.toFloat claims - * to be == to each of the largest 64 Ints. There is no way - * to preserve equals/hashCode alignment without compromising - * the hashCode distribution, so Floats are only guaranteed - * to have the same hashCode for whole Floats in the range - * Short.MinValue to Short.MaxValue (2^16 total.) - * - * Double has its hashCode altered to match the entire Int range, - * but is not guaranteed beyond that. (But could/should it be? - * The hashCode is only 32 bits so this is a more tractable - * issue than Float's, but it might be better simply to exclude it.) - * - * Note: BigInt and BigDecimal, being arbitrary precision, could - * be made consistent with all other types for the Int range, but - * as yet have not. - * - * Note: Among primitives, Float.NaN != Float.NaN, but the boxed - * versions are equal. This still needs reconciliation. 
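
The point of longHash/doubleHash/floatHash above is cooperative equality: 1, 1L and 1.0 are == in Scala, so their hashes must agree, which is why Long/Double/Float hashes collapse onto the Int hash whenever the value fits. A compact Scala restatement of the Long and Double cases shown above (a sketch of the logic, not the runtime's own entry point; the float step is omitted for brevity):

    object Hashes {
      def longHash(lv: Long): Int = {
        val iv = lv.toInt
        if (iv.toLong == lv) iv else java.lang.Long.hashCode(lv)   // fall back outside the Int range
      }
      def doubleHash(dv: Double): Int = {
        val iv = dv.toInt
        if (iv.toDouble == dv) iv
        else {
          val lv = dv.toLong
          if (lv.toDouble == dv) java.lang.Long.hashCode(lv)
          else java.lang.Double.hashCode(dv)   // (the deleted code also tries Float before giving up)
        }
      }

      val same = longHash(1L) == doubleHash(1.0)   // true: both are 1, matching Int's hash
    }
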
- */ - public static int anyHash(Object x) { - if (x == null) - return 0; - - if (x instanceof java.lang.Long) - return longHash(((java.lang.Long)x).longValue()); - - if (x instanceof java.lang.Double) - return doubleHash(((java.lang.Double)x).doubleValue()); - - if (x instanceof java.lang.Float) - return floatHash(((java.lang.Float)x).floatValue()); - - return x.hashCode(); - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/StringAdd.scala b/tests/scala2-library/src/library/scala/runtime/StringAdd.scala deleted file mode 100644 index 37f077bcadfa..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/StringAdd.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* *\ -** ________ ___ __ ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ |_| ** -** ** -\* */ - -package scala -package runtime - - -/** A wrapper class that adds string concatenation `+` to any value */ -@deprecated("use Predef.StringAdd", "2.11.0") -final class StringAdd(val self: Any) extends AnyVal { - def +(other: String) = String.valueOf(self) + other -} diff --git a/tests/scala2-library/src/library/scala/runtime/StringFormat.scala b/tests/scala2-library/src/library/scala/runtime/StringFormat.scala deleted file mode 100644 index 5376c3f98201..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/StringFormat.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* *\ -** ________ ___ __ ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ |_| ** -** ** -\* */ - -package scala -package runtime - - -/** A wrapper class that adds a `formatted` operation to any value - */ -@deprecated("use Predef.StringFormat", "2.11.0") -final class StringFormat(val self: Any) extends AnyVal { - /** Returns string formatted according to given `format` string. - * Format strings are as for `String.format` - * (@see java.lang.String.format). 
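
The `formatted` method above is a thin indirection to String.format with the receiver as the single argument, so the standard `format` on strings covers the same ground. For example:

    object Formatting {
      val price  = "%.2f".format(3.14159)  // "3.14" (with a locale that uses '.' as decimal separator)
      val hex    = "0x%08X".format(48879)  // "0x0000BEEF"
      val padded = "%5d|".format(42)       // "   42|"
    }
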
- */ - @inline def formatted(fmtstr: String): String = fmtstr format self -} diff --git a/tests/scala2-library/src/library/scala/runtime/StructuralCallSite.java b/tests/scala2-library/src/library/scala/runtime/StructuralCallSite.java deleted file mode 100644 index f73b4f08e622..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/StructuralCallSite.java +++ /dev/null @@ -1,43 +0,0 @@ -package scala.runtime; - - -import java.lang.invoke.*; -import java.lang.ref.SoftReference; -import java.lang.reflect.Method; - -public final class StructuralCallSite { - - private Class[] parameterTypes; - private SoftReference cache = new SoftReference<>(new EmptyMethodCache()); - - private StructuralCallSite(MethodType callType) { - parameterTypes = callType.parameterArray(); - } - - public MethodCache get() { - MethodCache cache = this.cache.get(); - if (cache == null) { - cache = new EmptyMethodCache(); - this.cache = new SoftReference<>(cache); - } - return cache; - } - - public Method find(Class receiver) { - return get().find(receiver); - } - - public Method add(Class receiver, Method m) { - cache = new SoftReference(get().add(receiver, m)); - return m; - } - public Class[] parameterTypes() { - return parameterTypes; - } - - public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, - MethodType invokedType, MethodType reflectiveCallType) throws Throwable { - StructuralCallSite structuralCallSite = new StructuralCallSite(reflectiveCallType); - return new ConstantCallSite(MethodHandles.constant(StructuralCallSite.class, structuralCallSite)); - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/SymbolLiteral.java b/tests/scala2-library/src/library/scala/runtime/SymbolLiteral.java deleted file mode 100644 index d57204165d8d..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/SymbolLiteral.java +++ /dev/null @@ -1,19 +0,0 @@ -package scala.runtime; - -import java.lang.invoke.*; - -public final class SymbolLiteral { - private SymbolLiteral() { - } - - public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, - MethodType invokedType, - String value) throws Throwable { - ClassLoader classLoader = lookup.lookupClass().getClassLoader(); - MethodType type = MethodType.fromMethodDescriptorString("(Ljava/lang/Object;)Ljava/lang/Object;", classLoader); - Class symbolClass = Class.forName("scala.Symbol", false, classLoader); - MethodHandle factoryMethod = lookup.findStatic(symbolClass, "apply", type); - Object symbolValue = factoryMethod.invokeWithArguments(value); - return new ConstantCallSite(MethodHandles.constant(symbolClass, symbolValue)); - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/TraitSetter.java b/tests/scala2-library/src/library/scala/runtime/TraitSetter.java deleted file mode 100644 index d8dd8c6b04ba..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/TraitSetter.java +++ /dev/null @@ -1,7 +0,0 @@ -package scala.runtime; - -/** A marker annotation to tag a setter of a mutable variable in a trait - */ -@Deprecated -public @interface TraitSetter { -} \ No newline at end of file diff --git a/tests/scala2-library/src/library/scala/runtime/Tuple2Zipped.scala b/tests/scala2-library/src/library/scala/runtime/Tuple2Zipped.scala deleted file mode 100644 index 52dd1da09e82..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/Tuple2Zipped.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2016, 
LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -import scala.collection.{ TraversableLike, IterableLike } -import scala.collection.generic.{ CanBuildFrom => CBF } -import scala.language.{ higherKinds, implicitConversions } - -/** This interface is intended as a minimal interface, not complicated - * by the requirement to resolve type constructors, for implicit search (which only - * needs to find an implicit conversion to Traversable for our purposes.) - * @define Coll `ZippedTraversable2` - * @define coll collection - * @define collectExample - * @define willNotTerminateInf - */ -trait ZippedTraversable2[+El1, +El2] extends Any { - def foreach[U](f: (El1, El2) => U): Unit -} -object ZippedTraversable2 { - implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = { - new scala.collection.AbstractTraversable[(El1, El2)] { - def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f) - } - } -} - -final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2])) extends AnyVal with ZippedTraversable2[El1, El2] { - private def coll1 = colls._1 - private def coll2 = colls._2 - - def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) - b.sizeHint(coll1) - val elems2 = coll2.iterator - - for (el1 <- coll1) { - if (elems2.hasNext) - b += f(el1, elems2.next()) - else - return b.result() - } - - b.result() - } - - def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) - val elems2 = coll2.iterator - - for (el1 <- coll1) { - if (elems2.hasNext) - b ++= f(el1, elems2.next()) - else - return b.result() - } - - b.result() - } - - def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = { - val b1 = cbf1(coll1.repr) - val b2 = cbf2(coll2.repr) - val elems2 = coll2.iterator - - for (el1 <- coll1) { - if (elems2.hasNext) { - val el2 = elems2.next() - if (f(el1, el2)) { - b1 += el1 - b2 += el2 - } - } - else return (b1.result(), b2.result()) - } - - (b1.result(), b2.result()) - } - - def exists(@deprecatedName('f) p: (El1, El2) => Boolean): Boolean = { - val elems2 = coll2.iterator - - for (el1 <- coll1) { - if (elems2.hasNext) { - if (p(el1, elems2.next())) - return true - } - else return false - } - false - } - - def forall(@deprecatedName('f) p: (El1, El2) => Boolean): Boolean = - !exists((x, y) => !p(x, y)) - - def foreach[U](f: (El1, El2) => U): Unit = { - val elems2 = coll2.iterator - - for (el1 <- coll1) { - if (elems2.hasNext) - f(el1, elems2.next()) - else - return - } - } - - override def toString = s"($coll1, $coll2).zipped" -} - -object Tuple2Zipped { - final class Ops[T1, T2](private val x: (T1, T2)) extends AnyVal { - def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That] - (implicit w1: T1 <:< CC1[El1], - w2: T2 <:< CC2[El2], - bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2), That] - ): That = { - val buf = bf(x._1) - val it1 = x._1.toIterator - val it2 = x._2.toIterator - while (it1.hasNext && it2.hasNext) - buf += ((it1.next(), it2.next())) - - buf.result() - } - - def zipped[El1, Repr1, El2, Repr2] - (implicit w1: T1 => TraversableLike[El1, Repr1], - w2: T2 => IterableLike[El2, Repr2] - ): Tuple2Zipped[El1, Repr1, El2, Repr2] = 
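
Tuple2Zipped.map/foreach above walk the first collection alongside an iterator over the second and stop as soon as the shorter side runs out, i.e. zip-then-map behaviour without building the intermediate pairs. Those semantics can be reproduced with plain iterators; the sketch below is independent of the deleted classes:

    object ZippedLike {
      // Pairwise map that stops at the shorter input, like Tuple2Zipped.map.
      def map2[A, B, C](as: Iterable[A], bs: Iterable[B])(f: (A, B) => C): List[C] = {
        val ia  = as.iterator
        val ib  = bs.iterator
        val buf = List.newBuilder[C]
        while (ia.hasNext && ib.hasNext) buf += f(ia.next(), ib.next())
        buf.result()
      }

      val sums = map2(List(1, 2, 3), List(10, 20))(_ + _)   // List(11, 22) -- the extra element is ignored
    }
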
new Tuple2Zipped((x._1, x._2)) - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/Tuple3Zipped.scala b/tests/scala2-library/src/library/scala/runtime/Tuple3Zipped.scala deleted file mode 100644 index a4a86f8e55b0..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/Tuple3Zipped.scala +++ /dev/null @@ -1,153 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package runtime - - -import scala.collection.{ TraversableLike, IterableLike } -import scala.collection.generic.{ CanBuildFrom => CBF } -import scala.language.{ higherKinds, implicitConversions } - -/** See comment on ZippedTraversable2 - * @define Coll `ZippedTraversable3` - * @define coll collection - * @define collectExample - * @define willNotTerminateInf - */ -trait ZippedTraversable3[+El1, +El2, +El3] extends Any { - def foreach[U](f: (El1, El2, El3) => U): Unit -} -object ZippedTraversable3 { - implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = { - new scala.collection.AbstractTraversable[(El1, El2, El3)] { - def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f) - } - } -} - -final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2], IterableLike[El3, Repr3])) - extends AnyVal with ZippedTraversable3[El1, El2, El3] { - - private def coll1 = colls._1 - private def coll2 = colls._2 - private def coll3 = colls._3 - - def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) - val elems2 = coll2.iterator - val elems3 = coll3.iterator - - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) - b += f(el1, elems2.next(), elems3.next()) - else - return b.result() - } - b.result() - } - - def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) - val elems2 = coll2.iterator - val elems3 = coll3.iterator - - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) - b ++= f(el1, elems2.next(), elems3.next()) - else - return b.result() - } - b.result() - } - - def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)( - implicit cbf1: CBF[Repr1, El1, To1], - cbf2: CBF[Repr2, El2, To2], - cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = { - val b1 = cbf1(coll1.repr) - val b2 = cbf2(coll2.repr) - val b3 = cbf3(coll3.repr) - val elems2 = coll2.iterator - val elems3 = coll3.iterator - def result = (b1.result(), b2.result(), b3.result()) - - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) { - val el2 = elems2.next() - val el3 = elems3.next() - - if (f(el1, el2, el3)) { - b1 += el1 - b2 += el2 - b3 += el3 - } - } - else return result - } - - result - } - - def exists(@deprecatedName('f) p: (El1, El2, El3) => Boolean): Boolean = { - val elems2 = coll2.iterator - val elems3 = coll3.iterator - - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) { - if (p(el1, elems2.next(), elems3.next())) - return true - } - else return false - } - false - } - - def forall(@deprecatedName('f) p: (El1, El2, El3) => Boolean): Boolean = - !exists((x, y, z) => !p(x, y, z)) - - def foreach[U](f: (El1, El2, El3) => U): Unit = { - val elems2 = coll2.iterator - val elems3 = coll3.iterator - - for (el1 <- coll1) { - if 
(elems2.hasNext && elems3.hasNext) - f(el1, elems2.next(), elems3.next()) - else - return - } - } - - override def toString = s"($coll1, $coll2, $coll3).zipped" -} - -object Tuple3Zipped { - final class Ops[T1, T2, T3](private val x: (T1, T2, T3)) extends AnyVal { - def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That] - (implicit w1: T1 <:< CC1[El1], - w2: T2 <:< CC2[El2], - w3: T3 <:< CC3[El3], - bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That] - ): That = { - val buf = bf(x._1) - val it1 = x._1.toIterator - val it2 = x._2.toIterator - val it3 = x._3.toIterator - while (it1.hasNext && it2.hasNext && it3.hasNext) - buf += ((it1.next(), it2.next(), it3.next())) - - buf.result() - } - - def zipped[El1, Repr1, El2, Repr2, El3, Repr3] - (implicit w1: T1 => TraversableLike[El1, Repr1], - w2: T2 => IterableLike[El2, Repr2], - w3: T3 => IterableLike[El3, Repr3] - ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped((x._1, x._2, x._3)) - } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileBooleanRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileBooleanRef.java deleted file mode 100644 index ef5b69111880..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileBooleanRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class VolatileBooleanRef implements java.io.Serializable { - private static final long serialVersionUID = -5730524563015615974L; - - volatile public boolean elem; - public VolatileBooleanRef(boolean elem) { this.elem = elem; } - public String toString() { return String.valueOf(elem); } - - public static VolatileBooleanRef create(boolean e) { return new VolatileBooleanRef(e); } - public static VolatileBooleanRef zero() { return new VolatileBooleanRef(false); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileByteRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileByteRef.java deleted file mode 100644 index d792b0a386a8..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileByteRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class VolatileByteRef implements java.io.Serializable { - private static final long serialVersionUID = -100666928446877072L; - - volatile public byte elem; - public VolatileByteRef(byte elem) { this.elem = elem; } - public String toString() { return java.lang.Byte.toString(elem); } - - public static VolatileByteRef create(byte e) { return new VolatileByteRef(e); } - public static VolatileByteRef zero() { return new VolatileByteRef((byte)0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileCharRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileCharRef.java deleted file mode 100644 index 555b17128321..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileCharRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** 
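
The Volatile*Ref classes being removed here are the heap boxes the compiler uses when a local `var` (here, one marked @volatile) is captured by a closure: the local is lifted into a cell with a single mutable `elem` field that the closure and the enclosing method share. Roughly the same thing written out by hand, with an invented class name:

    object CapturedVar {
      // Hand-written stand-in for a compiler-generated volatile ref cell.
      final class VolatileIntBox(initial: Int) { @volatile var elem: Int = initial }

      val counter = new VolatileIntBox(0)
      val bump: () => Unit = () => counter.elem += 1   // the closure shares the box, not a copy
      bump(); bump()
      val value = counter.elem                         // 2
    }
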
__\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class VolatileCharRef implements java.io.Serializable { - private static final long serialVersionUID = 6537214938268005702L; - - volatile public char elem; - public VolatileCharRef(char elem) { this.elem = elem; } - public String toString() { return java.lang.Character.toString(elem); } - - public static VolatileCharRef create(char e) { return new VolatileCharRef(e); } - public static VolatileCharRef zero() { return new VolatileCharRef((char)0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileDoubleRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileDoubleRef.java deleted file mode 100644 index 1932055c6add..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileDoubleRef.java +++ /dev/null @@ -1,22 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - -public class VolatileDoubleRef implements java.io.Serializable { - private static final long serialVersionUID = 8304402127373655534L; - - volatile public double elem; - public VolatileDoubleRef(double elem) { this.elem = elem; } - public String toString() { return java.lang.Double.toString(elem); } - - public static VolatileDoubleRef create(double e) { return new VolatileDoubleRef(e); } - public static VolatileDoubleRef zero() { return new VolatileDoubleRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileFloatRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileFloatRef.java deleted file mode 100644 index 3a81be114602..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileFloatRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class VolatileFloatRef implements java.io.Serializable { - private static final long serialVersionUID = -5793980990371366933L; - - volatile public float elem; - public VolatileFloatRef(float elem) { this.elem = elem; } - public String toString() { return java.lang.Float.toString(elem); } - - public static VolatileFloatRef create(float e) { return new VolatileFloatRef(e); } - public static VolatileFloatRef zero() { return new VolatileFloatRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileIntRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileIntRef.java deleted file mode 100644 index ae015bc8b122..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileIntRef.java +++ /dev/null @@ -1,22 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - -public class VolatileIntRef implements java.io.Serializable { - private static final long serialVersionUID = 1488197132022872888L; - - volatile public int elem; - public VolatileIntRef(int elem) { this.elem = elem; } - public String toString() { return java.lang.Integer.toString(elem); } - - public static 
VolatileIntRef create(int e) { return new VolatileIntRef(e); } - public static VolatileIntRef zero() { return new VolatileIntRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileLongRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileLongRef.java deleted file mode 100644 index e596f5aa6964..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileLongRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class VolatileLongRef implements java.io.Serializable { - private static final long serialVersionUID = -3567869820105829499L; - - volatile public long elem; - public VolatileLongRef(long elem) { this.elem = elem; } - public String toString() { return java.lang.Long.toString(elem); } - - public static VolatileLongRef create(long e) { return new VolatileLongRef(e); } - public static VolatileLongRef zero() { return new VolatileLongRef(0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileObjectRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileObjectRef.java deleted file mode 100644 index 6063501ffb0e..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileObjectRef.java +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class VolatileObjectRef implements java.io.Serializable { - private static final long serialVersionUID = -9055728157600312291L; - - volatile public T elem; - public VolatileObjectRef(T elem) { this.elem = elem; } - @Override - public String toString() { return String.valueOf(elem); } - - public static VolatileObjectRef create(U e) { return new VolatileObjectRef(e); } - public static VolatileObjectRef zero() { return new VolatileObjectRef(null); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/VolatileShortRef.java b/tests/scala2-library/src/library/scala/runtime/VolatileShortRef.java deleted file mode 100644 index 0a2825941fbf..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/VolatileShortRef.java +++ /dev/null @@ -1,23 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.runtime; - - -public class VolatileShortRef implements java.io.Serializable { - private static final long serialVersionUID = 4218441291229072313L; - - volatile public short elem; - public VolatileShortRef(short elem) { this.elem = elem; } - public String toString() { return java.lang.Short.toString(elem); } - - public static VolatileShortRef create(short e) { return new VolatileShortRef(e); } - public static VolatileShortRef zero() { return new VolatileShortRef((short)0); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcB$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcB$sp.java deleted file mode 100644 index 622dbabcf111..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcB$sp.java +++ /dev/null @@ -1,13 
+0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcB$sp extends scala.Function0, java.io.Serializable { - byte apply$mcB$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToByte(apply$mcB$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcC$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcC$sp.java deleted file mode 100644 index ad9a14ffa8fc..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcC$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcC$sp extends scala.Function0, java.io.Serializable { - char apply$mcC$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToCharacter(apply$mcC$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcD$sp.java deleted file mode 100644 index 291b50db4bd0..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcD$sp extends scala.Function0, java.io.Serializable { - double apply$mcD$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcD$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcF$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcF$sp.java deleted file mode 100644 index 73b31dea0f0f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcF$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcF$sp extends scala.Function0, java.io.Serializable { - float apply$mcF$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcF$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcI$sp.java deleted file mode 100644 index f9b2d659ad31..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcI$sp extends scala.Function0, java.io.Serializable { - int apply$mcI$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcI$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java deleted file mode 100644 index 73c41976b7a3..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcJ$sp extends scala.Function0, java.io.Serializable { - long apply$mcJ$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJ$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcS$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcS$sp.java deleted file mode 100644 index 5fbabb2358e2..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcS$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcS$sp extends scala.Function0, java.io.Serializable { - short apply$mcS$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToShort(apply$mcS$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcV$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcV$sp.java deleted file mode 100644 index 735843796ce8..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcV$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcV$sp extends scala.Function0, java.io.Serializable { - void apply$mcV$sp(); - - default Object apply() { apply$mcV$sp(); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java deleted file mode 100644 index 01234c1728a2..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcZ$sp extends scala.Function0, java.io.Serializable { - boolean apply$mcZ$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZ$sp()); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java deleted file mode 100644 index 07b85eed59a9..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDD$sp extends scala.Function1, java.io.Serializable { - double apply$mcDD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java deleted file mode 100644 index f09edd2ce25f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDF$sp extends scala.Function1, java.io.Serializable { - double apply$mcDF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java deleted file mode 100644 index 3cf40cb74998..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDI$sp extends scala.Function1, java.io.Serializable { - double apply$mcDI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java deleted file mode 100644 index 4023f30bc052..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDJ$sp extends scala.Function1, java.io.Serializable { - double apply$mcDJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java deleted file mode 100644 index d4608958383a..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFD$sp extends scala.Function1, java.io.Serializable { - float apply$mcFD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java deleted file mode 100644 index 6c591800cadc..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFF$sp extends scala.Function1, java.io.Serializable { - float apply$mcFF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java deleted file mode 100644 index 666919591438..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFI$sp extends scala.Function1, java.io.Serializable { - float apply$mcFI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java deleted file mode 100644 index cd953677aec6..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFJ$sp extends scala.Function1, java.io.Serializable { - float apply$mcFJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcID$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcID$sp.java deleted file mode 100644 index 37f686493681..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcID$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcID$sp extends scala.Function1, java.io.Serializable { - int apply$mcID$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcID$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java deleted file mode 100644 index 8a7656a286d5..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcIF$sp extends scala.Function1, java.io.Serializable { - int apply$mcIF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcII$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcII$sp.java deleted file mode 100644 index 792627b4005e..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcII$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcII$sp extends scala.Function1, java.io.Serializable { - int apply$mcII$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcII$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java deleted file mode 100644 index 01c47a67dac7..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcIJ$sp extends scala.Function1, java.io.Serializable { - int apply$mcIJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java deleted file mode 100644 index d8d5274ca1db..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJD$sp extends scala.Function1, java.io.Serializable { - long apply$mcJD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java deleted file mode 100644 index cc1fad36d04a..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJF$sp extends scala.Function1, java.io.Serializable { - long apply$mcJF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java deleted file mode 100644 index fe941dd61a7e..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJI$sp extends scala.Function1, java.io.Serializable { - long apply$mcJI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java deleted file mode 100644 index 7034115bad79..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJJ$sp extends scala.Function1, java.io.Serializable { - long apply$mcJJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java deleted file mode 100644 index dde9f557226b..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVD$sp extends scala.Function1, java.io.Serializable { - void apply$mcVD$sp(double v1); - - default Object apply(Object t) { apply$mcVD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java deleted file mode 100644 index 0ffd80621f13..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVF$sp extends scala.Function1, java.io.Serializable { - void apply$mcVF$sp(float v1); - - default Object apply(Object t) { apply$mcVF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java deleted file mode 100644 index 2543d23e3134..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVI$sp extends scala.Function1, java.io.Serializable { - void apply$mcVI$sp(int v1); - - default Object apply(Object t) { apply$mcVI$sp(scala.runtime.BoxesRunTime.unboxToInt(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java deleted file mode 100644 index 7564175402df..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVJ$sp extends scala.Function1, java.io.Serializable { - void apply$mcVJ$sp(long v1); - - default Object apply(Object t) { apply$mcVJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java deleted file mode 100644 index ce5bd300297d..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZD$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java deleted file mode 100644 index baa691e5480f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZF$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java deleted file mode 100644 index bf04b5922b93..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZI$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java deleted file mode 100644 index 808eea87b8c7..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZJ$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java deleted file mode 100644 index 80ab5203d954..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDDD$sp extends scala.Function2, java.io.Serializable { - double apply$mcDDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java deleted file mode 100644 index 8e92338b825b..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDDI$sp extends scala.Function2, java.io.Serializable { - double apply$mcDDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java deleted file mode 100644 index 3d4f4a7cded9..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDDJ$sp extends scala.Function2, java.io.Serializable { - double apply$mcDDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java deleted file mode 100644 index bd6652e51ac1..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDID$sp extends scala.Function2, java.io.Serializable { - double apply$mcDID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java deleted file mode 100644 index d06a246d3385..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDII$sp extends scala.Function2, java.io.Serializable { - double apply$mcDII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java deleted file mode 100644 index cda23c4dcd74..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDIJ$sp extends scala.Function2, java.io.Serializable { - double apply$mcDIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java deleted file mode 100644 index 723efd8451eb..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDJD$sp extends scala.Function2, java.io.Serializable { - double apply$mcDJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java deleted file mode 100644 index c90352ef301b..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDJI$sp extends scala.Function2, java.io.Serializable { - double apply$mcDJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java deleted file mode 100644 index 33612197878e..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDJJ$sp extends scala.Function2, java.io.Serializable { - double apply$mcDJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java deleted file mode 100644 index 2b9236b5d1b4..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFDD$sp extends scala.Function2, java.io.Serializable { - float apply$mcFDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java deleted file mode 100644 index 2c564962a716..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFDI$sp extends scala.Function2, java.io.Serializable { - float apply$mcFDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java deleted file mode 100644 index a0785f4cd2c6..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFDJ$sp extends scala.Function2, java.io.Serializable { - float apply$mcFDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java deleted file mode 100644 index ba67ddb5931d..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFID$sp extends scala.Function2, java.io.Serializable { - float apply$mcFID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java deleted file mode 100644 index d58284b7522d..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFII$sp extends scala.Function2, java.io.Serializable { - float apply$mcFII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java deleted file mode 100644 index 4bc6eeb9085f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFIJ$sp extends scala.Function2, java.io.Serializable { - float apply$mcFIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java deleted file mode 100644 index f2435e23f7fd..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFJD$sp extends scala.Function2, java.io.Serializable { - float apply$mcFJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java deleted file mode 100644 index 1362d00e9408..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFJI$sp extends scala.Function2, java.io.Serializable { - float apply$mcFJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java deleted file mode 100644 index c9bcf515b736..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFJJ$sp extends scala.Function2, java.io.Serializable { - float apply$mcFJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java deleted file mode 100644 index 28693910a57c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIDD$sp extends scala.Function2, java.io.Serializable { - int apply$mcIDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java deleted file mode 100644 index 50c775fbd9fc..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIDI$sp extends scala.Function2, java.io.Serializable { - int apply$mcIDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java deleted file mode 100644 index 3231aa7a888b..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIDJ$sp extends scala.Function2, java.io.Serializable { - int apply$mcIDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java deleted file mode 100644 index 01568b2fd626..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIID$sp extends scala.Function2, java.io.Serializable { - int apply$mcIID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java deleted file mode 100644 index e0fba76675cc..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIII$sp extends scala.Function2, java.io.Serializable { - int apply$mcIII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java deleted file mode 100644 index 7155548e9f01..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIIJ$sp extends scala.Function2, java.io.Serializable { - int apply$mcIIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java deleted file mode 100644 index f541cfdef405..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIJD$sp extends scala.Function2, java.io.Serializable { - int apply$mcIJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java deleted file mode 100644 index e484efe42778..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIJI$sp extends scala.Function2, java.io.Serializable { - int apply$mcIJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java deleted file mode 100644 index ec3538779cb6..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIJJ$sp extends scala.Function2, java.io.Serializable { - int apply$mcIJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java deleted file mode 100644 index b13502de5b30..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJDD$sp extends scala.Function2, java.io.Serializable { - long apply$mcJDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java deleted file mode 100644 index 9ec9adda6003..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJDI$sp extends scala.Function2, java.io.Serializable { - long apply$mcJDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java deleted file mode 100644 index 68ef9ead143a..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJDJ$sp extends scala.Function2, java.io.Serializable { - long apply$mcJDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java deleted file mode 100644 index 29c9c5e3d300..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJID$sp extends scala.Function2, java.io.Serializable { - long apply$mcJID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java deleted file mode 100644 index bb23086125bf..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJII$sp extends scala.Function2, java.io.Serializable { - long apply$mcJII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java deleted file mode 100644 index 649fe2432562..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJIJ$sp extends scala.Function2, java.io.Serializable { - long apply$mcJIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java deleted file mode 100644 index 8e6071d44817..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJJD$sp extends scala.Function2, java.io.Serializable { - long apply$mcJJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java deleted file mode 100644 index 61366ac26de1..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJJI$sp extends scala.Function2, java.io.Serializable { - long apply$mcJJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java deleted file mode 100644 index a44e97318e11..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJJJ$sp extends scala.Function2, java.io.Serializable { - long apply$mcJJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java deleted file mode 100644 index 8e7cbd7d1bd2..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVDD$sp extends scala.Function2, java.io.Serializable { - void apply$mcVDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { apply$mcVDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java deleted file mode 100644 index 1dee353d6b32..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVDI$sp extends scala.Function2, java.io.Serializable { - void apply$mcVDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { apply$mcVDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java deleted file mode 100644 index 0b9560868472..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVDJ$sp extends scala.Function2, java.io.Serializable { - void apply$mcVDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { apply$mcVDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java deleted file mode 100644 index f0ed7e7e9789..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVID$sp extends scala.Function2, java.io.Serializable { - void apply$mcVID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { apply$mcVID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java deleted file mode 100644 index 52d7922cc1aa..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVII$sp extends scala.Function2, java.io.Serializable { - void apply$mcVII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { apply$mcVII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java deleted file mode 100644 index ac256bf163d6..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVIJ$sp extends scala.Function2, java.io.Serializable { - void apply$mcVIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { apply$mcVIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java deleted file mode 100644 index 6e2dea3fbfef..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVJD$sp extends scala.Function2, java.io.Serializable { - void apply$mcVJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { apply$mcVJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java deleted file mode 100644 index d1cba439e667..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVJI$sp extends scala.Function2, java.io.Serializable { - void apply$mcVJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { apply$mcVJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java deleted file mode 100644 index 67f848a60e7c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVJJ$sp extends scala.Function2, java.io.Serializable { - void apply$mcVJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { apply$mcVJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java deleted file mode 100644 index b430c5f1343f..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZDD$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java deleted file mode 100644 index 01fb8ba003e6..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZDI$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java deleted file mode 100644 index a7d28e3cfc71..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZDJ$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java deleted file mode 100644 index e77719bf7568..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZID$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java deleted file mode 100644 index 5f1f83aaf8b2..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZII$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java deleted file mode 100644 index 38fabd6f691c..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZIJ$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java deleted file mode 100644 index 59c82cb01e60..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZJD$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java deleted file mode 100644 index 3e73b8a794e8..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZJI$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java b/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java deleted file mode 100644 index 96a14e98a531..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java +++ /dev/null @@ -1,13 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZJJ$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/tests/scala2-library/src/library/scala/runtime/package.scala b/tests/scala2-library/src/library/scala/runtime/package.scala deleted file mode 100644 index e4472b3ea183..000000000000 --- a/tests/scala2-library/src/library/scala/runtime/package.scala +++ /dev/null @@ -1,3 +0,0 @@ -package scala - -package object runtime { } diff --git a/tests/scala2-library/src/library/scala/specialized.scala b/tests/scala2-library/src/library/scala/specialized.scala deleted file mode 100644 index cb7793536cab..000000000000 --- a/tests/scala2-library/src/library/scala/specialized.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import Specializable._ - -/** Annotate type parameters on which code should be automatically - * specialized. For example: - * {{{ - * class MyList[@specialized T] ... - * }}} - * - * Type T can be specialized on a subset of the primitive types by - * specifying a list of primitive types to specialize at: - * {{{ - * class MyList[@specialized(Int, Double, Boolean) T] .. - * }}} - * - * @since 2.8 - */ -// class tspecialized[T](group: Group[T]) extends scala.annotation.StaticAnnotation { - -class specialized(group: SpecializedGroup) extends scala.annotation.StaticAnnotation { - def this(types: Specializable*) = this(new Group(types.toList)) - def this() = this(Primitives) -} diff --git a/tests/scala2-library/src/library/scala/sys/BooleanProp.scala b/tests/scala2-library/src/library/scala/sys/BooleanProp.scala deleted file mode 100644 index b0008b41fd41..000000000000 --- a/tests/scala2-library/src/library/scala/sys/BooleanProp.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys - -import scala.language.implicitConversions - -/** A few additional conveniences for Boolean properties. - */ -trait BooleanProp extends Prop[Boolean] { - /** The semantics of value are determined at Prop creation. See methods - * `valueIsTrue` and `keyExists` in object BooleanProp for examples. - * - * @return true if the current String is considered true, false otherwise - */ - def value: Boolean - - /** Alter this property so that `value` will be true. */ - def enable(): Unit - - /** Alter this property so that `value` will be false. */ - def disable(): Unit - - /** Toggle the property between enabled and disabled states. 
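Editor's note: the `JFunction2$mc*$sp` interfaces deleted above are the Java-side specialized bridges, and the `@specialized` scaladoc removed here describes the Scala-side annotation that drives specialization. A minimal sketch of the documented usage, with illustrative names, compiled by scalac this requests unboxed variants only for `Int` and `Double`:

```scala
// Illustrative class: under the Scala 2 compiler the annotation asks for
// primitive specializations of Pair for Int and Double only, as the
// deleted scaladoc describes.
class Pair[@specialized(Int, Double) T](val first: T, val second: T) {
  def swap: Pair[T] = new Pair(second, first)
}

object PairDemo {
  def main(args: Array[String]): Unit = {
    val p = new Pair(1, 2).swap   // dispatches to the Int-specialized subclass under scalac
    println(p.first)              // prints 2
  }
}
```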
*/ - def toggle(): Unit -} - -object BooleanProp { - private[sys] - class BooleanPropImpl(key: String, valueFn: String => Boolean) extends PropImpl(key, valueFn) with BooleanProp { - override def setValue[T1 >: Boolean](newValue: T1): Boolean = newValue match { - case x: Boolean if !x => val old = value ; clear() ; old - case x => super.setValue(newValue) - } - def enable() = this setValue true - def disable() = this.clear() - def toggle() = if (value) disable() else enable() - } - private[sys] - class ConstantImpl(val key: String, val value: Boolean) extends BooleanProp { - val isSet = value - def set(newValue: String) = "" + value - def setValue[T1 >: Boolean](newValue: T1): Boolean = value - def get: String = "" + value - val clear, enable, disable, toggle = () - def option = if (isSet) Some(value) else None - //def or[T1 >: Boolean](alt: => T1): T1 = if (value) true else alt - - protected def zero = false - } - - /** The java definition of property truth is that the key be in the map and - * the value be equal to the String "true", case insensitively. This method - * creates a BooleanProp instance which adheres to that definition. - * - * @return A BooleanProp which acts like java's Boolean.getBoolean - */ - def valueIsTrue[T](key: String): BooleanProp = new BooleanPropImpl(key, _.toLowerCase == "true") - - /** As an alternative, this method creates a BooleanProp which is true - * if the key exists in the map and is not assigned a value other than "true", - * compared case-insensitively, or the empty string. This way -Dmy.property - * results in a true-valued property, but -Dmy.property=false does not. - * - * @return A BooleanProp with a liberal truth policy - */ - def keyExists[T](key: String): BooleanProp = new BooleanPropImpl(key, s => s == "" || s.equalsIgnoreCase("true")) - - /** A constant true or false property which ignores all method calls. - */ - def constant(key: String, isOn: Boolean): BooleanProp = new ConstantImpl(key, isOn) - - implicit def booleanPropAsBoolean(b: BooleanProp): Boolean = b.value -} diff --git a/tests/scala2-library/src/library/scala/sys/Prop.scala b/tests/scala2-library/src/library/scala/sys/Prop.scala deleted file mode 100644 index 52a3d89ecba3..000000000000 --- a/tests/scala2-library/src/library/scala/sys/Prop.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys - -/** A lightweight interface wrapping a property contained in some - * unspecified map. Generally it'll be the system properties but this - * is not a requirement. - * - * See `scala.sys.SystemProperties` for an example usage. - * - * @author Paul Phillips - * @version 2.9 - * @since 2.9 - */ -trait Prop[+T] { - /** The full name of the property, e.g., "java.awt.headless". - */ - def key: String - - /** If the key exists in the properties map, converts the value - * to type `T` using valueFn. As yet no validation is performed: - * it will throw an exception on a failed conversion. - * @return the converted value, or `zero` if not in the map - */ - def value: T - - /** True if the key exists in the properties map. Note that this - * is not sufficient for a Boolean property to be considered true. - * @return whether the map contains the key - */ - def isSet: Boolean - - /** Sets the property. 
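Editor's note: the `BooleanProp` factories deleted above (`valueIsTrue`, `keyExists`, `constant`) remain available through `scala.sys`. A short sketch of how they behave, using a made-up property key:

```scala
import scala.sys.BooleanProp

object BooleanPropDemo extends App {
  // "myapp.verbose" is an illustrative key, not one defined anywhere in this repo.
  val verbose = BooleanProp.keyExists("myapp.verbose")

  println(verbose.value)   // false: the key is absent
  verbose.enable()         // sets the system property to "true"
  println(verbose.value)   // true
  verbose.toggle()         // disables it again by clearing the key
  println(verbose.isSet)   // false
}
```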
- * - * @param newValue the new string value - * @return the old value, or null if it was unset. - */ - def set(newValue: String): String - - /** Sets the property with a value of the represented type. - */ - def setValue[T1 >: T](value: T1): T - - /** Gets the current string value if any. Will not return null: use - * `isSet` to test for existence. - * @return the current string value if any, else the empty string - */ - def get: String - - /** Some(value) if the property is set, None otherwise. - */ - def option: Option[T] - - // Do not open until 2.12. - //** This value if the property is set, an alternative value otherwise. */ - //def or[T1 >: T](alt: => T1): T1 - - /** Removes the property from the underlying map. - */ - def clear(): Unit - - /** A value of type `T` for use when the property is unset. - * The default implementation delivers null for reference types - * and 0/0.0/false for non-reference types. - */ - protected def zero: T -} - -object Prop { - /** A creator of property instances. For any type `T`, if an implicit - * parameter of type Creator[T] is in scope, a Prop[T] can be created - * via this object's apply method. - */ - @annotation.implicitNotFound("No implicit property creator available for type ${T}.") - trait Creator[+T] { - /** Creates a Prop[T] of this type based on the given key. */ - def apply(key: String): Prop[T] - } - - implicit object FileProp extends CreatorImpl[java.io.File](s => new java.io.File(s)) - implicit object StringProp extends CreatorImpl[String](s => s) - implicit object IntProp extends CreatorImpl[Int](_.toInt) - implicit object DoubleProp extends CreatorImpl[Double](_.toDouble) - - def apply[T: Creator](key: String): Prop[T] = implicitly[Creator[T]] apply key -} diff --git a/tests/scala2-library/src/library/scala/sys/PropImpl.scala b/tests/scala2-library/src/library/scala/sys/PropImpl.scala deleted file mode 100644 index 3b451ab1d932..000000000000 --- a/tests/scala2-library/src/library/scala/sys/PropImpl.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys - -import scala.collection.mutable - -/** The internal implementation of scala.sys.Prop. 
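Editor's note: the `Prop` trait and its `Creator` type class deleted above can be exercised in a few lines; the property key below is made up, and the implicit `Int` creator in `Prop`'s companion does the String-to-Int conversion on read:

```scala
import scala.sys.Prop

object PropDemo extends App {
  // "myapp.buffer.size" is an illustrative key.
  val size = Prop[Int]("myapp.buffer.size")

  size.set("8192")       // returns the previous String value (null here, since it was unset)
  println(size.value)    // 8192
  println(size.option)   // Some(8192)

  size.clear()
  println(size.value)    // 0: the `zero` used when the key is unset
}
```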
- */ -private[sys] class PropImpl[+T](val key: String, valueFn: String => T) extends Prop[T] { - def value: T = if (isSet) valueFn(get) else zero - def isSet = underlying contains key - def set(newValue: String): String = { - val old = if (isSet) get else null - underlying(key) = newValue - old - } - def setValue[T1 >: T](newValue: T1): T = { - val old = value - if (newValue == null) set(null) - else set("" + newValue) - old - } - def get: String = - if (isSet) underlying.getOrElse(key, "") - else "" - - def clear(): Unit = underlying -= key - def option: Option[T] = if (isSet) Some(value) else None - def or[T1 >: T](alt: => T1): T1 = if (isSet) value else alt - - /** The underlying property map, in our case always sys.props */ - protected def underlying: mutable.Map[String, String] = scala.sys.props - protected def zero: T = null.asInstanceOf[T] - private def getString = if (isSet) "currently: " + get else "unset" - override def toString = "%s (%s)".format(key, getString) -} - -private[sys] abstract class CreatorImpl[+T](f: String => T) extends Prop.Creator[T] { - def apply(key: String): Prop[T] = new PropImpl[T](key, f) -} - diff --git a/tests/scala2-library/src/library/scala/sys/ShutdownHookThread.scala b/tests/scala2-library/src/library/scala/sys/ShutdownHookThread.scala deleted file mode 100644 index 6018ac852b12..000000000000 --- a/tests/scala2-library/src/library/scala/sys/ShutdownHookThread.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys - -/** A minimal Thread wrapper to enhance shutdown hooks. It knows - * how to unregister itself. - * - * @author Paul Phillips - * @version 2.9 - * @since 2.9 - */ -class ShutdownHookThread private (name: String) extends Thread(name) { - def remove() = runtime removeShutdownHook this -} - -object ShutdownHookThread { - private var hookNameCount: Int = 0 - private def hookName(): String = synchronized { - hookNameCount += 1 - "shutdownHook" + hookNameCount - } - /** Creates, names, and registers a shutdown hook to run the - * given code. - */ - def apply(body: => Unit): ShutdownHookThread = { - val t = new ShutdownHookThread(hookName()) { - override def run() = body - } - runtime addShutdownHook t - t - } -} diff --git a/tests/scala2-library/src/library/scala/sys/SystemProperties.scala b/tests/scala2-library/src/library/scala/sys/SystemProperties.scala deleted file mode 100644 index e5606f3c3b74..000000000000 --- a/tests/scala2-library/src/library/scala/sys/SystemProperties.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys - -import scala.collection.{ mutable, Iterator } -import scala.collection.JavaConverters._ -import java.security.AccessControlException -import scala.language.implicitConversions - - -/** A bidirectional map wrapping the java System properties. - * Changes to System properties will be immediately visible in the map, - * and modifications made to the map will be immediately applied to the - * System properties. If a security manager is in place which prevents - * the properties from being read or written, the AccessControlException - * will be caught and discarded. 
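Editor's note: `ShutdownHookThread`, deleted above, wraps `Runtime.addShutdownHook` with a handle that can unregister itself. A minimal sketch:

```scala
import scala.sys.ShutdownHookThread

object HookDemo extends App {
  // The hook body runs on normal JVM exit; it is NOT guaranteed to run on a hard kill.
  val hook = ShutdownHookThread {
    println("flushing buffers before exit")   // illustrative cleanup
  }

  // If the cleanup becomes unnecessary, the hook can unregister itself.
  if (args.contains("--no-hook")) hook.remove()
}
```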
- * @define Coll `collection.mutable.Map` - * @define coll mutable map - * - * @author Paul Phillips - * @version 2.9 - * @since 2.9 - */ -class SystemProperties -extends mutable.AbstractMap[String, String] - with mutable.Map[String, String] { - - override def empty = mutable.Map[String, String]() - override def default(key: String): String = null - - def iterator: Iterator[(String, String)] = wrapAccess { - val ps = System.getProperties() - names map (k => (k, ps getProperty k)) filter (_._2 ne null) - } getOrElse Iterator.empty - - def names: Iterator[String] = wrapAccess ( - System.getProperties().stringPropertyNames().asScala.iterator - ) getOrElse Iterator.empty - - def get(key: String) = - wrapAccess(Option(System.getProperty(key))) flatMap (x => x) - override def contains(key: String) = - wrapAccess(super.contains(key)) exists (x => x) - - def -= (key: String): this.type = { wrapAccess(System.clearProperty(key)) ; this } - def += (kv: (String, String)): this.type = { wrapAccess(System.setProperty(kv._1, kv._2)) ; this } - - def wrapAccess[T](body: => T): Option[T] = - try Some(body) catch { case _: AccessControlException => None } -} - -/** The values in SystemProperties can be used to access and manipulate - * designated system properties. See `scala.sys.Prop` for particulars. - * @example {{{ - * if (!headless.isSet) headless.enable() - * }}} - */ -object SystemProperties { - /** An unenforceable, advisory only place to do some synchronization when - * mutating system properties. - */ - def exclusively[T](body: => T) = this synchronized body - - implicit def systemPropertiesToCompanion(p: SystemProperties): SystemProperties.type = this - - private final val HeadlessKey = "java.awt.headless" - private final val PreferIPv4StackKey = "java.net.preferIPv4Stack" - private final val PreferIPv6AddressesKey = "java.net.preferIPv6Addresses" - private final val NoTraceSuppressionKey = "scala.control.noTraceSuppression" - - def help(key: String): String = key match { - case HeadlessKey => "system should not utilize a display device" - case PreferIPv4StackKey => "system should prefer IPv4 sockets" - case PreferIPv6AddressesKey => "system should prefer IPv6 addresses" - case NoTraceSuppressionKey => "scala should not suppress any stack trace creation" - case _ => "" - } - - lazy val headless: BooleanProp = BooleanProp.keyExists(HeadlessKey) - lazy val preferIPv4Stack: BooleanProp = BooleanProp.keyExists(PreferIPv4StackKey) - lazy val preferIPv6Addresses: BooleanProp = BooleanProp.keyExists(PreferIPv6AddressesKey) - lazy val noTraceSuppression: BooleanProp = BooleanProp.valueIsTrue(NoTraceSuppressionKey) - @deprecated("use noTraceSuppression", "2.12.0") - def noTraceSupression = noTraceSuppression -} - diff --git a/tests/scala2-library/src/library/scala/sys/package.scala b/tests/scala2-library/src/library/scala/sys/package.scala deleted file mode 100644 index e493603bc24e..000000000000 --- a/tests/scala2-library/src/library/scala/sys/package.scala +++ /dev/null @@ -1,87 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.collection.immutable -import scala.collection.JavaConverters._ - -/** The package object `scala.sys` contains methods for reading - * and altering core aspects of the virtual machine as well as the - * world outside of it. 
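Editor's note: `SystemProperties`, deleted above, is a mutable-`Map` view of `System.getProperties`. The sketch below reads a standard JVM key and round-trips an illustrative one:

```scala
import scala.sys.SystemProperties

object SysPropsDemo extends App {
  val props = new SystemProperties

  println(props.get("java.vm.name"))    // Some(...) on any JVM

  props += ("myapp.mode" -> "test")     // illustrative key
  println(props.getOrElse("myapp.mode", "unset"))
  props -= "myapp.mode"

  // The companion also exposes a few well-known flags as BooleanProps.
  println(SystemProperties.headless.isSet)
}
```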
- * - * @author Paul Phillips - * @version 2.9 - * @since 2.9 - */ -package object sys { - /** Throw a new RuntimeException with the supplied message. - * - * @return Nothing. - */ - def error(message: String): Nothing = throw new RuntimeException(message) - - /** Exit the JVM with the default status code. - * - * @return Nothing. - */ - def exit(): Nothing = exit(0) - - /** Exit the JVM with the given status code. - * - * @return Nothing. - */ - def exit(status: Int): Nothing = { - java.lang.System.exit(status) - throw new Throwable() - } - - /** A convenience method to get the current Runtime instance. - * - * @return the result of `java.lang.Runtime.getRuntime()` - */ - def runtime: Runtime = Runtime.getRuntime - - /** A bidirectional, mutable Map representing the current system Properties. - * - * @return a SystemProperties. - * @see [[scala.sys.SystemProperties]] - */ - def props: SystemProperties = new SystemProperties - - /** An immutable Map representing the current system environment. - * - * @return a Map containing the system environment variables. - */ - def env: immutable.Map[String, String] = immutable.Map(System.getenv().asScala.toSeq: _*) - - /** Register a shutdown hook to be run when the VM exits. - * The hook is automatically registered: the returned value can be ignored, - * but is available in case the Thread requires further modification. - * It can also be unregistered by calling ShutdownHookThread#remove(). - * - * Note that shutdown hooks are NOT guaranteed to be run. - * - * @param body the body of code to run at shutdown - * @return the Thread which will run the shutdown hook. - * @see [[scala.sys.ShutdownHookThread]] - */ - def addShutdownHook(body: => Unit): ShutdownHookThread = ShutdownHookThread(body) - - /** Returns all active thread in the current thread's thread group and subgroups. - * - * @return an IndexedSeq containing the threads. - */ - def allThreads(): IndexedSeq[Thread] = { - val num = Thread.activeCount() - val tarray = new Array[Thread](num) - val got = Thread.enumerate(tarray) - - tarray take got - } -} diff --git a/tests/scala2-library/src/library/scala/sys/process/BasicIO.scala b/tests/scala2-library/src/library/scala/sys/process/BasicIO.scala deleted file mode 100644 index b39ae77c62c8..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/BasicIO.scala +++ /dev/null @@ -1,245 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys -package process - -import processInternal._ -import java.io.{ BufferedReader, InputStreamReader, FilterInputStream, FilterOutputStream } -import java.util.concurrent.LinkedBlockingQueue -import scala.collection.immutable.Stream -import scala.annotation.tailrec - -/** - * This object contains factories for [[scala.sys.process.ProcessIO]], - * which can be used to control the I/O of a [[scala.sys.process.Process]] - * when a [[scala.sys.process.ProcessBuilder]] is started with the `run` - * command. - * - * It also contains some helper methods that can be used to in the creation of - * `ProcessIO`. - * - * It is used by other classes in the package in the implementation of various - * features, but can also be used by client code. 
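Editor's note: the `scala.sys` package object deleted above is a small grab-bag of JVM helpers. A quick tour (the printed values naturally depend on the machine):

```scala
object SysTour extends App {
  val home    = sys.env.getOrElse("HOME", "/")   // immutable snapshot of the environment
  val vmName  = sys.props("java.vm.name")        // mutable view over system properties
  val threads = sys.allThreads()                 // live threads in this thread group

  println(s"$vmName, ${threads.size} threads, HOME=$home")

  val hook = sys.addShutdownHook(println("bye")) // same ShutdownHookThread as above
  hook.remove()

  // sys.error and sys.exit both return Nothing, so they type-check anywhere:
  if (vmName.isEmpty) sys.error("no JVM name?")
}
```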
- */ -object BasicIO { - /** Size of the buffer used in all the functions that copy data */ - final val BufferSize = 8192 - - /** Used to separate lines in the `processFully` function that takes `Appendable`. */ - final val Newline = System.lineSeparator - - private[process] final class Streamed[T]( - val process: T => Unit, - val done: Int => Unit, - val stream: () => Stream[T] - ) - - private[process] object Streamed { - def apply[T](nonzeroException: Boolean): Streamed[T] = { - val q = new LinkedBlockingQueue[Either[Int, T]] - def next(): Stream[T] = q.take match { - case Left(0) => Stream.empty - case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty - case Right(s) => Stream.cons(s, next()) - } - new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next()) - } - } - - private[process] trait Uncloseable extends Closeable { - final override def close() { } - } - private[process] object Uncloseable { - def apply(in: InputStream): InputStream = new FilterInputStream(in) with Uncloseable { } - def apply(out: OutputStream): OutputStream = new FilterOutputStream(out) with Uncloseable { } - def protect(in: InputStream): InputStream = if (in eq stdin) Uncloseable(in) else in - def protect(out: OutputStream): OutputStream = if ((out eq stdout) || (out eq stderr)) Uncloseable(out) else out - } - - /** Creates a `ProcessIO` from a function `String => Unit`. It can attach the - * process input to stdin, and it will either send the error stream to - * stderr, or to a `ProcessLogger`. - * - * For example, the `ProcessIO` created below will print all normal output - * while ignoring all error output. No input will be provided. - * {{{ - * import scala.sys.process.BasicIO - * val errToDevNull = BasicIO(false, println(_), None) - * }}} - * - * @param withIn True if the process input should be attached to stdin. - * @param output A function that will be called with the process output. - * @param log An optional `ProcessLogger` to which the output should be - * sent. If `None`, output will be sent to stderr. - * @return A `ProcessIO` with the characteristics above. - */ - def apply(withIn: Boolean, output: String => Unit, log: Option[ProcessLogger]) = - new ProcessIO(input(withIn), processFully(output), getErr(log)) - - /** Creates a `ProcessIO` that appends its output to a `StringBuffer`. It can - * attach the process input to stdin, and it will either send the error - * stream to stderr, or to a `ProcessLogger`. - * - * For example, the `ProcessIO` created by the function below will store the - * normal output on the buffer provided, and print all error on stderr. The - * input will be read from stdin. - * {{{ - * import scala.sys.process.{BasicIO, ProcessLogger} - * val printer = ProcessLogger(println(_)) - * def appendToBuffer(b: StringBuffer) = BasicIO(true, b, Some(printer)) - * }}} - * - * @param withIn True if the process input should be attached to stdin. - * @param buffer A `StringBuffer` which will receive the process normal - * output. - * @param log An optional `ProcessLogger` to which the output should be - * sent. If `None`, output will be sent to stderr. - * @return A `ProcessIO` with the characteristics above. - */ - def apply(withIn: Boolean, buffer: StringBuffer, log: Option[ProcessLogger]) = - new ProcessIO(input(withIn), processFully(buffer), getErr(log)) - - /** Creates a `ProcessIO` from a `ProcessLogger` . It can attach the - * process input to stdin. 
- * - * @param withIn True if the process input should be attached to stdin. - * @param log A `ProcessLogger` to receive all output, normal and error. - * @return A `ProcessIO` with the characteristics above. - */ - def apply(withIn: Boolean, log: ProcessLogger) = - new ProcessIO(input(withIn), processOutFully(log), processErrFully(log)) - - /** Returns a function `InputStream => Unit` given an optional - * `ProcessLogger`. If no logger is passed, the function will send the output - * to stderr. This function can be used to create a - * [[scala.sys.process.ProcessIO]]. - * - * @param log An optional `ProcessLogger` to which the contents of - * the `InputStream` will be sent. - * @return A function `InputStream => Unit` (used by - * [[scala.sys.process.ProcessIO]]) which will send the data to - * either the provided `ProcessLogger` or, if `None`, to stderr. - */ - def getErr(log: Option[ProcessLogger]) = log match { - case Some(lg) => processErrFully(lg) - case None => toStdErr - } - - private def processErrFully(log: ProcessLogger) = processFully(log err _) - private def processOutFully(log: ProcessLogger) = processFully(log out _) - - /** Closes a `Closeable` without throwing an exception */ - def close(c: Closeable) = try c.close() catch { case _: IOException => () } - - /** Returns a function `InputStream => Unit` that appends all data read to the - * provided `Appendable`. This function can be used to create a - * [[scala.sys.process.ProcessIO]]. The buffer will be appended line by line. - * - * @param buffer An `Appendable` such as `StringBuilder` or `StringBuffer`. - * @return A function `InputStream => Unit` (used by - * [[scala.sys.process.ProcessIO]] which will append all data read - * from the stream to the buffer. - */ - def processFully(buffer: Appendable): InputStream => Unit = processFully(appendLine(buffer)) - - /** Returns a function `InputStream => Unit` that will call the passed - * function with all data read. This function can be used to create a - * [[scala.sys.process.ProcessIO]]. The `processLine` function will be called - * with each line read, and `Newline` will be appended after each line. - * - * @param processLine A function that will be called with all data read from - * the stream. - * @return A function `InputStream => Unit` (used by - * [[scala.sys.process.ProcessIO]] which will call `processLine` - * with all data read from the stream. - */ - def processFully(processLine: String => Unit): InputStream => Unit = in => { - val reader = new BufferedReader(new InputStreamReader(in)) - try processLinesFully(processLine)(reader.readLine) - finally reader.close() - } - - /** Calls `processLine` with the result of `readLine` until the latter returns - * `null` or the current thread is interrupted. - */ - def processLinesFully(processLine: String => Unit)(readLine: () => String) { - def working = (Thread.currentThread.isInterrupted == false) - def halting = { Thread.currentThread.interrupt(); null } - def readFully(): Unit = - if (working) { - val line = - try readLine() - catch { - case _: InterruptedException => halting - case e: IOException if !working => halting - } - if (line != null) { - processLine(line) - readFully() - } - } - readFully() - } - - /** Copy contents of stdin to the `OutputStream`. */ - def connectToIn(o: OutputStream): Unit = transferFully(Uncloseable protect stdin, o) - - /** Returns a function `OutputStream => Unit` that either reads the content - * from stdin or does nothing. This function can be used by - * [[scala.sys.process.ProcessIO]]. 
- */ - def input(connect: Boolean): OutputStream => Unit = { outputToProcess => - if (connect) connectToIn(outputToProcess) - outputToProcess.close() - } - - /** Returns a `ProcessIO` connected to stdout and stderr, and, optionally, stdin. */ - def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput)) - - /** Returns a `ProcessIO` connected to stdout, stderr and the provided `in` */ - def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, toStdOut, toStdErr) - - /** Send all the input from the stream to stderr, and closes the input stream - * afterwards. - */ - def toStdErr = (in: InputStream) => transferFully(in, stderr) - - /** Send all the input from the stream to stdout, and closes the input stream - * afterwards. - */ - def toStdOut = (in: InputStream) => transferFully(in, stdout) - - /** Copy all input from the input stream to the output stream. Closes the - * input stream once it's all read. - */ - def transferFully(in: InputStream, out: OutputStream): Unit = - try transferFullyImpl(in, out) - catch onIOInterrupt(()) - - private[this] def appendLine(buffer: Appendable): String => Unit = line => { - buffer append line - buffer append Newline - } - - private[this] def transferFullyImpl(in: InputStream, out: OutputStream) { - val buffer = new Array[Byte](BufferSize) - @tailrec def loop() { - val byteCount = in.read(buffer) - if (byteCount > 0) { - out.write(buffer, 0, byteCount) - // flush() will throw an exception once the process has terminated - val available = try { out.flush(); true } catch { case _: IOException => false } - if (available) loop() - } - } - loop() - in.close() - } -} diff --git a/tests/scala2-library/src/library/scala/sys/process/Process.scala b/tests/scala2-library/src/library/scala/sys/process/Process.scala deleted file mode 100644 index 0ec749e78a2e..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/Process.scala +++ /dev/null @@ -1,223 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys -package process - -import processInternal._ -import ProcessBuilder._ -import scala.language.implicitConversions - -/** Represents a process that is running or has finished running. - * It may be a compound process with several underlying native processes (such as `a #&& b`). - * - * This trait is often not used directly, though its companion object contains - * factories for [[scala.sys.process.ProcessBuilder]], the main component of this - * package. - * - * It is used directly when calling the method `run` on a `ProcessBuilder`, - * which makes the process run in the background. The methods provided on `Process` - * make it possible for one to block until the process exits and get the exit value, - * or destroy the process altogether. - * - * @see [[scala.sys.process.ProcessBuilder]] - */ -trait Process { - /** Returns this process alive status */ - def isAlive(): Boolean - /** Blocks until this process exits and returns the exit code.*/ - def exitValue(): Int - /** Destroys this process. */ - def destroy(): Unit -} - -/** Methods for constructing simple commands that can then be combined. */ -object Process extends ProcessImpl with ProcessCreation { } - -/** Factories for creating [[scala.sys.process.ProcessBuilder]]. They can be - * found on and used through [[scala.sys.process.Process]]'s companion object. 
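Editor's note: `BasicIO`, deleted above, is the workhorse behind the convenience methods on `ProcessBuilder`, but it can also be used directly with `run`. A sketch that captures stdout into a `StringBuffer` while the child's stderr is still forwarded to the console; the `ls` command is just an example:

```scala
import scala.sys.process._

object BasicIODemo extends App {
  val buffer = new StringBuffer
  // withIn = false: do not wire our stdin to the child process.
  // log = None: the child's stderr is forwarded to our stderr.
  val io = BasicIO(withIn = false, buffer, None)

  val proc = Process("ls").run(io)
  val exit = proc.exitValue()   // blocks until the process finishes
  println(s"exit=$exit, captured:\n$buffer")
}
```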
- */ -trait ProcessCreation { - /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String`, including the - * parameters. - * - * @example {{{ apply("cat file.txt") }}} - */ - def apply(command: String): ProcessBuilder = apply(command, None) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`, - * where the head is the command and each element of the tail is a parameter. - * - * @example {{{ apply("cat" :: files) }}} - */ - def apply(command: Seq[String]): ProcessBuilder = apply(command, None) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`, - * and a sequence of `String` representing the arguments. - * - * @example {{{ apply("cat", files) }}} - */ - def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command +: arguments, None) - - /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra - * environment variables. - * - * @example {{{ apply("java", new java.io.File("/opt/app"), "CLASSPATH" -> "library.jar") }}} - */ - def apply(command: String, cwd: File, extraEnv: (String, String)*): ProcessBuilder = - apply(command, Some(cwd), extraEnv: _*) - - /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra - * environment variables. - * - * @example {{{ apply("java" :: javaArgs, new java.io.File("/opt/app"), "CLASSPATH" -> "library.jar") }}} - */ - def apply(command: Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder = - apply(command, Some(cwd), extraEnv: _*) - - /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to - * `File` and extra environment variables. - * - * @example {{{ apply("java", params.get("cwd"), "CLASSPATH" -> "library.jar") }}} - */ - def apply(command: String, cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = { - apply(command.split("""\s+"""), cwd, extraEnv : _*) - // not smart to use this on windows, because CommandParser uses \ to escape ". - /*CommandParser.parse(command) match { - case Left(errorMsg) => error(errorMsg) - case Right((cmd, args)) => apply(cmd :: args, cwd, extraEnv : _*) - }*/ - } - - /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to - * `File` and extra environment variables. - * - * @example {{{ apply("java" :: javaArgs, params.get("cwd"), "CLASSPATH" -> "library.jar") }}} - */ - def apply(command: Seq[String], cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = { - val jpb = new JProcessBuilder(command.toArray: _*) - cwd foreach (jpb directory _) - extraEnv foreach { case (k, v) => jpb.environment.put(k, v) } - apply(jpb) - } - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`. - * - * @example {{{ - * apply((new java.lang.ProcessBuilder("ls", "-l")) directory new java.io.File(System.getProperty("user.home"))) - * }}} - */ - def apply(builder: JProcessBuilder): ProcessBuilder = new Simple(builder) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This - * `ProcessBuilder` can then be used as a `Source` or a `Sink`, so one can - * pipe things from and to it. - */ - def apply(file: File): FileBuilder = new FileImpl(file) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This - * `ProcessBuilder` can then be used as a `Source`, so that one can pipe things - * from it. 
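Editor's note: the `ProcessCreation` factories documented above accept a working directory and extra environment entries. A sketch with illustrative paths and variable names:

```scala
import scala.sys.process._
import java.io.File

object FactoryDemo extends App {
  // The Seq form is whitespace-safe; /tmp and MYAPP_MODE are illustrative.
  val pb: ProcessBuilder =
    Process(Seq("ls", "-l"), new File("/tmp"), "MYAPP_MODE" -> "test")

  println(pb.!!)   // run, block, and return stdout as one String
}
```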
- */ - def apply(url: URL): URLBuilder = new URLImpl(url) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be - * to force an exit value. - */ - def apply(value: Boolean): ProcessBuilder = apply(value.toString, if (value) 0 else 1) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String` name and a - * `Boolean`. This can be used to force an exit value, with the name being - * used for `toString`. - */ - def apply(name: String, exitValue: => Int): ProcessBuilder = new Dummy(name, exitValue) - - /** Creates a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of - * something else for which there's an implicit conversion to `Source`. - */ - def applySeq[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = builders.map(convert) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from one or more - * [[scala.sys.process.ProcessBuilder.Source]], which can then be - * piped to something else. - * - * This will concatenate the output of all sources. For example: - * - * {{{ - * import scala.sys.process._ - * import scala.sys.process.Process.cat - * import java.net.URL - * import java.io.File - * - * val spde = new URL("http://technically.us/spde.html") - * val dispatch = new URL("http://dispatch.databinder.net/Dispatch.html") - * val build = new File("project/build.properties") - * cat(spde, dispatch, build) #| "grep -i scala" ! - * }}} - */ - def cat(file: Source, files: Source*): ProcessBuilder = cat(file +: files) - - /** Creates a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence - * of [[scala.sys.process.ProcessBuilder.Source]], which can then be - * piped to something else. - * - * This will concatenate the output of all sources. - */ - def cat(files: Seq[Source]): ProcessBuilder = { - require(files.nonEmpty) - files map (_.cat) reduceLeft (_ #&& _) - } -} - -/** Provide implicit conversions for the factories offered by [[scala.sys.process.Process]]'s - * companion object. These implicits can then be used to decrease the noise in a pipeline - * of commands, making it look more shell-like. They are available through the package object - * [[scala.sys.process]]. - */ -trait ProcessImplicits { - import Process._ - - /** Return a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence - * of values for which an implicit conversion to `Source` is available. - */ - implicit def buildersToProcess[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = applySeq(builders) - - /** Implicitly convert a `java.lang.ProcessBuilder` into a Scala one. */ - implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder) - - /** Implicitly convert a `java.io.File` into a - * [[scala.sys.process.ProcessBuilder.FileBuilder]], which can be used as - * either input or output of a process. For example: - * {{{ - * import scala.sys.process._ - * "ls" #> new java.io.File("dirContents.txt") ! - * }}} - */ - implicit def fileToProcess(file: File): FileBuilder = apply(file) - - /** Implicitly convert a `java.net.URL` into a - * [[scala.sys.process.ProcessBuilder.URLBuilder]] , which can be used as - * input to a process. For example: - * {{{ - * import scala.sys.process._ - * Seq("xmllint", "--html", "-") #< new java.net.URL("http://www.scala-lang.org") #> new java.io.File("fixed.html") ! - * }}} - */ - implicit def urlToProcess(url: URL): URLBuilder = apply(url) - - /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]]. 
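Editor's note: with the `ProcessImplicits` conversions deleted above in scope (they come in via `import scala.sys.process._`), strings, files, and URLs compose directly. The file names below are illustrative:

```scala
import scala.sys.process._
import java.io.File

object ImplicitsDemo extends App {
  // "grep -i error" is lifted to a ProcessBuilder by stringToProcess, and the
  // Files become input/output redirections via fileToProcess.
  val exit = ("grep -i error" #< new File("app.log") #> new File("errors.txt")).!
  println(s"grep exited with $exit")
}
```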
*/ - implicit def stringToProcess(command: String): ProcessBuilder = apply(command) - - /** Implicitly convert a sequence of `String` into a - * [[scala.sys.process.ProcessBuilder]]. The first argument will be taken to - * be the command to be executed, and the remaining will be its arguments. - * When using this, arguments may contain spaces. - */ - implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command) -} diff --git a/tests/scala2-library/src/library/scala/sys/process/ProcessBuilder.scala b/tests/scala2-library/src/library/scala/sys/process/ProcessBuilder.scala deleted file mode 100644 index d0745e5833c7..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/ProcessBuilder.scala +++ /dev/null @@ -1,370 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys -package process - -import processInternal._ -import ProcessBuilder._ - -/** Represents a sequence of one or more external processes that can be - * executed. A `ProcessBuilder` can be a single external process, or a - * combination of other `ProcessBuilder`. One can control where the - * output of an external process will go to, and where its input will come - * from, or leave that decision to whoever starts it. - * - * One creates a `ProcessBuilder` through factories provided in - * [[scala.sys.process.Process]]'s companion object, or implicit conversions - * based on these factories made available in the package object - * [[scala.sys.process]]. Here are some examples: - * {{{ - * import scala.sys.process._ - * - * // Executes "ls" and sends output to stdout - * "ls".! - * - * // Execute "ls" and assign a `Stream[String]` of its output to "contents". - * val contents = Process("ls").lineStream - * - * // Here we use a `Seq` to make the parameter whitespace-safe - * def contentsOf(dir: String): String = Seq("ls", dir).!! - * }}} - * - * The methods of `ProcessBuilder` are divided in three categories: the ones that - * combine two `ProcessBuilder` to create a third, the ones that redirect input - * or output of a `ProcessBuilder`, and the ones that execute - * the external processes associated with it. - * - * ==Combining `ProcessBuilder`== - * - * Two existing `ProcessBuilder` can be combined in the following ways: - * - * - They can be executed in parallel, with the output of the first being fed - * as input to the second, like Unix pipes. This is achieved with the `#|` - * method. - * - They can be executed in sequence, with the second starting as soon as - * the first ends. This is done by the `###` method. - * - The execution of the second one can be conditioned by the return code - * (exit status) of the first, either only when it's zero, or only when it's - * not zero. The methods `#&&` and `#||` accomplish these tasks. - * - * ==Redirecting Input/Output== - * - * Though control of input and output can be done when executing the process, - * there's a few methods that create a new `ProcessBuilder` with a - * pre-configured input or output. They are `#<`, `#>` and `#>>`, and may take - * as input either another `ProcessBuilder` (like the pipe described above), or - * something else such as a `java.io.File` or a `java.io.InputStream`. - * For example: - * {{{ - * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") ! 
- * }}} - * - * ==Starting Processes== - * - * To execute all external commands associated with a `ProcessBuilder`, one - * may use one of four groups of methods. Each of these methods have various - * overloads and variations to enable further control over the I/O. These - * methods are: - * - * - `run`: the most general method, it returns a - * [[scala.sys.process.Process]] immediately, and the external command - * executes concurrently. - * - `!`: blocks until all external commands exit, and returns the exit code - * of the last one in the chain of execution. - * - `!!`: blocks until all external commands exit, and returns a `String` - * with the output generated. - * - `lineStream`: returns immediately like `run`, and the output being generated - * is provided through a `Stream[String]`. Getting the next element of that - * `Stream` may block until it becomes available. This method will throw an - * exception if the return code is different than zero -- if this is not - * desired, use the `lineStream_!` method. - * - * ==Handling Input and Output== - * - * If not specified, the input of the external commands executed with `run` or - * `!` will not be tied to anything, and the output will be redirected to the - * stdout and stderr of the Scala process. For the methods `!!` and `lineStream`, no - * input will be provided, and the output will be directed according to the - * semantics of these methods. - * - * Some methods will cause stdin to be used as input. Output can be controlled - * with a [[scala.sys.process.ProcessLogger]] -- `!!` and `lineStream` will only - * redirect error output when passed a `ProcessLogger`. If one desires full - * control over input and output, then a [[scala.sys.process.ProcessIO]] can be - * used with `run`. - * - * For example, we could silence the error output from `lineStream_!` like this: - * {{{ - * val etcFiles = "find /etc" lineStream_! ProcessLogger(line => ()) - * }}} - * - * ==Extended Example== - * - * Let's examine in detail one example of usage: - * {{{ - * import scala.sys.process._ - * "find src -name *.scala -exec grep null {} ;" #| "xargs test -z" #&& "echo null-free" #|| "echo null detected" ! - * }}} - * Note that every `String` is implicitly converted into a `ProcessBuilder` - * through the implicits imported from [[scala.sys.process]]. These `ProcessBuilder` are then - * combined in three different ways. - * - * 1. `#|` pipes the output of the first command into the input of the second command. It - * mirrors a shell pipe (`|`). - * 1. `#&&` conditionally executes the second command if the previous one finished with - * exit value 0. It mirrors shell's `&&`. - * 1. `#||` conditionally executes the third command if the exit value of the previous - * command is different than zero. It mirrors shell's `||`. - * - * Finally, `!` at the end executes the commands, and returns the exit value. - * Whatever is printed will be sent to the Scala process standard output. If - * we wanted to capture it, we could run that with `!!` instead. - * - * Note: though it is not shown above, the equivalent of a shell's `;` would be - * `###`. The reason for this name is that `;` is a reserved token in Scala. - * - * Note: the `lines` method, though deprecated, may conflict with the `StringLike` - * method of the same name. To avoid this, one may wish to call the builders in - * `Process` instead of importing `scala.sys.process._`. 
The example above would be - * {{{ - * import scala.sys.process.Process - * Process("find src -name *.scala -exec grep null {} ;") #| Process("xargs test -z") #&& Process("echo null-free") #|| Process("echo null detected") ! - * }}} - */ -trait ProcessBuilder extends Source with Sink { - /** Starts the process represented by this builder, blocks until it exits, and - * returns the output as a String. Standard error is sent to the console. If - * the exit code is non-zero, an exception is thrown. - */ - def !! : String - - /** Starts the process represented by this builder, blocks until it exits, and - * returns the output as a String. Standard error is sent to the provided - * ProcessLogger. If the exit code is non-zero, an exception is thrown. - */ - def !!(log: ProcessLogger): String - - /** Starts the process represented by this builder, blocks until it exits, and - * returns the output as a String. Standard error is sent to the console. If - * the exit code is non-zero, an exception is thrown. The newly started - * process reads from standard input of the current process. - */ - def !!< : String - - /** Starts the process represented by this builder, blocks until it exits, and - * returns the output as a String. Standard error is sent to the provided - * ProcessLogger. If the exit code is non-zero, an exception is thrown. The - * newly started process reads from standard input of the current process. - */ - def !!<(log: ProcessLogger): String - - /** Starts the process represented by this builder. The output is returned as - * a Stream that blocks when lines are not available but the process has not - * completed. Standard error is sent to the console. If the process exits - * with a non-zero value, the Stream will provide all lines up to termination - * and then throw an exception. - */ - def lineStream: Stream[String] - - /** Deprecated (renamed). Use `lineStream` instead. */ - @deprecated("use lineStream instead", "2.11.0") - def lines: Stream[String] = lineStream - - /** Starts the process represented by this builder. The output is returned as - * a Stream that blocks when lines are not available but the process has not - * completed. Standard error is sent to the provided ProcessLogger. If the - * process exits with a non-zero value, the Stream will provide all lines up - * to termination and then throw an exception. - */ - def lineStream(log: ProcessLogger): Stream[String] - - /** Deprecated (renamed). Use `lineStream(log: ProcessLogger)` instead. */ - @deprecated("use lineStream instead", "2.11.0") - def lines(log: ProcessLogger): Stream[String] = lineStream(log) - - /** Starts the process represented by this builder. The output is returned as - * a Stream that blocks when lines are not available but the process has not - * completed. Standard error is sent to the console. If the process exits - * with a non-zero value, the Stream will provide all lines up to termination - * but will not throw an exception. - */ - def lineStream_! : Stream[String] - - /** Deprecated (renamed). Use `lineStream_!` instead. */ - @deprecated("use lineStream_! instead", "2.11.0") - def lines_! : Stream[String] = lineStream_! - - /** Starts the process represented by this builder. The output is returned as - * a Stream that blocks when lines are not available but the process has not - * completed. Standard error is sent to the provided ProcessLogger. If the - * process exits with a non-zero value, the Stream will provide all lines up - * to termination but will not throw an exception. 
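Editor's note: the running methods documented above (`!`, `!!`, the `lineStream` family, and the `run` overloads) differ mainly in blocking behaviour and in what they do with output. A side-by-side sketch using an illustrative pipeline:

```scala
import scala.sys.process._

object RunDemo extends App {
  val cmd = "ls -l" #| "grep ."   // illustrative pipeline; "grep ." matches any non-empty line

  val code: Int             = cmd.!            // exit code, output goes to the console
  val out: String           = cmd.!!           // captured stdout, throws on non-zero exit
  val lines: Stream[String] = cmd.lineStream_! // lazy lines, never throws on failure

  // Route both output streams through a ProcessLogger instead of the console.
  val logger = ProcessLogger(o => println(s"out: $o"), e => println(s"err: $e"))
  cmd.!(logger)

  println(s"code=$code, ${lines.size} lines, first chars: ${out.take(20)}")
}
```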
- */ - def lineStream_!(log: ProcessLogger): Stream[String] - - /** Deprecated (renamed). Use `lineStream_!(log: ProcessLogger)` instead. */ - @deprecated("use lineStream_! instead", "2.11.0") - def lines_!(log: ProcessLogger): Stream[String] = lineStream_!(log) - - /** Starts the process represented by this builder, blocks until it exits, and - * returns the exit code. Standard output and error are sent to the console. - */ - def ! : Int - - /** Starts the process represented by this builder, blocks until it exits, and - * returns the exit code. Standard output and error are sent to the given - * ProcessLogger. - */ - def !(log: ProcessLogger): Int - - /** Starts the process represented by this builder, blocks until it exits, and - * returns the exit code. Standard output and error are sent to the console. - * The newly started process reads from standard input of the current process. - */ - def !< : Int - - /** Starts the process represented by this builder, blocks until it exits, and - * returns the exit code. Standard output and error are sent to the given - * ProcessLogger. The newly started process reads from standard input of the - * current process. - */ - def !<(log: ProcessLogger): Int - - /** Starts the process represented by this builder. Standard output and error - * are sent to the console.*/ - def run(): Process - - /** Starts the process represented by this builder. Standard output and error - * are sent to the given ProcessLogger. - */ - def run(log: ProcessLogger): Process - - /** Starts the process represented by this builder. I/O is handled by the - * given ProcessIO instance. - */ - def run(io: ProcessIO): Process - - /** Starts the process represented by this builder. Standard output and error - * are sent to the console. The newly started process reads from standard - * input of the current process if `connectInput` is true. - */ - def run(connectInput: Boolean): Process - - /** Starts the process represented by this builder. Standard output and error - * are sent to the given ProcessLogger. The newly started process reads from - * standard input of the current process if `connectInput` is true. - */ - def run(log: ProcessLogger, connectInput: Boolean): Process - - /** Constructs a command that runs this command first and then `other` if this - * command succeeds. - */ - def #&& (other: ProcessBuilder): ProcessBuilder - - /** Constructs a command that runs this command first and then `other` if this - * command does not succeed. - */ - def #|| (other: ProcessBuilder): ProcessBuilder - - /** Constructs a command that will run this command and pipes the output to - * `other`. `other` must be a simple command. - */ - def #| (other: ProcessBuilder): ProcessBuilder - - /** Constructs a command that will run this command and then `other`. The - * exit code will be the exit code of `other`. - */ - def ### (other: ProcessBuilder): ProcessBuilder - - - /** True if this command can be the target of a pipe. */ - def canPipeTo: Boolean - - /** True if this command has an exit code which should be propagated to the - * user. Given a pipe between A and B, if B.hasExitValue is true then the - * exit code will be the one from B; if it is false, the one from A. This - * exists to prevent output redirections (implemented as pipes) from masking - * useful process error codes. - */ - def hasExitValue: Boolean -} - -/** This object contains traits used to describe input and output sources. 
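Editor's note: `run`, documented above, is the non-blocking entry point: it returns a `Process` handle that can be polled, waited on, or destroyed. A sketch with an illustrative long-running command:

```scala
import scala.sys.process._

object BackgroundDemo extends App {
  // "sleep 60" stands in for any long-running command.
  val proc: Process = "sleep 60".run()

  Thread.sleep(1000)
  if (proc.isAlive()) proc.destroy()          // ask the OS to terminate it

  println(s"exit value: ${proc.exitValue()}") // blocks until the process is really gone
}
```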
*/ -object ProcessBuilder extends ProcessBuilderImpl { - /** Used when creating [[scala.sys.process.ProcessBuilder.Source]] from an URL. */ - trait URLBuilder extends Source { - - } - - /** Used when creating [[scala.sys.process.ProcessBuilder.Source]] and/or - * [[scala.sys.process.ProcessBuilder.Sink]] from a file. - */ - trait FileBuilder extends Sink with Source { - /** Append the contents of a `java.io.File` to this file */ - def #<<(f: File): ProcessBuilder - - /** Append the contents from a `java.net.URL` to this file */ - def #<<(u: URL): ProcessBuilder - - /** Append the contents of a `java.io.InputStream` to this file */ - def #<<(i: => InputStream): ProcessBuilder - - /** Append the contents of a [[scala.sys.process.ProcessBuilder]] to this file */ - def #<<(p: ProcessBuilder): ProcessBuilder - } - - /** Represents everything that can be used as an input to a - * [[scala.sys.process.ProcessBuilder]]. - */ - trait Source { - protected def toSource: ProcessBuilder - - /** Writes the output stream of this process to the given file. */ - def #> (f: File): ProcessBuilder = toFile(f, append = false) - - /** Appends the output stream of this process to the given file. */ - def #>> (f: File): ProcessBuilder = toFile(f, append = true) - - /** Writes the output stream of this process to the given OutputStream. The - * argument is call-by-name, so the stream is recreated, written, and closed each - * time this process is executed. - */ - def #>(out: => OutputStream): ProcessBuilder = #> (new OStreamBuilder(out, "")) - - /** Writes the output stream of this process to a [[scala.sys.process.ProcessBuilder]]. */ - def #>(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(toSource, b, false) - - /** Returns a [[scala.sys.process.ProcessBuilder]] representing this `Source`. */ - def cat = toSource - private def toFile(f: File, append: Boolean) = #> (new FileOutput(f, append)) - } - - /** Represents everything that can receive an output from a - * [[scala.sys.process.ProcessBuilder]]. - */ - trait Sink { - protected def toSink: ProcessBuilder - - /** Reads the given file into the input stream of this process. */ - def #< (f: File): ProcessBuilder = #< (new FileInput(f)) - - /** Reads the given URL into the input stream of this process. */ - def #< (f: URL): ProcessBuilder = #< (new URLInput(f)) - - /** Reads the given InputStream into the input stream of this process. The - * argument is call-by-name, so the stream is recreated, read, and closed each - * time this process is executed. - */ - def #<(in: => InputStream): ProcessBuilder = #< (new IStreamBuilder(in, "")) - - /** Reads the output of a [[scala.sys.process.ProcessBuilder]] into the input stream of this process. 
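Editor's note: the `Source` and `Sink` traits deleted above supply the redirection operators. The sketch below overwrites, appends, and concatenates files whose names are purely illustrative:

```scala
import scala.sys.process._
import java.io.File

object RedirectDemo extends App {
  val log = new File("build.log")

  ("echo first run"  #> log).!    // Source#>  : overwrite the file
  ("echo second run" #>> log).!   // Source#>> : append to it

  ("echo extra" #> new File("extra.log")).!
  (log #<< new File("extra.log")).!   // FileBuilder#<< appends extra.log's contents

  println(scala.io.Source.fromFile(log).mkString)
}
```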
*/ - def #<(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(b, toSink, false) - } -} diff --git a/tests/scala2-library/src/library/scala/sys/process/ProcessBuilderImpl.scala b/tests/scala2-library/src/library/scala/sys/process/ProcessBuilderImpl.scala deleted file mode 100644 index 0df2e648e0e1..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/ProcessBuilderImpl.scala +++ /dev/null @@ -1,219 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys -package process - -import processInternal._ -import Process._ -import java.io.{ FileInputStream, FileOutputStream } -import BasicIO.{ Uncloseable, Streamed } -import Uncloseable.protect - -private[process] trait ProcessBuilderImpl { - self: ProcessBuilder.type => - - private[process] class DaemonBuilder(underlying: ProcessBuilder) extends AbstractBuilder { - final def run(io: ProcessIO): Process = underlying.run(io.daemonized()) - } - - private[process] class Dummy(override val toString: String, exitValue: => Int) extends AbstractBuilder { - override def run(io: ProcessIO): Process = new DummyProcess(exitValue) - override def canPipeTo = true - } - - private[process] class URLInput(url: URL) extends IStreamBuilder(url.openStream, url.toString) - private[process] class FileInput(file: File) extends IStreamBuilder(new FileInputStream(file), file.getAbsolutePath) - private[process] class FileOutput(file: File, append: Boolean) extends OStreamBuilder(new FileOutputStream(file, append), file.getAbsolutePath) - - private[process] class OStreamBuilder( - stream: => OutputStream, - label: String - ) extends ThreadBuilder(label, _ writeInput protect(stream)) { - override def hasExitValue = false - } - - private[process] class IStreamBuilder( - stream: => InputStream, - label: String - ) extends ThreadBuilder(label, _ processOutput protect(stream)) { - override def hasExitValue = false - } - - private[process] abstract class ThreadBuilder( - override val toString: String, - runImpl: ProcessIO => Unit - ) extends AbstractBuilder { - - override def run(io: ProcessIO): Process = { - val success = new SyncVar[Boolean] - def go(): Unit = { - var ok = false - try { - runImpl(io) - ok = true - } finally success.put(ok) - } - val t = Spawn(go(), io.daemonizeThreads) - new ThreadProcess(t, success) - } - } - - /** Represents a simple command without any redirection or combination. 
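// A small sketch of the Source/Sink redirection operators described above (#>, #>>, #<); the
// file names, URL and commands are placeholder assumptions.
object RedirectionSketch {
  import java.io.File
  import java.net.URL
  import scala.sys.process._

  def demo(): Unit = {
    val out = new File("out.txt")

    val wrote: Int    = ("echo hello" #> out).!   // overwrite the file with the command's output
    val appended: Int = ("echo again" #>> out).!  // append instead of overwriting
    val counted: Int  = ("wc -l" #< out).!        // feed the file to the command's standard input

    // A URL can act as a Source directly, with no intervening process.
    val saved: Int = (new URL("https://www.scala-lang.org/") #> new File("scala-lang.html")).!

    println(s"wrote=$wrote appended=$appended counted=$counted saved=$saved")
  }
}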
*/ - private[process] class Simple(p: JProcessBuilder) extends AbstractBuilder { - override def run(io: ProcessIO): Process = { - val process = p.start() // start the external process - import io._ - - // spawn threads that process the input, output, and error streams using the functions defined in `io` - val inThread = Spawn(writeInput(process.getOutputStream), daemon = true) - val outThread = Spawn(processOutput(process.getInputStream), daemonizeThreads) - val errorThread = - if (p.redirectErrorStream) Nil - else List(Spawn(processError(process.getErrorStream), daemonizeThreads)) - - new SimpleProcess(process, inThread, outThread :: errorThread) - } - override def toString = p.command.toString - override def canPipeTo = true - } - - private[scala] abstract class AbstractBuilder extends ProcessBuilder with Sink with Source { - protected def toSource = this - protected def toSink = this - - def #|(other: ProcessBuilder): ProcessBuilder = { - require(other.canPipeTo, "Piping to multiple processes is not supported.") - new PipedBuilder(this, other, false) - } - def #||(other: ProcessBuilder): ProcessBuilder = new OrBuilder(this, other) - def #&&(other: ProcessBuilder): ProcessBuilder = new AndBuilder(this, other) - def ###(other: ProcessBuilder): ProcessBuilder = new SequenceBuilder(this, other) - - def run(): Process = run(connectInput = false) - def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput)) - def run(log: ProcessLogger): Process = run(log, connectInput = false) - def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(connectInput, log)) - - def !! = slurp(None, withIn = false) - def !!(log: ProcessLogger) = slurp(Some(log), withIn = false) - def !!< = slurp(None, withIn = true) - def !!<(log: ProcessLogger) = slurp(Some(log), withIn = true) - - def lineStream: Stream[String] = lineStream(withInput = false, nonZeroException = true, None) - def lineStream(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = true, Some(log)) - def lineStream_! : Stream[String] = lineStream(withInput = false, nonZeroException = false, None) - def lineStream_!(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = false, Some(log)) - - def ! = run(connectInput = false).exitValue() - def !(io: ProcessIO) = run(io).exitValue() - def !(log: ProcessLogger) = runBuffered(log, connectInput = false) - def !< = run(connectInput = true).exitValue() - def !<(log: ProcessLogger) = runBuffered(log, connectInput = true) - - /** Constructs a new builder which runs this command with all input/output threads marked - * as daemon threads. This allows the creation of a long running process while still - * allowing the JVM to exit normally. - * - * Note: not in the public API because it's not fully baked, but I need the capability - * for fsc. - */ - def daemonized(): ProcessBuilder = new DaemonBuilder(this) - - private[this] def slurp(log: Option[ProcessLogger], withIn: Boolean): String = { - val buffer = new StringBuffer - val code = this ! 
BasicIO(withIn, buffer, log) - - if (code == 0) buffer.toString - else scala.sys.error("Nonzero exit value: " + code) - } - - private[this] def lineStream( - withInput: Boolean, - nonZeroException: Boolean, - log: Option[ProcessLogger] - ): Stream[String] = { - val streamed = Streamed[String](nonZeroException) - val process = run(BasicIO(withInput, streamed.process, log)) - - Spawn(streamed done process.exitValue()) - streamed.stream() - } - - private[this] def runBuffered(log: ProcessLogger, connectInput: Boolean) = - log buffer run(log, connectInput).exitValue() - - def canPipeTo = false - def hasExitValue = true - } - - private[process] class URLImpl(url: URL) extends URLBuilder with Source { - protected def toSource = new URLInput(url) - } - private[process] class FileImpl(base: File) extends FileBuilder with Sink with Source { - protected def toSource = new FileInput(base) - protected def toSink = new FileOutput(base, false) - - def #<<(f: File): ProcessBuilder = #<<(new FileInput(f)) - def #<<(u: URL): ProcessBuilder = #<<(new URLInput(u)) - def #<<(s: => InputStream): ProcessBuilder = #<<(new IStreamBuilder(s, "")) - def #<<(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(b, new FileOutput(base, true), false) - } - - private[process] abstract class BasicBuilder extends AbstractBuilder { - protected[this] def checkNotThis(a: ProcessBuilder) = require(a != this, "Compound process '" + a + "' cannot contain itself.") - final def run(io: ProcessIO): Process = { - val p = createProcess(io) - p.start() - p - } - protected[this] def createProcess(io: ProcessIO): BasicProcess - } - - private[process] abstract class SequentialBuilder( - a: ProcessBuilder, - b: ProcessBuilder, - operatorString: String - ) extends BasicBuilder { - - checkNotThis(a) - checkNotThis(b) - override def toString = " ( " + a + " " + operatorString + " " + b + " ) " - } - - private[process] class PipedBuilder( - first: ProcessBuilder, - second: ProcessBuilder, - toError: Boolean - ) extends SequentialBuilder(first, second, if (toError) "#|!" else "#|") { - - override def createProcess(io: ProcessIO) = new PipedProcesses(first, second, io, toError) - } - - private[process] class AndBuilder( - first: ProcessBuilder, - second: ProcessBuilder - ) extends SequentialBuilder(first, second, "#&&") { - override def createProcess(io: ProcessIO) = new AndProcess(first, second, io) - } - - private[process] class OrBuilder( - first: ProcessBuilder, - second: ProcessBuilder - ) extends SequentialBuilder(first, second, "#||") { - override def createProcess(io: ProcessIO) = new OrProcess(first, second, io) - } - - private[process] class SequenceBuilder( - first: ProcessBuilder, - second: ProcessBuilder - ) extends SequentialBuilder(first, second, "###") { - override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io) - } -} diff --git a/tests/scala2-library/src/library/scala/sys/process/ProcessIO.scala b/tests/scala2-library/src/library/scala/sys/process/ProcessIO.scala deleted file mode 100644 index eedf667c88cf..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/ProcessIO.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys -package process - -import processInternal._ - -/** This class is used to control the I/O of every - * [[scala.sys.process.Process]]. 
The functions used to create it will be - * called with the process streams once it has been started. It might not be - * necessary to use `ProcessIO` directly -- - * [[scala.sys.process.ProcessBuilder]] can return the process output to the - * caller, or use a [[scala.sys.process.ProcessLogger]] which avoids direct - * interaction with a stream. One can even use the factories at `BasicIO` to - * create a `ProcessIO`, or use its helper methods when creating one's own - * `ProcessIO`. - * - * When creating a `ProcessIO`, it is important to ''close all streams'' when - * finished, since the JVM might use system resources to capture the process - * input and output, and will not release them unless the streams are - * explicitly closed. - * - * `ProcessBuilder` will call `writeInput`, `processOutput` and `processError` - * in separate threads, and if daemonizeThreads is true, they will all be - * marked as daemon threads. - * - * @param writeInput Function that will be called with the `OutputStream` to - * which all input to the process must be written. This will - * be called in a newly spawned thread. - * @param processOutput Function that will be called with the `InputStream` - * from which all normal output of the process must be - * read from. This will be called in a newly spawned - * thread. - * @param processError Function that will be called with the `InputStream` from - * which all error output of the process must be read from. - * This will be called in a newly spawned thread. - * @param daemonizeThreads Indicates whether the newly spawned threads that - * will run `processOutput`, `processError` and - * `writeInput` should be marked as daemon threads. - * @note Failure to close the passed streams may result in resource leakage. - */ -final class ProcessIO( - val writeInput: OutputStream => Unit, - val processOutput: InputStream => Unit, - val processError: InputStream => Unit, - val daemonizeThreads: Boolean -) { - def this(in: OutputStream => Unit, out: InputStream => Unit, err: InputStream => Unit) = this(in, out, err, false) - - /** Creates a new `ProcessIO` with a different handler for the process input. */ - def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError, daemonizeThreads) - - /** Creates a new `ProcessIO` with a different handler for the normal output. */ - def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError, daemonizeThreads) - - /** Creates a new `ProcessIO` with a different handler for the error output. */ - def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process, daemonizeThreads) - - /** Creates a new `ProcessIO`, with `daemonizeThreads` true. 
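// A hedged sketch of building a ProcessIO by hand, per the description above: each handler runs
// on its own thread and is responsible for closing the stream it receives. "ls" is a placeholder.
object ProcessIOSketch {
  import java.io.{ BufferedReader, InputStreamReader }
  import scala.sys.process._

  def demo(): Int = {
    var lineCount = 0
    val io = new ProcessIO(
      in  => in.close(),                 // no input is sent to the process
      out => {                           // count the lines of standard output
        val reader = new BufferedReader(new InputStreamReader(out))
        try Iterator.continually(reader.readLine()).takeWhile(_ != null).foreach(_ => lineCount += 1)
        finally reader.close()
      },
      err => err.close()                 // standard error is discarded
    )
    // exitValue() only returns once the I/O threads have finished, so lineCount is complete here.
    val exit = "ls".run(io).exitValue()
    println(s"exit=$exit lines=$lineCount")
    exit
  }
}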
*/ - def daemonized(): ProcessIO = new ProcessIO(writeInput, processOutput, processError, true) -} diff --git a/tests/scala2-library/src/library/scala/sys/process/ProcessImpl.scala b/tests/scala2-library/src/library/scala/sys/process/ProcessImpl.scala deleted file mode 100644 index a7afecf44007..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/ProcessImpl.scala +++ /dev/null @@ -1,258 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys -package process - -import processInternal._ -import java.io.{ PipedInputStream, PipedOutputStream } - -private[process] trait ProcessImpl { - self: Process.type => - - /** Runs provided code in a new Thread and returns the Thread instance. */ - private[process] object Spawn { - def apply(f: => Unit): Thread = apply(f, daemon = false) - def apply(f: => Unit, daemon: Boolean): Thread = { - val thread = new Thread() { override def run() = { f } } - thread.setDaemon(daemon) - thread.start() - thread - } - } - private[process] object Future { - def apply[T](f: => T): (Thread, () => T) = { - val result = new SyncVar[Either[Throwable, T]] - def run(): Unit = - try result.put(Right(f)) - catch { case e: Exception => result.put(Left(e)) } - - val t = Spawn(run()) - - (t, () => result.get match { - case Right(value) => value - case Left(exception) => throw exception - }) - } - } - - private[process] class AndProcess( - a: ProcessBuilder, - b: ProcessBuilder, - io: ProcessIO - ) extends SequentialProcess(a, b, io, _ == 0) - - private[process] class OrProcess( - a: ProcessBuilder, - b: ProcessBuilder, - io: ProcessIO - ) extends SequentialProcess(a, b, io, _ != 0) - - private[process] class ProcessSequence( - a: ProcessBuilder, - b: ProcessBuilder, - io: ProcessIO - ) extends SequentialProcess(a, b, io, _ => true) - - private[process] class SequentialProcess( - a: ProcessBuilder, - b: ProcessBuilder, - io: ProcessIO, - evaluateSecondProcess: Int => Boolean - ) extends CompoundProcess { - - protected[this] override def runAndExitValue() = { - val first = a.run(io) - runInterruptible(first.exitValue())(first.destroy()) flatMap { codeA => - if (evaluateSecondProcess(codeA)) { - val second = b.run(io) - runInterruptible(second.exitValue())(second.destroy()) - } - else Some(codeA) - } - } - } - - private[process] abstract class BasicProcess extends Process { - def start(): Unit - } - - private[process] abstract class CompoundProcess extends BasicProcess { - def isAlive() = processThread.isAlive() - def destroy() = destroyer() - def exitValue() = futureValue() getOrElse scala.sys.error("No exit code: process destroyed.") - def start() = { futureThread ;() } - - protected lazy val (processThread, (futureThread, futureValue), destroyer) = { - val code = new SyncVar[Option[Int]]() - val thread = Spawn { - var value: Option[Int] = None - try value = runAndExitValue() - finally code.put(value) - } - - ( - thread, - Future(code.get), // thread.join() - () => thread.interrupt() - ) - } - - /** Start and block until the exit value is available and then return it in Some. 
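// Spawn and Future above are private to the package, so this is only an illustrative re-creation
// of the same mechanism -- run a block on a fresh thread and read its result later via a SyncVar.
object SpawnSketch {
  import scala.concurrent.SyncVar

  def spawn[T](body: => T): (Thread, () => T) = {
    val result = new SyncVar[Either[Throwable, T]]
    val thread = new Thread() {
      override def run(): Unit =
        try result.put(Right(body))
        catch { case e: Exception => result.put(Left(e)) }
    }
    thread.start()
    (thread, () => result.get match {
      case Right(value)    => value
      case Left(exception) => throw exception
    })
  }

  def demo(): Unit = {
    val (_, value) = spawn { Thread.sleep(100); 42 }
    println(value())   // blocks until the spawned thread has produced its result
  }
}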
Return None if destroyed (use 'run')*/ - protected[this] def runAndExitValue(): Option[Int] - - protected[this] def runInterruptible[T](action: => T)(destroyImpl: => Unit): Option[T] = { - try Some(action) - catch onInterrupt { destroyImpl; None } - } - } - - private[process] class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess { - protected[this] override def runAndExitValue() = runAndExitValue(new PipeSource(a.toString), new PipeSink(b.toString)) - protected[this] def runAndExitValue(source: PipeSource, sink: PipeSink): Option[Int] = { - source connectOut sink - source.start() - sink.start() - - /** Release PipeSource, PipeSink and Process in the correct order. - * If once connect Process with Source or Sink, then the order of releasing them - * must be Source -> Sink -> Process, otherwise IOException will be thrown. */ - def releaseResources(so: PipeSource, sk: PipeSink, p: Process *) = { - so.release() - sk.release() - p foreach( _.destroy() ) - } - - val firstIO = - if (toError) defaultIO.withError(source.connectIn) - else defaultIO.withOutput(source.connectIn) - val secondIO = defaultIO.withInput(sink.connectOut) - - val second = - try b.run(secondIO) - catch onError { err => - releaseResources(source, sink) - throw err - } - val first = - try a.run(firstIO) - catch onError { err => - releaseResources(source, sink, second) - throw err - } - runInterruptible { - source.join() - val exit1 = first.exitValue() - val exit2 = second.exitValue() - // Since file redirection (e.g. #>) is implemented as a piped process, - // we ignore its exit value so cmd #> file doesn't always return 0. - if (b.hasExitValue) exit2 else exit1 - } { - releaseResources(source, sink, first, second) - } - } - } - - private[process] abstract class PipeThread(isSink: Boolean, labelFn: () => String) extends Thread { - def run(): Unit - - private[process] def runloop(src: InputStream, dst: OutputStream): Unit = { - try BasicIO.transferFully(src, dst) - catch ioFailure(ioHandler) - finally BasicIO close { - if (isSink) dst else src - } - } - private def ioHandler(e: IOException) { - println("I/O error " + e.getMessage + " for process: " + labelFn()) - e.printStackTrace() - } - } - - private[process] class PipeSource(label: => String) extends PipeThread(false, () => label) { - protected[this] val pipe = new PipedOutputStream - protected[this] val source = new LinkedBlockingQueue[Option[InputStream]] - override def run(): Unit = { - try { - source.take match { - case Some(in) => runloop(in, pipe) - case None => - } - } - catch onInterrupt(()) - finally BasicIO close pipe - } - def connectIn(in: InputStream): Unit = source add Some(in) - def connectOut(sink: PipeSink): Unit = sink connectIn pipe - def release(): Unit = { - interrupt() - source add None - join() - } - } - private[process] class PipeSink(label: => String) extends PipeThread(true, () => label) { - protected[this] val pipe = new PipedInputStream - protected[this] val sink = new LinkedBlockingQueue[Option[OutputStream]] - override def run(): Unit = { - try { - sink.take match { - case Some(out) => runloop(pipe, out) - case None => - } - } - catch onInterrupt(()) - finally BasicIO close pipe - } - def connectOut(out: OutputStream): Unit = sink add Some(out) - def connectIn(pipeOut: PipedOutputStream): Unit = pipe connect pipeOut - def release(): Unit = { - interrupt() - sink add None - join() - } - } - - /** A thin wrapper around a java.lang.Process. 
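// A sketch of the exit-code rules encoded above: in a pipe the last command's code is reported,
// while a file redirection (a piped process whose sink has no exit value of its own) preserves
// the original command's code. "false", "cat" and the file name are placeholder assumptions.
object PipeExitCodeSketch {
  import java.io.File
  import scala.sys.process._

  def demo(): Unit = {
    val direct   = "false".!                               // 1: the command fails
    val piped    = ("false" #| "cat").!                    // 0: `cat`, the last command, succeeds
    val redirect = ("false" #> new File("ignored.txt")).!  // 1: redirection does not mask the failure
    println(s"direct=$direct piped=$piped redirect=$redirect")
  }
}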
`ioThreads` are the Threads created to do I/O. - * The implementation of `exitValue` waits until these threads die before returning. - */ - private[process] class DummyProcess(action: => Int) extends Process { - private[this] val (thread, value) = Future(action) - override def isAlive() = thread.isAlive() - override def exitValue() = value() - override def destroy() { } - } - - /** A thin wrapper around a java.lang.Process. `outputThreads` are the Threads created to read from the - * output and error streams of the process. `inputThread` is the Thread created to write to the input stream of - * the process. - * The implementation of `exitValue` interrupts `inputThread` and then waits until all I/O threads die before - * returning. */ - private[process] class SimpleProcess(p: JProcess, inputThread: Thread, outputThreads: List[Thread]) extends Process { - override def isAlive() = p.isAlive() - override def exitValue() = { - try p.waitFor() // wait for the process to terminate - finally inputThread.interrupt() // we interrupt the input thread to notify it that it can terminate - outputThreads foreach (_.join()) // this ensures that all output is complete before returning (waitFor does not ensure this) - - p.exitValue() - } - override def destroy() = { - try { - outputThreads foreach (_.interrupt()) // on destroy, don't bother consuming any more output - p.destroy() - } - finally inputThread.interrupt() - } - } - private[process] final class ThreadProcess(thread: Thread, success: SyncVar[Boolean]) extends Process { - override def isAlive() = thread.isAlive() - override def exitValue() = if (success.get) 0 else 1 // thread.join() - override def destroy() = thread.interrupt() - } -} diff --git a/tests/scala2-library/src/library/scala/sys/process/ProcessLogger.scala b/tests/scala2-library/src/library/scala/sys/process/ProcessLogger.scala deleted file mode 100644 index 60728940070c..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/ProcessLogger.scala +++ /dev/null @@ -1,101 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package sys -package process - -import java.io._ - -/** Encapsulates the output and error streams of a running process. This is used - * by [[scala.sys.process.ProcessBuilder]] when starting a process, as an - * alternative to [[scala.sys.process.ProcessIO]], which can be more difficult - * to use. Note that a `ProcessLogger` will be used to create a `ProcessIO` - * anyway. The object `BasicIO` has some functions to do that. - * - * Here is an example that counts the number of lines in the normal and error - * output of a process: - * {{{ - * import scala.sys.process._ - * - * var normalLines = 0 - * var errorLines = 0 - * val countLogger = ProcessLogger(line => normalLines += 1, - * line => errorLines += 1) - * "find /etc" ! countLogger - * }}} - * - * @see [[scala.sys.process.ProcessBuilder]] - */ -trait ProcessLogger { - /** Will be called with each line read from the process output stream. - */ - def out(s: => String): Unit - - /** Will be called with each line read from the process error stream. - */ - def err(s: => String): Unit - - /** If a process is begun with one of these `ProcessBuilder` methods: - * {{{ - * def !(log: ProcessLogger): Int - * def !<(log: ProcessLogger): Int - * }}} - * The run will be wrapped in a call to buffer. 
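// A minimal sketch of the ProcessLogger trait described above: stdout and stderr go to separate
// buffers while `!` still returns the exit code. The command is a placeholder assumption.
object ProcessLoggerSketch {
  import scala.collection.mutable.ListBuffer
  import scala.sys.process._

  def demo(): Unit = {
    val outLines = ListBuffer.empty[String]
    val errLines = ListBuffer.empty[String]
    val logger   = ProcessLogger(line => outLines += line, line => errLines += line)

    val exit = Seq("ls", "no-such-file", ".") ! logger
    println(s"exit=$exit, stdout=${outLines.size} lines, stderr=${errLines.size} lines")
  }
}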
This gives the logger - * an opportunity to set up and tear down buffering. At present the - * library implementations of `ProcessLogger` simply execute the body - * unbuffered. - */ - def buffer[T](f: => T): T -} - -/** A [[scala.sys.process.ProcessLogger]] that writes output to a file. */ -class FileProcessLogger(file: File) extends ProcessLogger with Closeable with Flushable { - private val writer = ( - new PrintWriter( - new BufferedWriter( - new OutputStreamWriter( - new FileOutputStream(file, true) - ) - ) - ) - ) - def out(s: => String): Unit = writer println s - def err(s: => String): Unit = writer println s - def buffer[T](f: => T): T = f - def close(): Unit = writer.close() - def flush(): Unit = writer.flush() -} - -/** Provides factories to create [[scala.sys.process.ProcessLogger]], which - * are used to capture output of [[scala.sys.process.ProcessBuilder]] commands - * when run. - */ -object ProcessLogger { - /** Creates a [[scala.sys.process.ProcessLogger]] that redirects output to a `java.io.File`. */ - def apply(file: File): FileProcessLogger = new FileProcessLogger(file) - - /** Creates a [[scala.sys.process.ProcessLogger]] that sends all output, standard and error, - * to the passed function. - */ - def apply(fn: String => Unit): ProcessLogger = apply(fn, fn) - - /** Creates a [[scala.sys.process.ProcessLogger]] that sends all output to the corresponding - * function. - * - * @param fout This function will receive standard output. - * - * @param ferr This function will receive standard error. - */ - def apply(fout: String => Unit, ferr: String => Unit): ProcessLogger = - new ProcessLogger { - def out(s: => String): Unit = fout(s) - def err(s: => String): Unit = ferr(s) - def buffer[T](f: => T): T = f - } -} diff --git a/tests/scala2-library/src/library/scala/sys/process/package.scala b/tests/scala2-library/src/library/scala/sys/process/package.scala deleted file mode 100644 index 440e62b6aaf3..000000000000 --- a/tests/scala2-library/src/library/scala/sys/process/package.scala +++ /dev/null @@ -1,262 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// Developer note: -// scala -J-Dscala.process.debug -// for process debugging output. -// -package scala.sys { - /** This package handles the execution of external processes. The contents of - * this package can be divided in three groups, according to their - * responsibilities: - * - * - Indicating what to run and how to run it. - * - Handling a process input and output. - * - Running the process. - * - * For simple uses, the only group that matters is the first one. Running an - * external command can be as simple as `"ls".!`, or as complex as building a - * pipeline of commands such as this: - * - * {{{ - * import scala.sys.process._ - * "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lineStream - * }}} - * - * We describe below the general concepts and architecture of the package, - * and then take a closer look at each of the categories mentioned above. - * - * ==Concepts and Architecture== - * - * The underlying basis for the whole package is Java's `Process` and - * `ProcessBuilder` classes. While there's no need to use these Java classes, - * they impose boundaries on what is possible. One cannot, for instance, - * retrieve a ''process id'' for whatever is executing. 
- * - * When executing an external process, one can provide a command's name, - * arguments to it, the directory in which it will be executed and what - * environment variables will be set. For each executing process, one can - * feed its standard input through a `java.io.OutputStream`, and read from - * its standard output and standard error through a pair of - * `java.io.InputStream`. One can wait until a process finishes execution and - * then retrieve its return value, or one can kill an executing process. - * Everything else must be built on those features. - * - * This package provides a DSL for running and chaining such processes, - * mimicking Unix shells ability to pipe output from one process to the input - * of another, or control the execution of further processes based on the - * return status of the previous one. - * - * In addition to this DSL, this package also provides a few ways of - * controlling input and output of these processes, going from simple and - * easy to use to complex and flexible. - * - * When processes are composed, a new `ProcessBuilder` is created which, when - * run, will execute the `ProcessBuilder` instances it is composed of - * according to the manner of the composition. If piping one process to - * another, they'll be executed simultaneously, and each will be passed a - * `ProcessIO` that will copy the output of one to the input of the other. - * - * ==What to Run and How== - * - * The central component of the process execution DSL is the - * [[scala.sys.process.ProcessBuilder]] trait. It is `ProcessBuilder` that - * implements the process execution DSL, that creates the - * [[scala.sys.process.Process]] that will handle the execution, and return - * the results of such execution to the caller. We can see that DSL in the - * introductory example: `#|`, `#&&` and `#!!` are methods on - * `ProcessBuilder` used to create a new `ProcessBuilder` through - * composition. - * - * One creates a `ProcessBuilder` either through factories on the - * [[scala.sys.process.Process]]'s companion object, or through implicit - * conversions available in this package object itself. Implicitly, each - * process is created either out of a `String`, with arguments separated by - * spaces -- no escaping of spaces is possible -- or out of a - * [[scala.collection.Seq]], where the first element represents the command - * name, and the remaining elements are arguments to it. In this latter case, - * arguments may contain spaces. - * - * To further control what how the process will be run, such as specifying - * the directory in which it will be run, see the factories on - * [[scala.sys.process.Process]]'s object companion. - * - * Once the desired `ProcessBuilder` is available, it can be executed in - * different ways, depending on how one desires to control its I/O, and what - * kind of result one wishes for: - * - * - Return status of the process (`!` methods) - * - Output of the process as a `String` (`!!` methods) - * - Continuous output of the process as a `Stream[String]` (`lineStream` methods) - * - The `Process` representing it (`run` methods) - * - * Some simple examples of these methods: - * {{{ - * import scala.sys.process._ - * - * // This uses ! to get the exit code - * def fileExists(name: String) = Seq("test", "-f", name).! == 0 - * - * // This uses !! to get the whole result as a string - * val dirContents = "ls".!! 
- * - * // This "fire-and-forgets" the method, which can be lazily read through - * // a Stream[String] - * def sourceFilesAt(baseDir: String): Stream[String] = { - * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f") - * cmd.lineStream - * } - * }}} - * - * We'll see more details about controlling I/O of the process in the next - * section. - * - * ==Handling Input and Output== - * - * In the underlying Java model, once a `Process` has been started, one can - * get `java.io.InputStream` and `java.io.OutputStream` representing its - * output and input respectively. That is, what one writes to an - * `OutputStream` is turned into input to the process, and the output of a - * process can be read from an `InputStream` -- of which there are two, one - * representing normal output, and the other representing error output. - * - * This model creates a difficulty, which is that the code responsible for - * actually running the external processes is the one that has to take - * decisions about how to handle its I/O. - * - * This package presents an alternative model: the I/O of a running process - * is controlled by a [[scala.sys.process.ProcessIO]] object, which can be - * passed _to_ the code that runs the external process. A `ProcessIO` will - * have direct access to the java streams associated with the process I/O. It - * must, however, close these streams afterwards. - * - * Simpler abstractions are available, however. The components of this - * package that handle I/O are: - * - * - [[scala.sys.process.ProcessIO]]: provides the low level abstraction. - * - [[scala.sys.process.ProcessLogger]]: provides a higher level abstraction - * for output, and can be created through its object companion - * - [[scala.sys.process.BasicIO]]: a library of helper methods for the - * creation of `ProcessIO`. - * - This package object itself, with a few implicit conversions. - * - * Some examples of I/O handling: - * {{{ - * import scala.sys.process._ - * - * // An overly complex way of computing size of a compressed file - * def gzFileSize(name: String) = { - * val cat = Seq("zcat", name) - * var count = 0 - * def byteCounter(input: java.io.InputStream) = { - * while(input.read() != -1) count += 1 - * input.close() - * } - * val p = cat run new ProcessIO(_.close(), byteCounter, _.close()) - * p.exitValue() - * count - * } - * - * // This "fire-and-forgets" the method, which can be lazily read through - * // a Stream[String], and accumulates all errors on a StringBuffer - * def sourceFilesAt(baseDir: String): (Stream[String], StringBuffer) = { - * val buffer = new StringBuffer() - * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f") - * val lineStream = cmd lineStream_! ProcessLogger(buffer append _) - * (lineStream, buffer) - * } - * }}} - * - * Instances of the java classes `java.io.File` and `java.net.URL` can both - * be used directly as input to other processes, and `java.io.File` can be - * used as output as well. One can even pipe one to the other directly - * without any intervening process, though that's not a design goal or - * recommended usage. For example, the following code will copy a web page to - * a file: - * {{{ - * import java.io.File - * import java.net.URL - * import scala.sys.process._ - * new URL("http://www.scala-lang.org/") #> new File("scala-lang.html") ! - * }}} - * - * More information about the other ways of controlling I/O can be found - * in the Scaladoc for the associated objects, traits and classes. 
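// A sketch tying the run-related pieces together: a Process factory with a working directory and
// an extra environment variable, a non-blocking run(), and exitValue()/destroy(). The commands,
// directory and variable name are placeholder assumptions.
object RunningSketch {
  import java.io.File
  import scala.sys.process._

  def demo(): Unit = {
    val builder = Process(Seq("ls", "-a"), new File("/tmp"), "MY_FLAG" -> "1")

    val handle: Process = builder.run()   // starts the command and returns immediately
    val exit: Int = handle.exitValue()    // blocks until the command terminates
    println(s"exit=$exit")

    val longRunning = Process(Seq("sleep", "60")).run()
    longRunning.destroy()                 // kill the external process and close its streams
  }
}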
- * - * ==Running the Process== - * - * Paradoxically, this is the simplest component of all, and the one least - * likely to be interacted with. It consists solely of - * [[scala.sys.process.Process]], and it provides only two methods: - * - * - `exitValue()`: blocks until the process exit, and then returns the exit - * value. This is what happens when one uses the `!` method of - * `ProcessBuilder`. - * - `destroy()`: this will kill the external process and close the streams - * associated with it. - */ - package object process extends ProcessImplicits { - /** The arguments passed to `java` when creating this process */ - def javaVmArguments: List[String] = { - import scala.collection.JavaConverters._ - - java.lang.management.ManagementFactory.getRuntimeMXBean.getInputArguments.asScala.toList - } - /** The input stream of this process */ - def stdin = java.lang.System.in - /** The output stream of this process */ - def stdout = java.lang.System.out - /** The error stream of this process */ - def stderr = java.lang.System.err - } - // private val shell: String => Array[String] = - // if (isWin) Array("cmd.exe", "/C", _) - // else Array("sh", "-c", _) - - package process { - // These are in a nested object instead of at the package level - // due to the issues described in tickets #3160 and #3836. - private[process] object processInternal { - final val processDebug = props contains "scala.process.debug" - dbg("Initializing process package.") - - type =?>[-A, +B] = PartialFunction[A, B] - type Closeable = java.io.Closeable - type File = java.io.File - type IOException = java.io.IOException - type InterruptedIOException = java.io.InterruptedIOException - type InputStream = java.io.InputStream - type JProcess = java.lang.Process - type JProcessBuilder = java.lang.ProcessBuilder - type LinkedBlockingQueue[T] = java.util.concurrent.LinkedBlockingQueue[T] - type OutputStream = java.io.OutputStream - type SyncVar[T] = scala.concurrent.SyncVar[T] - type URL = java.net.URL - - def onError[T](handler: Throwable => T): Throwable =?> T = { - case e @ _ => handler(e) - } - - def onIOInterrupt[T](handler: => T): Throwable =?> T = { - case _: InterruptedIOException => handler - } - - def onInterrupt[T](handler: => T): Throwable =?> T = { - case _: InterruptedException => handler - } - - def ioFailure[T](handler: IOException => T): Throwable =?> T = { - case e: IOException => handler(e) - } - - def dbg(msgs: Any*) = if (processDebug) { - Console.println("[process] " + (msgs mkString " ")) - } - } - } -} diff --git a/tests/scala2-library/src/library/scala/throws.scala b/tests/scala2-library/src/library/scala/throws.scala deleted file mode 100644 index 5a5dd9a1f56b..000000000000 --- a/tests/scala2-library/src/library/scala/throws.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** - * Annotation for specifying the exceptions thrown by a method. 
- * For example: - * {{{ - * class Reader(fname: String) { - * private val in = new BufferedReader(new FileReader(fname)) - * @throws[IOException]("if the file doesn't exist") - * def read() = in.read() - * } - * }}} - * - * @author Nikolay Mihaylov - * @version 1.0, 19/05/2006 - * @since 2.1 - */ -class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation { - def this(clazz: Class[T]) = this("") -} diff --git a/tests/scala2-library/src/library/scala/transient.scala b/tests/scala2-library/src/library/scala/transient.scala deleted file mode 100644 index ec87439093bc..000000000000 --- a/tests/scala2-library/src/library/scala/transient.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.annotation.meta._ - -@field -class transient extends scala.annotation.StaticAnnotation diff --git a/tests/scala2-library/src/library/scala/unchecked.scala b/tests/scala2-library/src/library/scala/unchecked.scala deleted file mode 100644 index 9dff6a9ee627..000000000000 --- a/tests/scala2-library/src/library/scala/unchecked.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** An annotation to designate that the annotated entity - * should not be considered for additional compiler checks. - * Specific applications include annotating the subject of - * a match expression to suppress exhaustiveness warnings, and - * annotating a type argument in a match case to suppress - * unchecked warnings. - * - * Such suppression should be used with caution, without which - * one may encounter [[scala.MatchError]] or [[java.lang.ClassCastException]] - * at runtime. In most cases one can and should address the - * warning instead of suppressing it. - * - * {{{ - * object Test extends App { - * // This would normally warn "match is not exhaustive" - * // because `None` is not covered. - * def f(x: Option[String]) = (x: @unchecked) match { case Some(y) => y } - * // This would normally warn "type pattern is unchecked" - * // but here will blindly cast the head element to String. - * def g(xs: Any) = xs match { case x: List[String @unchecked] => x.head } - * } - * }}} - * - * @since 2.4 - */ -class unchecked extends scala.annotation.Annotation {} diff --git a/tests/scala2-library/src/library/scala/util/DynamicVariable.scala b/tests/scala2-library/src/library/scala/util/DynamicVariable.scala deleted file mode 100644 index 963fe1c49773..000000000000 --- a/tests/scala2-library/src/library/scala/util/DynamicVariable.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util - -import java.lang.InheritableThreadLocal - -/** `DynamicVariables` provide a binding mechanism where the current - * value is found through dynamic scope, but where access to the - * variable itself is resolved through static scope. - * - * The current value can be retrieved with the value method. New values - * should be pushed using the `withValue` method. 
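// A short sketch of the withValue pattern just described: the new binding is visible only while
// the thunk runs, after which the previous value is restored. The variable name is a placeholder.
object DynamicVariableSketch {
  import scala.util.DynamicVariable

  val requestId = new DynamicVariable[String]("none")

  def handler(): String = s"handling request ${requestId.value}"

  def demo(): Unit = {
    println(handler())                  // handling request none
    requestId.withValue("req-42") {
      println(handler())                // handling request req-42
    }
    println(handler())                  // handling request none again
  }
}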
Values pushed via - * `withValue` only stay valid while the `withValue`'s second argument, a - * parameterless closure, executes. When the second argument finishes, - * the variable reverts to the previous value. - * - * {{{ - * someDynamicVariable.withValue(newValue) { - * // ... code called in here that calls value ... - * // ... will be given back the newValue ... - * } - * }}} - * - * Each thread gets its own stack of bindings. When a - * new thread is created, the `DynamicVariable` gets a copy - * of the stack of bindings from the parent thread, and - * from then on the bindings for the new thread - * are independent of those for the original thread. - * - * @author Lex Spoon - * @version 1.1, 2007-5-21 - */ -class DynamicVariable[T](init: T) { - private val tl = new InheritableThreadLocal[T] { - override def initialValue = init.asInstanceOf[T with AnyRef] - } - - /** Retrieve the current value */ - def value: T = tl.get.asInstanceOf[T] - - /** Set the value of the variable while executing the specified - * thunk. - * - * @param newval The value to which to set the variable - * @param thunk The code to evaluate under the new setting - */ - def withValue[S](newval: T)(thunk: => S): S = { - val oldval = value - tl set newval - - try thunk - finally tl set oldval - } - - /** Change the currently bound value, discarding the old value. - * Usually withValue() gives better semantics. - */ - def value_=(newval: T) = tl set newval - - override def toString: String = "DynamicVariable(" + value + ")" -} diff --git a/tests/scala2-library/src/library/scala/util/Either.scala b/tests/scala2-library/src/library/scala/util/Either.scala deleted file mode 100644 index 5833cbf6828d..000000000000 --- a/tests/scala2-library/src/library/scala/util/Either.scala +++ /dev/null @@ -1,759 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util - -/** Represents a value of one of two possible types (a disjoint union.) - * An instance of `Either` is an instance of either [[scala.util.Left]] or [[scala.util.Right]]. - * - * A common use of `Either` is as an alternative to [[scala.Option]] for dealing - * with possibly missing values. In this usage, [[scala.None]] is replaced - * with a [[scala.util.Left]] which can contain useful information. - * [[scala.util.Right]] takes the place of [[scala.Some]]. Convention dictates - * that `Left` is used for failure and `Right` is used for success. - * - * For example, you could use `Either[String, Int]` to indicate whether a - * received input is a `String` or an `Int`. - * - * {{{ - * import scala.io.StdIn._ - * val in = readLine("Type Either a string or an Int: ") - * val result: Either[String,Int] = - * try Right(in.toInt) - * catch { - * case e: NumberFormatException => Left(in) - * } - * - * result match { - * case Right(x) => s"You passed me the Int: $x, which I will increment. $x + 1 = ${x+1}" - * case Left(x) => s"You passed me the String: $x" - * } - * }}} - * - * `Either` is right-biased, which means that `Right` is assumed to be the default case to - * operate on. 
If it is `Left`, operations like `map` and `flatMap` return the `Left` value unchanged: - * - * {{{ - * def doubled(i: Int) = i * 2 - * Right(42).map(doubled) // Right(84) - * Left(42).map(doubled) // Left(42) - * }}} - * - * Since `Either` defines the methods `map` and `flatMap`, it can also be used in for comprehensions: - * {{{ - * val right1 = Right(1) : Right[Double, Int] - * val right2 = Right(2) - * val right3 = Right(3) - * val left23 = Left(23.0) : Left[Double, Int] - * val left42 = Left(42.0) - * - * for { - * x <- right1 - * y <- right2 - * z <- right3 - * } yield x + y + z // Right(6) - * - * for { - * x <- right1 - * y <- right2 - * z <- left23 - * } yield x + y + z // Left(23.0) - * - * for { - * x <- right1 - * y <- left23 - * z <- right2 - * } yield x + y + z // Left(23.0) - * - * // Guard expressions are not supported: - * for { - * i <- right1 - * if i > 0 - * } yield i - * // error: value withFilter is not a member of Right[Double,Int] - * - * // Similarly, refutable patterns are not supported: - * for (x: Int <- right1) yield x - * // error: value withFilter is not a member of Right[Double,Int] - * }}} - * - * Since `for` comprehensions use `map` and `flatMap`, the types - * of function parameters used in the expression must be inferred. - * These types are constrained by the `Either` values. In particular, - * because of right-biasing, `Left` values may require an explicit - * type argument for type parameter `B`, the right value. Otherwise, - * it might be inferred as `Nothing`. - * - * {{{ - * for { - * x <- left23 - * y <- right1 - * z <- left42 // type at this position: Either[Double, Nothing] - * } yield x + y + z - * // ^ - * // error: ambiguous reference to overloaded definition, - * // both method + in class Int of type (x: Char)Int - * // and method + in class Int of type (x: Byte)Int - * // match argument types (Nothing) - * - * for (x <- right2 ; y <- left23) yield x + y // Left(23.0) - * for (x <- right2 ; y <- left42) yield x + y // error - * - * for { - * x <- right1 - * y <- left42 // type at this position: Either[Double, Nothing] - * z <- left23 - * } yield x + y + z - * // Left(42.0), but unexpectedly a `Either[Double,String]` - * }}} - * - * @author Tony Morris, Workingmouse - * @version 2.0, 2016-07-15 - * @since 2.7 - */ -sealed abstract class Either[+A, +B] extends Product with Serializable { - /** Projects this `Either` as a `Left`. - * - * This allows for-comprehensions over the left side of `Either` instances, - * reversing `Either`'s usual right-bias. - * - * For example {{{ - * for (s <- Left("flower").left) yield s.length // Left(6) - * }}} - * - * Continuing the analogy with [[scala.Option]], a `LeftProjection` declares - * that `Left` should be analogous to `Some` in some code. 
- * - * {{{ - * // using Option - * def interactWithDB(x: Query): Option[Result] = - * try Some(getResultFromDatabase(x)) - * catch { - * case _: SQLException => None - * } - * - * // this will only be executed if interactWithDB returns a Some - * val report = for (result <- interactWithDB(someQuery)) yield generateReport(result) - * report match { - * case Some(r) => send(r) - * case None => log("report not generated, not sure why...") - * } - * - * // using Either - * def interactWithDB(x: Query): Either[Exception, Result] = - * try Right(getResultFromDatabase(x)) - * catch { - * case e: SQLException => Left(e) - * } - * - * // run a report only if interactWithDB returns a Right - * val report = for (result <- interactWithDB(someQuery)) yield generateReport(result) - * report match { - * case Right(r) => send(r) - * case Left(e) => log(s"report not generated, reason was $e") - * } - * // only report errors - * for (e <- interactWithDB(someQuery).left) log(s"query failed, reason was $e") - * }}} - */ - def left = Either.LeftProjection(this) - - /** Projects this `Either` as a `Right`. - * - * Because `Either` is right-biased, this method is not normally needed. - */ - def right = Either.RightProjection(this) - - /** Applies `fa` if this is a `Left` or `fb` if this is a `Right`. - * - * @example {{{ - * val result = util.Try("42".toInt).toEither - * result.fold( - * e => s"Operation failed with $e", - * v => s"Operation produced value: $v" - * ) - * }}} - * - * @param fa the function to apply if this is a `Left` - * @param fb the function to apply if this is a `Right` - * @return the results of applying the function - */ - def fold[C](fa: A => C, fb: B => C): C = this match { - case Right(b) => fb(b) - case Left(a) => fa(a) - } - - /** If this is a `Left`, then return the left value in `Right` or vice versa. - * - * @example {{{ - * val left: Either[String, Int] = Left("left") - * val right: Either[Int, String] = left.swap // Result: Right("left") - * }}} - * @example {{{ - * val right = Right(2) - * val left = Left(3) - * for { - * r1 <- right - * r2 <- left.swap - * } yield r1 * r2 // Right(6) - * }}} - */ - def swap: Either[B, A] = this match { - case Left(a) => Right(a) - case Right(b) => Left(b) - } - - /** Joins an `Either` through `Right`. - * - * This method requires that the right side of this `Either` is itself - * an `Either` type. That is, this must be some type like: {{{ - * Either[A, Either[A, C]] - * }}} (which respects the type parameter bounds, shown below.) - * - * If this instance is a `Right[Either[A, C]]` then the contained `Either[A, C]` - * will be returned, otherwise this value will be returned unmodified. - * - * @example {{{ - * Right[String, Either[String, Int]](Right(12)).joinRight // Result: Right(12) - * Right[String, Either[String, Int]](Left("flower")).joinRight // Result: Left("flower") - * Left[String, Either[String, Int]]("flower").joinRight // Result: Left("flower") - * }}} - * - * This method, and `joinLeft`, are analogous to `Option#flatten` - */ - def joinRight[A1 >: A, B1 >: B, C](implicit ev: B1 <:< Either[A1, C]): Either[A1, C] = this match { - case Right(b) => b - case _ => this.asInstanceOf[Either[A1, C]] - } - - /** Joins an `Either` through `Left`. - * - * This method requires that the left side of this `Either` is itself an - * `Either` type. That is, this must be some type like: {{{ - * Either[Either[C, B], B] - * }}} (which respects the type parameter bounds, shown below.) 
- * - * If this instance is a `Left[Either[C, B]]` then the contained `Either[C, B]` - * will be returned, otherwise this value will be returned unmodified. - * - * {{{ - * Left[Either[Int, String], String](Right("flower")).joinLeft // Result: Right("flower") - * Left[Either[Int, String], String](Left(12)).joinLeft // Result: Left(12) - * Right[Either[Int, String], String]("daisy").joinLeft // Result: Right("daisy") - * }}} - * - * This method, and `joinRight`, are analogous to `Option#flatten`. - */ - def joinLeft[A1 >: A, B1 >: B, C](implicit ev: A1 <:< Either[C, B1]): Either[C, B1] = this match { - case Left(a) => a - case _ => this.asInstanceOf[Either[C, B1]] - } - - /** Executes the given side-effecting function if this is a `Right`. - * - * {{{ - * Right(12).foreach(println) // prints "12" - * Left(12).foreach(println) // doesn't print - * }}} - * @param f The side-effecting function to execute. - */ - def foreach[U](f: B => U): Unit = this match { - case Right(b) => f(b) - case _ => - } - - /** Returns the value from this `Right` or the given argument if this is a `Left`. - * - * {{{ - * Right(12).getOrElse(17) // 12 - * Left(12).getOrElse(17) // 17 - * }}} - */ - def getOrElse[B1 >: B](or: => B1): B1 = this match { - case Right(b) => b - case _ => or - } - - /** Returns `true` if this is a `Right` and its value is equal to `elem` (as determined by `==`), - * returns `false` otherwise. - * - * {{{ - * // Returns true because value of Right is "something" which equals "something". - * Right("something") contains "something" - * - * // Returns false because value of Right is "something" which does not equal "anything". - * Right("something") contains "anything" - * - * // Returns false because it's not a Right value. - * Left("something") contains "something" - * }}} - * - * @param elem the element to test. - * @return `true` if this is a `Right` value equal to `elem`. - */ - final def contains[B1 >: B](elem: B1): Boolean = this match { - case Right(b) => b == elem - case _ => false - } - - /** Returns `true` if `Left` or returns the result of the application of - * the given predicate to the `Right` value. - * - * {{{ - * Right(12).forall(_ > 10) // true - * Right(7).forall(_ > 10) // false - * Left(12).forall(_ => false) // true - * }}} - */ - def forall(f: B => Boolean): Boolean = this match { - case Right(b) => f(b) - case _ => true - } - - /** Returns `false` if `Left` or returns the result of the application of - * the given predicate to the `Right` value. - * - * {{{ - * Right(12).exists(_ > 10) // true - * Right(7).exists(_ > 10) // false - * Left(12).exists(_ => true) // false - * }}} - */ - def exists(p: B => Boolean): Boolean = this match { - case Right(b) => p(b) - case _ => false - } - - /** Binds the given function across `Right`. - * - * @param f The function to bind across `Right`. - */ - def flatMap[A1 >: A, B1](f: B => Either[A1, B1]): Either[A1, B1] = this match { - case Right(b) => f(b) - case _ => this.asInstanceOf[Either[A1, B1]] - } - - /** The given function is applied if this is a `Right`. 
- * - * {{{ - * Right(12).map(x => "flower") // Result: Right("flower") - * Left(12).map(x => "flower") // Result: Left(12) - * }}} - */ - def map[B1](f: B => B1): Either[A, B1] = this match { - case Right(b) => Right(f(b)) - case _ => this.asInstanceOf[Either[A, B1]] - } - - /** Returns `Right` with the existing value of `Right` if this is a `Right` - * and the given predicate `p` holds for the right value, - * or `Left(zero)` if this is a `Right` and the given predicate `p` does not hold for the right value, - * or `Left` with the existing value of `Left` if this is a `Left`. - * - * {{{ - * Right(12).filterOrElse(_ > 10, -1) // Right(12) - * Right(7).filterOrElse(_ > 10, -1) // Left(-1) - * Left(7).filterOrElse(_ => false, -1) // Left(7) - * }}} - */ - def filterOrElse[A1 >: A](p: B => Boolean, zero: => A1): Either[A1, B] = this match { - case Right(b) if !p(b) => Left(zero) - case _ => this - } - - /** Returns a `Seq` containing the `Right` value if - * it exists or an empty `Seq` if this is a `Left`. - * - * {{{ - * Right(12).toSeq // Seq(12) - * Left(12).toSeq // Seq() - * }}} - */ - def toSeq: collection.immutable.Seq[B] = this match { - case Right(b) => collection.immutable.Seq(b) - case _ => collection.immutable.Seq.empty - } - - /** Returns a `Some` containing the `Right` value - * if it exists or a `None` if this is a `Left`. - * - * {{{ - * Right(12).toOption // Some(12) - * Left(12).toOption // None - * }}} - */ - def toOption: Option[B] = this match { - case Right(b) => Some(b) - case _ => None - } - - def toTry(implicit ev: A <:< Throwable): Try[B] = this match { - case Right(b) => Success(b) - case Left(a) => Failure(a) - } - - /** Returns `true` if this is a `Left`, `false` otherwise. - * - * {{{ - * Left("tulip").isLeft // true - * Right("venus fly-trap").isLeft // false - * }}} - */ - def isLeft: Boolean - - /** Returns `true` if this is a `Right`, `false` otherwise. - * - * {{{ - * Left("tulip").isRight // false - * Right("venus fly-trap").isRight // true - * }}} - */ - def isRight: Boolean -} - -/** The left side of the disjoint union, as opposed to the [[scala.util.Right]] side. - * - * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 - */ -final case class Left[+A, +B](@deprecatedName('a, "2.12.0") value: A) extends Either[A, B] { - def isLeft = true - def isRight = false - - @deprecated("Use .value instead.", "2.12.0") def a: A = value -} - -/** The right side of the disjoint union, as opposed to the [[scala.util.Left]] side. - * - * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 - */ -final case class Right[+A, +B](@deprecatedName('b, "2.12.0") value: B) extends Either[A, B] { - def isLeft = false - def isRight = true - - @deprecated("Use .value instead.", "2.12.0") def b: B = value -} - -object Either { - - /** If the condition is satisfied, return the given `B` in `Right`, - * otherwise, return the given `A` in `Left`. - * - * {{{ - * val userInput: String = readLine() - * Either.cond( - * userInput.forall(_.isDigit) && userInput.size == 10, - * PhoneNumber(userInput), - * s"The input ($userInput) does not look like a phone number" - * }}} - */ - def cond[A, B](test: Boolean, right: => B, left: => A): Either[A, B] = - if (test) Right(right) else Left(left) - - /** Allows use of a `merge` method to extract values from Either instances - * regardless of whether they are Left or Right. 
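// A sketch combining several of the operations documented above: Either.cond builds a Left or
// Right from a test, filterOrElse rejects a Right that fails a predicate, fold collapses both
// sides, and toTry bridges to scala.util.Try when the left side is a Throwable. The port-parsing
// rule is a placeholder example.
object EitherSketch {
  import scala.util.Try

  def parsePort(s: String): Either[String, Int] =
    Try(s.toInt).toOption
      .toRight(s"not a number: $s")
      .filterOrElse(p => p >= 0 && p <= 65535, s"out of range: $s")

  def demo(): Unit = {
    println(parsePort("8080").fold(err => s"rejected: $err", port => s"listening on $port"))
    println(parsePort("99999"))   // Left(out of range: 99999)

    val asTry = Either.cond(1 + 1 == 2, "ok", new IllegalStateException("math is broken")).toTry
    println(asTry)                // Success(ok)
  }
}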
- * - * {{{ - * val l = Left(List(1)): Either[List[Int], Vector[Int]] - * val r = Right(Vector(1)): Either[List[Int], Vector[Int]] - * l.merge: Seq[Int] // List(1) - * r.merge: Seq[Int] // Vector(1) - * }}} - */ - implicit class MergeableEither[A](private val x: Either[A, A]) extends AnyVal { - def merge: A = x match { - case Right(a) => a - case Left(a) => a - } - } - - /** Projects an `Either` into a `Left`. - * - * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 - * @see [[scala.util.Either#left]] - */ - final case class LeftProjection[+A, +B](e: Either[A, B]) { - /** Returns the value from this `Left` or throws `java.util.NoSuchElementException` - * if this is a `Right`. - * - * {{{ - * Left(12).left.get // 12 - * Right(12).left.get // NoSuchElementException - * }}} - * - * @throws java.util.NoSuchElementException if the projection is [[scala.util.Right]] - */ - def get: A = e match { - case Left(a) => a - case _ => throw new NoSuchElementException("Either.left.get on Right") - } - - /** Executes the given side-effecting function if this is a `Left`. - * - * {{{ - * Left(12).left.foreach(x => println(x)) // prints "12" - * Right(12).left.foreach(x => println(x)) // doesn't print - * }}} - * @param f The side-effecting function to execute. - */ - def foreach[U](f: A => U): Unit = e match { - case Left(a) => f(a) - case _ => () - } - - /** Returns the value from this `Left` or the given argument if this is a `Right`. - * - * {{{ - * Left(12).left.getOrElse(17) // 12 - * Right(12).left.getOrElse(17) // 17 - * }}} - */ - def getOrElse[A1 >: A](or: => A1): A1 = e match { - case Left(a) => a - case _ => or - } - - /** Returns `true` if `Right` or returns the result of the application of - * the given function to the `Left` value. - * - * {{{ - * Left(12).left.forall(_ > 10) // true - * Left(7).left.forall(_ > 10) // false - * Right(12).left.forall(_ > 10) // true - * }}} - */ - def forall(@deprecatedName('f) p: A => Boolean): Boolean = e match { - case Left(a) => p(a) - case _ => true - } - - /** Returns `false` if `Right` or returns the result of the application of - * the given function to the `Left` value. - * - * {{{ - * Left(12).left.exists(_ > 10) // true - * Left(7).left.exists(_ > 10) // false - * Right(12).left.exists(_ > 10) // false - * }}} - */ - def exists(@deprecatedName('f) p: A => Boolean): Boolean = e match { - case Left(a) => p(a) - case _ => false - } - - /** Binds the given function across `Left`. - * - * {{{ - * Left(12).left.flatMap(x => Left("scala")) // Left("scala") - * Right(12).left.flatMap(x => Left("scala")) // Right(12) - * }}} - * @param f The function to bind across `Left`. - */ - def flatMap[A1, B1 >: B](f: A => Either[A1, B1]): Either[A1, B1] = e match { - case Left(a) => f(a) - case _ => e.asInstanceOf[Either[A1, B1]] - } - - /** Maps the function argument through `Left`. - * - * {{{ - * Left(12).left.map(_ + 2) // Left(14) - * Right[Int, Int](12).left.map(_ + 2) // Right(12) - * }}} - */ - def map[A1](f: A => A1): Either[A1, B] = e match { - case Left(a) => Left(f(a)) - case _ => e.asInstanceOf[Either[A1, B]] - } - - /** Returns `None` if this is a `Right` or if the given predicate - * `p` does not hold for the left value, otherwise, returns a `Left`. 
- * - * {{{ - * Left(12).left.filter(_ > 10) // Some(Left(12)) - * Left(7).left.filter(_ > 10) // None - * Right(12).left.filter(_ > 10) // None - * }}} - */ - def filter[B1](p: A => Boolean): Option[Either[A, B1]] = e match { - case x @ Left(a) if p(a) => Some(x.asInstanceOf[Either[A, B1]]) - case _ => None - } - - /** Returns a `Seq` containing the `Left` value if it exists or an empty - * `Seq` if this is a `Right`. - * - * {{{ - * Left(12).left.toSeq // Seq(12) - * Right(12).left.toSeq // Seq() - * }}} - */ - def toSeq: Seq[A] = e match { - case Left(a) => Seq(a) - case _ => Seq.empty - } - - /** Returns a `Some` containing the `Left` value if it exists or a - * `None` if this is a `Right`. - * - * {{{ - * Left(12).left.toOption // Some(12) - * Right(12).left.toOption // None - * }}} - */ - def toOption: Option[A] = e match { - case Left(a) => Some(a) - case _ => None - } - } - - /** Projects an `Either` into a `Right`. - * - * Because `Either` is already right-biased, this class is not normally needed. - * (It is retained in the library for now for easy cross-compilation between Scala - * 2.11 and 2.12.) - * - * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 - */ - final case class RightProjection[+A, +B](e: Either[A, B]) { - - /** Returns the value from this `Right` or throws - * `java.util.NoSuchElementException` if this is a `Left`. - * - * {{{ - * Right(12).right.get // 12 - * Left(12).right.get // NoSuchElementException - * }}} - * - * @throws java.util.NoSuchElementException if the projection is `Left`. - */ - def get: B = e match { - case Right(b) => b - case _ => throw new NoSuchElementException("Either.right.get on Left") - } - - /** Executes the given side-effecting function if this is a `Right`. - * - * {{{ - * Right(12).right.foreach(x => println(x)) // prints "12" - * Left(12).right.foreach(x => println(x)) // doesn't print - * }}} - * @param f The side-effecting function to execute. - */ - def foreach[U](f: B => U): Unit = e match { - case Right(b) => f(b) - case _ => () - } - - /** Returns the value from this `Right` or the given argument if this is a `Left`. - * - * {{{ - * Right(12).right.getOrElse(17) // 12 - * Left(12).right.getOrElse(17) // 17 - * }}} - */ - def getOrElse[B1 >: B](or: => B1): B1 = e match { - case Right(b) => b - case _ => or - } - - /** Returns `true` if `Left` or returns the result of the application of - * the given function to the `Right` value. - * - * {{{ - * Right(12).right.forall(_ > 10) // true - * Right(7).right.forall(_ > 10) // false - * Left(12).right.forall(_ > 10) // true - * }}} - */ - def forall(f: B => Boolean): Boolean = e match { - case Right(b) => f(b) - case _ => true - } - - /** Returns `false` if `Left` or returns the result of the application of - * the given function to the `Right` value. - * - * {{{ - * Right(12).right.exists(_ > 10) // true - * Right(7).right.exists(_ > 10) // false - * Left(12).right.exists(_ > 10) // false - * }}} - */ - def exists(@deprecatedName('f) p: B => Boolean): Boolean = e match { - case Right(b) => p(b) - case _ => false - } - - /** Binds the given function across `Right`. - * - * @param f The function to bind across `Right`. - */ - def flatMap[A1 >: A, B1](f: B => Either[A1, B1]): Either[A1, B1] = e match { - case Right(b) => f(b) - case _ => e.asInstanceOf[Either[A1, B1]] - } - - /** The given function is applied if this is a `Right`. 
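For contrast with the right-biased methods, a brief REPL-style sketch of how the left/right projections defined here behave; the sample values are illustrative:

  val ok: Either[String, Int]  = Right(12)
  val err: Either[String, Int] = Left("boom")

  ok.right.map(_ + 1)         // Right(13) -- same as ok.map(_ + 1) since Either is right-biased
  ok.left.getOrElse("none")   // "none"
  err.right.getOrElse(0)      // 0
  err.left.exists(_.nonEmpty) // true
  err.left.toOption           // Some("boom")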
- * - * {{{ - * Right(12).right.map(x => "flower") // Result: Right("flower") - * Left(12).right.map(x => "flower") // Result: Left(12) - * }}} - */ - def map[B1](f: B => B1): Either[A, B1] = e match { - case Right(b) => Right(f(b)) - case _ => e.asInstanceOf[Either[A, B1]] - } - - /** Returns `None` if this is a `Left` or if the - * given predicate `p` does not hold for the right value, - * otherwise, returns a `Right`. - * - * {{{ - * Right(12).right.filter(_ > 10) // Some(Right(12)) - * Right(7).right.filter(_ > 10) // None - * Left(12).right.filter(_ > 10) // None - * }}} - */ - def filter[A1](p: B => Boolean): Option[Either[A1, B]] = e match { - case Right(b) if p(b) => Some(Right(b)) - case _ => None - } - - /** Returns a `Seq` containing the `Right` value if - * it exists or an empty `Seq` if this is a `Left`. - * - * {{{ - * Right(12).right.toSeq // Seq(12) - * Left(12).right.toSeq // Seq() - * }}} - */ - def toSeq: Seq[B] = e match { - case Right(b) => Seq(b) - case _ => Seq.empty - } - - /** Returns a `Some` containing the `Right` value - * if it exists or a `None` if this is a `Left`. - * - * {{{ - * Right(12).right.toOption // Some(12) - * Left(12).right.toOption // None - * }}} - */ - def toOption: Option[B] = e match { - case Right(b) => Some(b) - case _ => None - } - } -} diff --git a/tests/scala2-library/src/library/scala/util/MurmurHash.scala b/tests/scala2-library/src/library/scala/util/MurmurHash.scala deleted file mode 100644 index cdc5c821fa6a..000000000000 --- a/tests/scala2-library/src/library/scala/util/MurmurHash.scala +++ /dev/null @@ -1,199 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util - -/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm - * (32 bit version); reference: https://github.com/aappleby/smhasher - * - * This is the hash used by collections and case classes (including - * tuples). - * - * @author Rex Kerr - * @version 2.9 - * @since 2.9 - */ - -import java.lang.Integer.{ rotateLeft => rotl } -import scala.collection.Iterator - -/** A class designed to generate well-distributed non-cryptographic - * hashes. It is designed to be passed to a collection's foreach method, - * or can take individual hash values with append. Its own hash code is - * set equal to the hash code of whatever it is hashing. - */ -@deprecated("use the object MurmurHash3 instead", "2.10.0") -class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) { - import MurmurHash._ - - private var h = startHash(seed) - private var c = hiddenMagicA - private var k = hiddenMagicB - private var hashed = false - private var hashvalue = h - - /** Begin a new hash using the same seed. */ - def reset() { - h = startHash(seed) - c = hiddenMagicA - k = hiddenMagicB - hashed = false - } - - /** Incorporate the hash value of one item. */ - def apply(t: T) { - h = extendHash(h,t.##,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Incorporate a known hash value. */ - def append(i: Int) { - h = extendHash(h,i,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Retrieve the hash value */ - def hash = { - if (!hashed) { - hashvalue = finalizeHash(h) - hashed = true - } - hashvalue - } - override def hashCode = hash -} - -/** An object designed to generate well-distributed non-cryptographic - * hashes. 
It is designed to hash a collection of integers; along with - * the integers to hash, it generates two magic streams of integers to - * increase the distribution of repetitive input sequences. Thus, - * three methods need to be called at each step (to start and to - * incorporate a new integer) to update the values. Only one method - * needs to be called to finalize the hash. - */ -@deprecated("use the object MurmurHash3 instead", "2.10.0") -// NOTE: Used by sbt 0.13.0-M2 and below -object MurmurHash { - // Magic values used for MurmurHash's 32 bit hash. - // Don't change these without consulting a hashing expert! - final private val visibleMagic = 0x971e137b - final private val hiddenMagicA = 0x95543787 - final private val hiddenMagicB = 0x2ad7eb25 - final private val visibleMixer = 0x52dce729 - final private val hiddenMixerA = 0x7b7d159c - final private val hiddenMixerB = 0x6bce6396 - final private val finalMixer1 = 0x85ebca6b - final private val finalMixer2 = 0xc2b2ae35 - - // Arbitrary values used for hashing certain classes - final private val seedString = 0xf7ca7fd2 - final private val seedArray = 0x3c074a61 - - /** The first 23 magic integers from the first stream are stored here */ - val storedMagicA = - Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray - - /** The first 23 magic integers from the second stream are stored here */ - val storedMagicB = - Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray - - /** Begin a new hash with a seed value. */ - def startHash(seed: Int) = seed ^ visibleMagic - - /** The initial magic integers in the first stream. */ - def startMagicA = hiddenMagicA - - /** The initial magic integer in the second stream. */ - def startMagicB = hiddenMagicB - - /** Incorporates a new value into an existing hash. - * - * @param hash the prior hash value - * @param value the new value to incorporate - * @param magicA a magic integer from the stream - * @param magicB a magic integer from a different stream - * @return the updated hash value - */ - def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int) = { - (hash ^ rotl(value*magicA,11)*magicB)*3 + visibleMixer - } - - /** Given a magic integer from the first stream, compute the next */ - def nextMagicA(magicA: Int) = magicA*5 + hiddenMixerA - - /** Given a magic integer from the second stream, compute the next */ - def nextMagicB(magicB: Int) = magicB*5 + hiddenMixerB - - /** Once all hashes have been incorporated, this performs a final mixing */ - def finalizeHash(hash: Int) = { - var i = (hash ^ (hash>>>16)) - i *= finalMixer1 - i ^= (i >>> 13) - i *= finalMixer2 - i ^= (i >>> 16) - i - } - - /** Compute a high-quality hash of an array */ - def arrayHash[@specialized T](a: Array[T]) = { - var h = startHash(a.length * seedArray) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j < a.length) { - h = extendHash(h, a(j).##, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 1 - } - finalizeHash(h) - } - - /** Compute a high-quality hash of a string */ - def stringHash(s: String) = { - var h = startHash(s.length * seedString) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j+1 < s.length) { - val i = (s.charAt(j)<<16) + s.charAt(j+1) - h = extendHash(h,i,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 2 - } - if (j < s.length) h = extendHash(h,s.charAt(j).toInt,c,k) - finalizeHash(h) - } - - /** Compute a hash that is symmetric in its arguments--that is, - * where the order of appearance of elements does not matter. 
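A quick sketch of both the convenience methods and the incremental (T => Unit) use of this deprecated API; the deprecation message points at MurmurHash3 as the replacement, and the names and seed below are illustrative:

  import scala.util.MurmurHash   // deprecated since 2.10; kept only for old sbt versions

  MurmurHash.stringHash("hello")            // Int hash of the string contents
  MurmurHash.arrayHash(Array(1, 2, 3))      // Int hash of the array contents

  val mh = new MurmurHash[Int](seed = 42)   // incremental use: the class is a T => Unit
  List(1, 2, 3).foreach(mh)                 // feed elements one by one
  mh.hash                                   // accumulated hash of the three elements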
- * This is useful for hashing sets, for example. - */ - def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = { - var a,b,n = 0 - var c = 1 - xs.seq.foreach(i => { - val h = i.## - a += h - b ^= h - if (h != 0) c *= h - n += 1 - }) - var h = startHash(seed * n) - h = extendHash(h, a, storedMagicA(0), storedMagicB(0)) - h = extendHash(h, b, storedMagicA(1), storedMagicB(1)) - h = extendHash(h, c, storedMagicA(2), storedMagicB(2)) - finalizeHash(h) - } -} diff --git a/tests/scala2-library/src/library/scala/util/Properties.scala b/tests/scala2-library/src/library/scala/util/Properties.scala deleted file mode 100644 index 3fc713ed2e5f..000000000000 --- a/tests/scala2-library/src/library/scala/util/Properties.scala +++ /dev/null @@ -1,231 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package util - -import java.io.{ IOException, PrintWriter } -import java.util.jar.Attributes.{ Name => AttributeName } - -/** Loads `library.properties` from the jar. */ -object Properties extends PropertiesTrait { - protected def propCategory = "library" - protected def pickJarBasedOn = classOf[Option[_]] - - /** Scala manifest attributes. - */ - val ScalaCompilerVersion = new AttributeName("Scala-Compiler-Version") -} - -private[scala] trait PropertiesTrait { - protected def propCategory: String // specializes the remainder of the values - protected def pickJarBasedOn: Class[_] // props file comes from jar containing this - - /** The name of the properties file */ - protected val propFilename = "/" + propCategory + ".properties" - - /** The loaded properties */ - protected lazy val scalaProps: java.util.Properties = { - val props = new java.util.Properties - val stream = pickJarBasedOn getResourceAsStream propFilename - if (stream ne null) - quietlyDispose(props load stream, stream.close) - - props - } - - private def quietlyDispose(action: => Unit, disposal: => Unit) = - try { action } - finally { - try { disposal } - catch { case _: IOException => } - } - - def propIsSet(name: String) = System.getProperty(name) != null - def propIsSetTo(name: String, value: String) = propOrNull(name) == value - def propOrElse(name: String, alt: String) = System.getProperty(name, alt) - def propOrEmpty(name: String) = propOrElse(name, "") - def propOrNull(name: String) = propOrElse(name, null) - def propOrNone(name: String) = Option(propOrNull(name)) - def propOrFalse(name: String) = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase) - def setProp(name: String, value: String) = System.setProperty(name, value) - def clearProp(name: String) = System.clearProperty(name) - - def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt - def envOrNone(name: String) = Option(System getenv name) - - def envOrSome(name: String, alt: Option[String]) = envOrNone(name) orElse alt - - // for values based on propFilename, falling back to System properties - def scalaPropOrElse(name: String, alt: String): String = scalaPropOrNone(name).getOrElse(alt) - def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "") - def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)).orElse(propOrNone("scala." + name)) - - /** The numeric portion of the runtime Scala version, if this is a final - * release. 
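The property and environment helpers above are thin wrappers over System.getProperty / System.getenv plus the library.properties file. A short sketch; actual values depend on the running JVM and classpath:

  import scala.util.Properties

  Properties.propOrElse("file.encoding", "UTF-8")   // system property with a fallback
  Properties.envOrNone("JAVA_HOME")                 // Option[String] for an environment variable
  Properties.scalaPropOrNone("version.number")      // read from library.properties on the classpath
  Properties.versionNumberString                    // e.g. "2.12.8", depending on the Scala jar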
If for instance the versionString says "version 2.9.0.final", - * this would return Some("2.9.0"). - * - * @return Some(version) if this is a final release build, None if - * it is an RC, Beta, etc. or was built from source, or if the version - * cannot be read. - */ - val releaseVersion = - for { - v <- scalaPropOrNone("maven.version.number") - if !(v endsWith "-SNAPSHOT") - } yield v - - /** The development Scala version, if this is not a final release. - * The precise contents are not guaranteed, but it aims to provide a - * unique repository identifier (currently the svn revision) in the - * fourth dotted segment if the running version was built from source. - * - * @return Some(version) if this is a non-final version, None if this - * is a final release or the version cannot be read. - */ - val developmentVersion = - for { - v <- scalaPropOrNone("maven.version.number") - if v endsWith "-SNAPSHOT" - ov <- scalaPropOrNone("version.number") - } yield ov - - /** Either the development or release version if known, otherwise - * the empty string. - */ - def versionNumberString = scalaPropOrEmpty("version.number") - - /** The version number of the jar this was loaded from plus "version " prefix, - * or "version (unknown)" if it cannot be determined. - */ - val versionString = "version " + scalaPropOrElse("version.number", "(unknown)") - val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2017, LAMP/EPFL and Lightbend, Inc.") - - /** This is the encoding to use reading in source files, overridden with -encoding. - * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. - */ - def sourceEncoding = scalaPropOrElse("file.encoding", "UTF-8") - def sourceReader = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader") - - /** This is the default text encoding, overridden (unreliably) with - * `JAVA_OPTS="-Dfile.encoding=Foo"` - */ - def encodingString = propOrElse("file.encoding", "UTF-8") - - /** The default end of line character. - */ - def lineSeparator = System.lineSeparator() - - /* Various well-known properties. */ - def javaClassPath = propOrEmpty("java.class.path") - def javaHome = propOrEmpty("java.home") - def javaVendor = propOrEmpty("java.vendor") - def javaVersion = propOrEmpty("java.version") - def javaVmInfo = propOrEmpty("java.vm.info") - def javaVmName = propOrEmpty("java.vm.name") - def javaVmVendor = propOrEmpty("java.vm.vendor") - def javaVmVersion = propOrEmpty("java.vm.version") - def javaSpecVersion = propOrEmpty("java.specification.version") - def javaSpecVendor = propOrEmpty("java.specification.vendor") - def javaSpecName = propOrEmpty("java.specification.name") - def osName = propOrEmpty("os.name") - def scalaHome = propOrEmpty("scala.home") - def tmpDir = propOrEmpty("java.io.tmpdir") - def userDir = propOrEmpty("user.dir") - def userHome = propOrEmpty("user.home") - def userName = propOrEmpty("user.name") - - /* Some derived values. */ - /** Returns `true` iff the underlying operating system is a version of Microsoft Windows. */ - def isWin = osName startsWith "Windows" - // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for - // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110. - /** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */ - def isMac = osName startsWith "Mac OS X" - /** Returns `true` iff the underlying operating system is a Linux distribution. 
*/ - def isLinux = osName startsWith "Linux" - - /* Some runtime values. */ - private[scala] def isAvian = javaVmName contains "Avian" - - private[scala] def coloredOutputEnabled: Boolean = propOrElse("scala.color", "auto") match { - case "auto" => System.console() != null && !isWin - case a if a.toLowerCase() == "true" => true - case _ => false - } - - // This is looking for javac, tools.jar, etc. - // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME, - // and finally the system property based javaHome. - def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) - - // private[scala] for 2.12 - private[this] def versionFor(command: String) = s"Scala $command $versionString -- $copyrightString" - - def versionMsg = versionFor(propCategory) - def scalaCmd = if (isWin) "scala.bat" else "scala" - def scalacCmd = if (isWin) "scalac.bat" else "scalac" - - /** Compares the given specification version to the specification version of the platform. - * - * @param version a specification version number (legacy forms acceptable) - * @return `true` if the specification version of the current runtime - * is equal to or higher than the version denoted by the given string. - * @throws NumberFormatException if the given string is not a version string - * - * @example {{{ - * // In this example, the runtime's Java specification is assumed to be at version 8. - * isJavaAtLeast("1.8") // true - * isJavaAtLeast("8") // true - * isJavaAtLeast("9") // false - * isJavaAtLeast("9.1") // false - * isJavaAtLeast("1.9") // throws - * }}} - */ - def isJavaAtLeast(version: String): Boolean = { - def versionOf(s: String, depth: Int): (Int, String) = - s.indexOf('.') match { - case 0 => - (-2, s.substring(1)) - case 1 if depth == 0 && s.charAt(0) == '1' => - val r0 = s.substring(2) - val (v, r) = versionOf(r0, 1) - val n = if (v > 8 || r0.isEmpty) -2 else v // accept 1.8, not 1.9 or 1. 
- (n, r) - case -1 => - val n = if (!s.isEmpty) s.toInt else if (depth == 0) -2 else 0 - (n, "") - case i => - val r = s.substring(i + 1) - val n = if (depth < 2 && r.isEmpty) -2 else s.substring(0, i).toInt - (n, r) - } - def compareVersions(s: String, v: String, depth: Int): Int = { - if (depth >= 3) 0 - else { - val (sn, srest) = versionOf(s, depth) - val (vn, vrest) = versionOf(v, depth) - if (vn < 0) -2 - else if (sn < vn) -1 - else if (sn > vn) 1 - else compareVersions(srest, vrest, depth + 1) - } - } - compareVersions(javaSpecVersion, version, 0) match { - case -2 => throw new NumberFormatException(s"Not a version: $version") - case i => i >= 0 - } - } - - // provide a main method so version info can be obtained by running this - def main(args: Array[String]) { - val writer = new PrintWriter(Console.err, true) - writer println versionMsg - } -} diff --git a/tests/scala2-library/src/library/scala/util/Random.scala b/tests/scala2-library/src/library/scala/util/Random.scala deleted file mode 100644 index 16d18d7d6df0..000000000000 --- a/tests/scala2-library/src/library/scala/util/Random.scala +++ /dev/null @@ -1,149 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util - -import scala.collection.mutable.ArrayBuffer -import scala.collection.generic.CanBuildFrom -import scala.collection.immutable.{ List, Stream } -import scala.language.{implicitConversions, higherKinds} - -/** - * @author Stephane Micheloud - * - */ -class Random(val self: java.util.Random) extends AnyRef with Serializable { - /** Creates a new random number generator using a single long seed. */ - def this(seed: Long) = this(new java.util.Random(seed)) - - /** Creates a new random number generator using a single integer seed. */ - def this(seed: Int) = this(seed.toLong) - - /** Creates a new random number generator. */ - def this() = this(new java.util.Random()) - - /** Returns the next pseudorandom, uniformly distributed boolean value - * from this random number generator's sequence. - */ - def nextBoolean(): Boolean = self.nextBoolean() - - /** Generates random bytes and places them into a user-supplied byte - * array. - */ - def nextBytes(bytes: Array[Byte]) { self.nextBytes(bytes) } - - /** Returns the next pseudorandom, uniformly distributed double value - * between 0.0 and 1.0 from this random number generator's sequence. - */ - def nextDouble(): Double = self.nextDouble() - - /** Returns the next pseudorandom, uniformly distributed float value - * between 0.0 and 1.0 from this random number generator's sequence. - */ - def nextFloat(): Float = self.nextFloat() - - /** Returns the next pseudorandom, Gaussian ("normally") distributed - * double value with mean 0.0 and standard deviation 1.0 from this - * random number generator's sequence. - */ - def nextGaussian(): Double = self.nextGaussian() - - /** Returns the next pseudorandom, uniformly distributed int value - * from this random number generator's sequence. - */ - def nextInt(): Int = self.nextInt() - - /** Returns a pseudorandom, uniformly distributed int value between 0 - * (inclusive) and the specified value (exclusive), drawn from this - * random number generator's sequence. - */ - def nextInt(n: Int): Int = self.nextInt(n) - - /** Returns the next pseudorandom, uniformly distributed long value - * from this random number generator's sequence. 
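A small sketch of the Random API defined in this file, seeded for reproducibility; the specific results are of course implementation-dependent:

  import scala.util.Random

  val rng = new Random(42L)            // fixed seed, so the sequence is reproducible
  rng.nextInt(100)                     // an Int in [0, 100)
  rng.nextPrintableChar()              // a Char in the ASCII range 33-126
  rng.shuffle(List(1, 2, 3, 4, 5))     // some permutation of the list
  rng.alphanumeric.take(8).mkString    // 8 pseudorandom alphanumeric characters
  Random.nextBoolean()                 // the companion object is itself a default Random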
- */ - def nextLong(): Long = self.nextLong() - - /** Returns a pseudorandomly generated String. This routine does - * not take any measures to preserve the randomness of the distribution - * in the face of factors like unicode's variable-length encoding, - * so please don't use this for anything important. It's primarily - * intended for generating test data. - * - * @param length the desired length of the String - * @return the String - */ - def nextString(length: Int) = { - def safeChar() = { - val surrogateStart: Int = 0xD800 - val res = nextInt(surrogateStart - 1) + 1 - res.toChar - } - - List.fill(length)(safeChar()).mkString - } - - /** Returns the next pseudorandom, uniformly distributed value - * from the ASCII range 33-126. - */ - def nextPrintableChar(): Char = { - val low = 33 - val high = 127 - (self.nextInt(high - low) + low).toChar - } - - def setSeed(seed: Long) { self.setSeed(seed) } - - /** Returns a new collection of the same type in a randomly chosen order. - * - * @return the shuffled collection - */ - def shuffle[T, CC[X] <: TraversableOnce[X]](xs: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = { - val buf = new ArrayBuffer[T] ++= xs - - def swap(i1: Int, i2: Int) { - val tmp = buf(i1) - buf(i1) = buf(i2) - buf(i2) = tmp - } - - for (n <- buf.length to 2 by -1) { - val k = nextInt(n) - swap(n - 1, k) - } - - (bf(xs) ++= buf).result() - } - - /** Returns a Stream of pseudorandomly chosen alphanumeric characters, - * equally chosen from A-Z, a-z, and 0-9. - * - * @since 2.8 - */ - def alphanumeric: Stream[Char] = { - def nextAlphaNum: Char = { - val chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - chars charAt (self nextInt chars.length) - } - - Stream continually nextAlphaNum - } - -} - -/** The object `Random` offers a default implementation - * of scala.util.Random and random-related convenience methods. - * - * @since 2.8 - */ -object Random extends Random { - - implicit def javaRandomToRandom(r: java.util.Random): Random = new Random(r) - -} diff --git a/tests/scala2-library/src/library/scala/util/Sorting.scala b/tests/scala2-library/src/library/scala/util/Sorting.scala deleted file mode 100644 index 3bda7c0d3919..000000000000 --- a/tests/scala2-library/src/library/scala/util/Sorting.scala +++ /dev/null @@ -1,284 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util - -import scala.reflect.ClassTag -import scala.math.Ordering - -/** The `Sorting` object provides convenience wrappers for `java.util.Arrays.sort`. - * Methods that defer to `java.util.Arrays.sort` say that they do or under what - * conditions that they do. - * - * `Sorting` also implements a general-purpose quicksort and stable (merge) sort - * for those cases where `java.util.Arrays.sort` could only be used at the cost - * of a large memory penalty. If performance rather than memory usage is the - * primary concern, one may wish to find alternate strategies to use - * `java.util.Arrays.sort` directly e.g. by boxing primitives to use - * a custom ordering on them. - * - * `Sorting` provides methods where you can provide a comparison function, or - * can request a sort of items that are [[scala.math.Ordered]] or that - * otherwise have an implicit or explicit [[scala.math.Ordering]]. 
- * - * Note also that high-performance non-default sorts for numeric types - * are not provided. If this is required, it is advisable to investigate - * other libraries that cover this use case. - * - * @author Ross Judson - * @author Adriaan Moors - * @author Rex Kerr - * @version 1.1 - */ -object Sorting { - /** Sort an array of Doubles using `java.util.Arrays.sort`. */ - def quickSort(a: Array[Double]): Unit = java.util.Arrays.sort(a) - - /** Sort an array of Ints using `java.util.Arrays.sort`. */ - def quickSort(a: Array[Int]): Unit = java.util.Arrays.sort(a) - - /** Sort an array of Floats using `java.util.Arrays.sort`. */ - def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a) - - private final val qsortThreshold = 16 - - /** Sort array `a` with quicksort, using the Ordering on its elements. - * This algorithm sorts in place, so no additional memory is used aside from - * what might be required to box individual elements during comparison. - */ - def quickSort[K: Ordering](a: Array[K]): Unit = { - // Must have iN >= i0 or math will fail. Also, i0 >= 0. - def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = { - if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord) - else { - val iK = (i0 + iN) >>> 1 // Unsigned div by 2 - // Find index of median of first, central, and last elements - var pL = - if (ord.compare(a(i0), a(iN - 1)) <= 0) - if (ord.compare(a(i0), a(iK)) < 0) - if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK - else i0 - else - if (ord.compare(a(i0), a(iK)) < 0) i0 - else - if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1 - else iK - val pivot = a(pL) - // pL is the start of the pivot block; move it into the middle if needed - if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK } - // Elements equal to the pivot will be in range pL until pR - var pR = pL + 1 - // Items known to be less than pivot are below iA (range i0 until iA) - var iA = i0 - // Items known to be greater than pivot are at or above iB (range iB until iN) - var iB = iN - // Scan through everything in the buffer before the pivot(s) - while (pL - iA > 0) { - val current = a(iA) - ord.compare(current, pivot) match { - case 0 => - // Swap current out with pivot block - a(iA) = a(pL - 1) - a(pL - 1) = current - pL -= 1 - case x if x < 0 => - // Already in place. Just update indices. - iA += 1 - case _ if iB > pR => - // Wrong side. There's room on the other side, so swap - a(iA) = a(iB - 1) - a(iB - 1) = current - iB -= 1 - case _ => - // Wrong side and there is no room. Swap by rotating pivot block. - a(iA) = a(pL - 1) - a(pL - 1) = a(pR - 1) - a(pR - 1) = current - pL -= 1 - pR -= 1 - iB -= 1 - } - } - // Get anything remaining in buffer after the pivot(s) - while (iB - pR > 0) { - val current = a(iB - 1) - ord.compare(current, pivot) match { - case 0 => - // Swap current out with pivot block - a(iB - 1) = a(pR) - a(pR) = current - pR += 1 - case x if x > 0 => - // Already in place. Just update indices. - iB -= 1 - case _ => - // Wrong side and we already know there is no room. Swap by rotating pivot block. 
- a(iB - 1) = a(pR) - a(pR) = a(pL) - a(pL) = current - iA += 1 - pL += 1 - pR += 1 - } - } - // Use tail recursion on large half (Sedgewick's method) so we don't blow up the stack if pivots are poorly chosen - if (iA - i0 < iN - iB) { - inner(a, i0, iA, ord) // True recursion - inner(a, iB, iN, ord) // Should be tail recursion - } - else { - inner(a, iB, iN, ord) // True recursion - inner(a, i0, iA, ord) // Should be tail recursion - } - } - } - inner(a, 0, a.length, implicitly[Ordering[K]]) - } - - private final val mergeThreshold = 32 - - // Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort - // Caller must pass iN >= i0 or math will fail. Also, i0 >= 0. - private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = { - val n = iN - i0 - if (n < 2) return - if (ord.compare(a(i0), a(i0+1)) > 0) { - val temp = a(i0) - a(i0) = a(i0+1) - a(i0+1) = temp - } - var m = 2 - while (m < n) { - // Speed up already-sorted case by checking last element first - val next = a(i0 + m) - if (ord.compare(next, a(i0+m-1)) < 0) { - var iA = i0 - var iB = i0 + m - 1 - while (iB - iA > 1) { - val ix = (iA + iB) >>> 1 // Use bit shift to get unsigned div by 2 - if (ord.compare(next, a(ix)) < 0) iB = ix - else iA = ix - } - val ix = iA + (if (ord.compare(next, a(iA)) < 0) 0 else 1) - var i = i0 + m - while (i > ix) { - a(i) = a(i-1) - i -= 1 - } - a(ix) = next - } - m += 1 - } - } - - // Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0. - private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = { - if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord) - else { - val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow - val sc = if (scratch eq null) new Array[T](iK - i0) else scratch - mergeSort(a, i0, iK, ord, sc) - mergeSort(a, iK, iN, ord, sc) - mergeSorted(a, i0, iK, iN, ord, sc) - } - } - - // Must have 0 <= i0 < iK < iN - private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = { - // Check to make sure we're not already in order - if (ord.compare(a(iK-1), a(iK)) > 0) { - var i = i0 - val jN = iK - i0 - var j = 0 - while (i < iK) { - scratch (j) = a(i) - i += 1 - j += 1 - } - var k = i0 - j = 0 - while (i < iN && j < jN) { - if (ord.compare(a(i), scratch(j)) < 0) { a(k) = a(i); i += 1 } - else { a(k) = scratch(j); j += 1 } - k += 1 - } - while (j < jN) { a(k) = scratch(j); j += 1; k += 1 } - // Don't need to finish a(i) because it's already in place, k = i - } - } - - // Why would you even do this? - private def booleanSort(a: Array[Boolean]): Unit = { - var i = 0 - var n = 0 - while (i < a.length) { - if (!a(i)) n += 1 - i += 1 - } - i = 0 - while (i < n) { - a(i) = false - i += 1 - } - while (i < a.length) { - a(i) = true - i += 1 - } - } - - // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible) - // Maybe also rename all these methods to `sort`. - @inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match { - case _: Array[AnyRef] => - // Note that runtime matches are covariant, so could actually be any Array[T] s.t. 
T is not primitive (even boxed value classes) - if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering") - java.util.Arrays.sort(a, ord) - case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord) - case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! - case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord) - case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! - case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord) - case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord) - case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord) - case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord) - // Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case. - case null => throw new NullPointerException - } - - // TODO: remove unnecessary ClassTag (not binary compatible) - /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ - def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K]) - - // TODO: Remove unnecessary ClassTag (not binary compatible) - // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) - /** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ - def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f) - - /** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ - def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = { - val ret = a.toArray - sort(ret, Ordering[K]) - ret - } - - // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) - /** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ - def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = { - val ret = a.toArray - sort(ret, Ordering fromLessThan f) - ret - } - - /** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. 
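The dispatch above sends arrays of objects, and most primitive arrays under their natural Ordering, to java.util.Arrays.sort, and otherwise falls back to the in-place merge sort (always for Float/Double, where NaN handling makes stability meaningful). A short usage sketch with illustrative data:

  import scala.util.Sorting

  val nums = Array(3, 1, 2)
  Sorting.quickSort(nums)                 // sorts in place: Array(1, 2, 3)

  case class Person(name: String, age: Int)
  val people = Array(Person("b", 30), Person("a", 30), Person("c", 20))
  Sorting.stableSort(people, (x: Person, y: Person) => x.age < y.age)
  // in place: c(20), b(30), a(30) -- equal keys keep their original order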
*/ - def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = { - val ret = a.toArray - sort(ret, Ordering[M] on f) - ret - } -} diff --git a/tests/scala2-library/src/library/scala/util/Try.scala b/tests/scala2-library/src/library/scala/util/Try.scala deleted file mode 100644 index 00e9585c38e2..000000000000 --- a/tests/scala2-library/src/library/scala/util/Try.scala +++ /dev/null @@ -1,268 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2008-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util - -import scala.util.control.NonFatal - -/** - * The `Try` type represents a computation that may either result in an exception, or return a - * successfully computed value. It's similar to, but semantically different from the [[scala.util.Either]] type. - * - * Instances of `Try[T]`, are either an instance of [[scala.util.Success]][T] or [[scala.util.Failure]][T]. - * - * For example, `Try` can be used to perform division on a user-defined input, without the need to do explicit - * exception-handling in all of the places that an exception might occur. - * - * Example: - * {{{ - * import scala.io.StdIn - * import scala.util.{Try, Success, Failure} - * - * def divide: Try[Int] = { - * val dividend = Try(StdIn.readLine("Enter an Int that you'd like to divide:\n").toInt) - * val divisor = Try(StdIn.readLine("Enter an Int that you'd like to divide by:\n").toInt) - * val problem = dividend.flatMap(x => divisor.map(y => x/y)) - * problem match { - * case Success(v) => - * println("Result of " + dividend.get + "/"+ divisor.get +" is: " + v) - * Success(v) - * case Failure(e) => - * println("You must've divided by zero or entered something that's not an Int. Try again!") - * println("Info from the exception: " + e.getMessage) - * divide - * } - * } - * - * }}} - * - * An important property of `Try` shown in the above example is its ability to ''pipeline'', or chain, operations, - * catching exceptions along the way. The `flatMap` and `map` combinators in the above example each essentially - * pass off either their successfully completed value, wrapped in the `Success` type for it to be further operated - * upon by the next combinator in the chain, or the exception wrapped in the `Failure` type usually to be simply - * passed on down the chain. Combinators such as `recover` and `recoverWith` are designed to provide some type of - * default behavior in the case of failure. - * - * ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]). - * Serious system errors, on the other hand, will be thrown. - * - * ''Note:'': all Try combinators will catch exceptions and return failure unless otherwise specified in the documentation. - * - * `Try` comes to the Scala standard library after years of use as an integral part of Twitter's stack. - * - * @author based on Twitter's original implementation in com.twitter.util. - * @since 2.10 - */ -sealed abstract class Try[+T] extends Product with Serializable { - - /** Returns `true` if the `Try` is a `Failure`, `false` otherwise. - */ - def isFailure: Boolean - - /** Returns `true` if the `Try` is a `Success`, `false` otherwise. - */ - def isSuccess: Boolean - - /** Returns the value from this `Success` or the given `default` argument if this is a `Failure`. 
- * - * ''Note:'': This will throw an exception if it is not a success and default throws an exception. - */ - def getOrElse[U >: T](default: => U): U - - /** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`. - */ - def orElse[U >: T](default: => Try[U]): Try[U] - - /** Returns the value from this `Success` or throws the exception if this is a `Failure`. - */ - def get: T - - /** - * Applies the given function `f` if this is a `Success`, otherwise returns `Unit` if this is a `Failure`. - * - * ''Note:'' If `f` throws, then this method may throw an exception. - */ - def foreach[U](f: T => U): Unit - - /** - * Returns the given function applied to the value from this `Success` or returns this if this is a `Failure`. - */ - def flatMap[U](f: T => Try[U]): Try[U] - - /** - * Maps the given function to the value from this `Success` or returns this if this is a `Failure`. - */ - def map[U](f: T => U): Try[U] - - /** - * Applies the given partial function to the value from this `Success` or returns this if this is a `Failure`. - */ - def collect[U](pf: PartialFunction[T, U]): Try[U] - - /** - * Converts this to a `Failure` if the predicate is not satisfied. - */ - def filter(p: T => Boolean): Try[T] - - /** Creates a non-strict filter, which eventually converts this to a `Failure` - * if the predicate is not satisfied. - * - * Note: unlike filter, withFilter does not create a new Try. - * Instead, it restricts the domain of subsequent - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * - * As Try is a one-element collection, this may be a bit overkill, - * but it's consistent with withFilter on Option and the other collections. - * - * @param p the predicate used to test elements. - * @return an object of class `WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this Try - * which satisfy the predicate `p`. - */ - @inline final def withFilter(p: T => Boolean): WithFilter = new WithFilter(p) - - /** We need a whole WithFilter class to honor the "doesn't create a new - * collection" contract even though it seems unlikely to matter much in a - * collection with max size 1. - */ - @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12.0") - class WithFilter(p: T => Boolean) { - def map[U](f: T => U): Try[U] = Try.this filter p map f - def flatMap[U](f: T => Try[U]): Try[U] = Try.this filter p flatMap f - def foreach[U](f: T => U): Unit = Try.this filter p foreach f - def withFilter(q: T => Boolean): WithFilter = new WithFilter(x => p(x) && q(x)) - } - - /** - * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`. - * This is like `flatMap` for the exception. - */ - def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] - - /** - * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`. - * This is like map for the exception. - */ - def recover[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, U]): Try[U] - - /** - * Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`. - */ - def toOption: Option[T] - - /** - * Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`, - * into an un-nested `Try`, ie, a `Try` of type `Try[T]`. - */ - def flatten[U](implicit ev: T <:< Try[U]): Try[U] - - /** - * Inverts this `Try`. 
If this is a `Failure`, returns its exception wrapped in a `Success`. - * If this is a `Success`, returns a `Failure` containing an `UnsupportedOperationException`. - */ - def failed: Try[Throwable] - - /** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying - * `s` if this is a `Success`. - */ - def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] - - /** - * Returns `Left` with `Throwable` if this is a `Failure`, otherwise returns `Right` with `Success` value. - */ - def toEither: Either[Throwable, T] - - /** - * Applies `fa` if this is a `Failure` or `fb` if this is a `Success`. - * If `fb` is initially applied and throws an exception, - * then `fa` is applied with this exception. - * - * @example {{{ - * val result: Try[Throwable, Int] = Try { string.toInt } - * log(result.fold( - * ex => "Operation failed with " + ex, - * v => "Operation produced value: " + v - * )) - * }}} - * - * @param fa the function to apply if this is a `Failure` - * @param fb the function to apply if this is a `Success` - * @return the results of applying the function - */ - def fold[U](fa: Throwable => U, fb: T => U): U - -} - -object Try { - /** Constructs a `Try` using the by-name parameter. This - * method will ensure any non-fatal exception is caught and a - * `Failure` object is returned. - */ - def apply[T](r: => T): Try[T] = - try Success(r) catch { - case NonFatal(e) => Failure(e) - } -} - -final case class Failure[+T](exception: Throwable) extends Try[T] { - override def isFailure: Boolean = true - override def isSuccess: Boolean = false - override def get: T = throw exception - override def getOrElse[U >: T](default: => U): U = default - override def orElse[U >: T](default: => Try[U]): Try[U] = - try default catch { case NonFatal(e) => Failure(e) } - override def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]] - override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]] - override def foreach[U](f: T => U): Unit = () - override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = - try f(exception) catch { case NonFatal(e) => Failure(e) } - override def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]] - override def collect[U](pf: PartialFunction[T, U]): Try[U] = this.asInstanceOf[Try[U]] - override def filter(p: T => Boolean): Try[T] = this - override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = - try { if (pf isDefinedAt exception) Success(pf(exception)) else this } catch { case NonFatal(e) => Failure(e) } - override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = - try { if (pf isDefinedAt exception) pf(exception) else this } catch { case NonFatal(e) => Failure(e) } - override def failed: Try[Throwable] = Success(exception) - override def toOption: Option[T] = None - override def toEither: Either[Throwable, T] = Left(exception) - override def fold[U](fa: Throwable => U, fb: T => U): U = fa(exception) -} - - -final case class Success[+T](value: T) extends Try[T] { - override def isFailure: Boolean = false - override def isSuccess: Boolean = true - override def get = value - override def getOrElse[U >: T](default: => U): U = get - override def orElse[U >: T](default: => Try[U]): Try[U] = this - override def flatMap[U](f: T => Try[U]): Try[U] = - try f(value) catch { case NonFatal(e) => Failure(e) } - override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value 
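Putting the Success and Failure implementations above together, a compact REPL-style sketch of a typical Try pipeline; parsePort and its inputs are illustrative:

  import scala.util.Try

  def parsePort(s: String): Try[Int] =
    Try(s.toInt).filter(p => p > 0 && p < 65536)

  parsePort("8080")                       // Success(8080)
  parsePort("oops")                       // Failure(java.lang.NumberFormatException: ...)
  parsePort("-1")                         // Failure(... "Predicate does not hold for -1")
  parsePort("oops").getOrElse(80)         // 80
  parsePort("8080").map(_ + 1).toEither   // Right(8081)
  parsePort("oops").recover { case _: NumberFormatException => 0 }   // Success(0)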
- override def foreach[U](f: T => U): Unit = f(value) - override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this flatMap s - override def map[U](f: T => U): Try[U] = Try[U](f(value)) - override def collect[U](pf: PartialFunction[T, U]): Try[U] = - try { - if (pf isDefinedAt value) Success(pf(value)) - else Failure(new NoSuchElementException("Predicate does not hold for " + value)) - } catch { case NonFatal(e) => Failure(e) } - override def filter(p: T => Boolean): Try[T] = - try { - if (p(value)) this else Failure(new NoSuchElementException("Predicate does not hold for " + value)) - } catch { case NonFatal(e) => Failure(e) } - override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = this - override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = this - override def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed")) - override def toOption: Option[T] = Some(value) - override def toEither: Either[Throwable, T] = Right(value) - override def fold[U](fa: Throwable => U, fb: T => U): U = - try { fb(value) } catch { case NonFatal(e) => fa(e) } -} diff --git a/tests/scala2-library/src/library/scala/util/control/Breaks.scala b/tests/scala2-library/src/library/scala/util/control/Breaks.scala deleted file mode 100644 index 5524b10afa7f..000000000000 --- a/tests/scala2-library/src/library/scala/util/control/Breaks.scala +++ /dev/null @@ -1,94 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.control - -/** A class that can be instantiated for the break control abstraction. - * Example usage: - * {{{ - * val mybreaks = new Breaks - * import mybreaks.{break, breakable} - * - * breakable { - * for (...) { - * if (...) break() - * } - * } - * }}} - * Calls to break from one instantiation of `Breaks` will never - * target breakable objects of some other instantiation. - */ -class Breaks { - - private val breakException = new BreakControl - - /** - * A block from which one can exit with a `break`. The `break` may be - * executed further down in the call stack provided that it is called on the - * exact same instance of `Breaks`. - */ - def breakable(op: => Unit) { - try { - op - } catch { - case ex: BreakControl => - if (ex ne breakException) throw ex - } - } - - sealed trait TryBlock[T] { - def catchBreak(onBreak: =>T): T - } - - /** - * This variant enables the execution of a code block in case of a `break()`: - * {{{ - * tryBreakable { - * for (...) { - * if (...) break() - * } - * } catchBreak { - * doCleanup() - * } - * }}} - */ - def tryBreakable[T](op: =>T) = new TryBlock[T] { - def catchBreak(onBreak: =>T) = try { - op - } catch { - case ex: BreakControl => - if (ex ne breakException) throw ex - onBreak - } - } - - /** - * Break from dynamically closest enclosing breakable block using this exact - * `Breaks` instance. - * - * @note This might be different than the statically closest enclosing block! - */ - def break(): Nothing = { throw breakException } -} - -/** An object that can be used for the break control abstraction. - * Example usage: - * {{{ - * import Breaks.{break, breakable} - * - * breakable { - * for (...) { - * if (...) 
break - * } - * } - * }}} - */ -object Breaks extends Breaks - -private class BreakControl extends ControlThrowable diff --git a/tests/scala2-library/src/library/scala/util/control/ControlThrowable.scala b/tests/scala2-library/src/library/scala/util/control/ControlThrowable.scala deleted file mode 100644 index 7ed3d95cd3ca..000000000000 --- a/tests/scala2-library/src/library/scala/util/control/ControlThrowable.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.control - -/** A marker trait indicating that the `Throwable` it is mixed into is - * intended for flow control. - * - * Note that `Throwable` subclasses which extend this trait may extend any - * other `Throwable` subclass (eg. `RuntimeException`) and are not required - * to extend `Throwable` directly. - * - * Instances of `Throwable` subclasses marked in this way should not normally - * be caught. Where catch-all behaviour is required `ControlThrowable` - * should be propagated, for example: - * {{{ - * import scala.util.control.ControlThrowable - * - * try { - * // Body might throw arbitrarily - * } catch { - * case c: ControlThrowable => throw c // propagate - * case t: Exception => log(t) // log and suppress - * } - * }}} - * - * @author Miles Sabin - */ -trait ControlThrowable extends Throwable with NoStackTrace diff --git a/tests/scala2-library/src/library/scala/util/control/Exception.scala b/tests/scala2-library/src/library/scala/util/control/Exception.scala deleted file mode 100644 index 8e38f3435d15..000000000000 --- a/tests/scala2-library/src/library/scala/util/control/Exception.scala +++ /dev/null @@ -1,371 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util -package control - -import scala.reflect.{ ClassTag, classTag } -import scala.language.implicitConversions - -/** Classes representing the components of exception handling. - * - * Each class is independently composable. - * - * This class differs from [[scala.util.Try]] in that it focuses on composing exception handlers rather than - * composing behavior. All behavior should be composed first and fed to a [[Catch]] object using one of the - * `opt`, `either` or `withTry` methods. Taken together the classes provide a DSL for composing catch and finally - * behaviors. - * - * === Examples === - * - * Create a `Catch` which handles specified exceptions. 
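A concrete version of the breakable/break pattern that the Breaks Scaladoc above only sketches with pseudo-code; the list and variable names are illustrative:

  import scala.util.control.Breaks.{break, breakable}

  val xs = List(3, 5, -2, 7)
  var firstNegative = -1
  breakable {
    for ((x, i) <- xs.zipWithIndex) {
      if (x < 0) { firstNegative = i; break() }
    }
  }
  // firstNegative == 2; the loop stops before reaching 7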
- * {{{ - * import scala.util.control.Exception._ - * import java.net._ - * - * val s = "http://www.scala-lang.org/" - * - * // Some(http://www.scala-lang.org/) - * val x1: Option[URL] = catching(classOf[MalformedURLException]) opt new URL(s) - * - * // Right(http://www.scala-lang.org/) - * val x2: Either[Throwable,URL] = - * catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s) - * - * // Success(http://www.scala-lang.org/) - * val x3: Try[URL] = catching(classOf[MalformedURLException], classOf[NullPointerException]) withTry new URL(s) - * - * val defaultUrl = new URL("http://example.com") - * // URL(http://example.com) because htt/xx throws MalformedURLException - * val x4: URL = failAsValue(classOf[MalformedURLException])(defaultUrl)(new URL("htt/xx")) - * }}} - * - * Create a `Catch` which logs exceptions using `handling` and `by`. - * {{{ - * def log(t: Throwable): Unit = t.printStackTrace - * - * val withThrowableLogging: Catch[Unit] = handling(classOf[MalformedURLException]) by (log) - * - * def printUrl(url: String) : Unit = { - * val con = new URL(url) openConnection() - * val source = scala.io.Source.fromInputStream(con.getInputStream()) - * source.getLines.foreach(println) - * } - * - * val badUrl = "htt/xx" - * // Prints stacktrace, - * // java.net.MalformedURLException: no protocol: htt/xx - * // at java.net.URL.(URL.java:586) - * withThrowableLogging { printUrl(badUrl) } - * - * val goodUrl = "http://www.scala-lang.org/" - * // Prints page content, - * // <!DOCTYPE html> - * // <html> - * withThrowableLogging { printUrl(goodUrl) } - * }}} - * - * Use `unwrapping` to create a `Catch` that unwraps exceptions before rethrowing. - * {{{ - * class AppException(cause: Throwable) extends RuntimeException(cause) - * - * val unwrappingCatch: Catch[Nothing] = unwrapping(classOf[AppException]) - * - * def calcResult: Int = throw new AppException(new NullPointerException) - * - * // Throws NPE not AppException, - * // java.lang.NullPointerException - * // at .calcResult(<console>:17) - * val result = unwrappingCatch(calcResult) - * }}} - * - * Use `failAsValue` to provide a default when a specified exception is caught. - * - * {{{ - * val inputDefaulting: Catch[Int] = failAsValue(classOf[NumberFormatException])(0) - * val candidatePick = "seven" // scala.io.StdIn.readLine() - * - * // Int = 0 - * val pick = inputDefaulting(candidatePick.toInt) - * }}} - * - * Compose multiple `Catch`s with `or` to build a `Catch` that provides default values varied by exception. 
- * {{{ - * val formatDefaulting: Catch[Int] = failAsValue(classOf[NumberFormatException])(0) - * val nullDefaulting: Catch[Int] = failAsValue(classOf[NullPointerException])(-1) - * val otherDefaulting: Catch[Int] = nonFatalCatch withApply(_ => -100) - * - * val combinedDefaulting: Catch[Int] = formatDefaulting or nullDefaulting or otherDefaulting - * - * def p(s: String): Int = s.length * s.toInt - * - * // Int = 0 - * combinedDefaulting(p("tenty-nine")) - * - * // Int = -1 - * combinedDefaulting(p(null: String)) - * - * // Int = -100 - * combinedDefaulting(throw new IllegalStateException) - * - * // Int = 22 - * combinedDefaulting(p("11")) - * }}} - * - * @groupname composition-catch Catch behavior composition - * @groupprio composition-catch 10 - * @groupdesc composition-catch Build Catch objects from exception lists and catch logic - * - * @groupname composition-finally Finally behavior composition - * @groupprio composition-finally 20 - * @groupdesc composition-finally Build Catch objects from finally logic - * - * @groupname canned-behavior General purpose catch objects - * @groupprio canned-behavior 30 - * @groupdesc canned-behavior Catch objects with predefined behavior. Use combinator methods to compose additional behavior. - * - * @groupname dsl DSL behavior composition - * @groupprio dsl 40 - * @groupdesc dsl Expressive Catch behavior composition - * - * @groupname composition-catch-promiscuously Promiscuous Catch behaviors - * @groupprio composition-catch-promiscuously 50 - * @groupdesc composition-catch-promiscuously Useful if catching `ControlThrowable` or `InterruptedException` is required. - * - * @groupname logic-container Logic Containers - * @groupprio logic-container 60 - * @groupdesc logic-container Containers for catch and finally behavior. - * - * @define protectedExceptions `ControlThrowable` or `InterruptedException` - * - * @author Paul Phillips - */ - -object Exception { - type Catcher[+T] = PartialFunction[Throwable, T] - - def mkCatcher[Ex <: Throwable: ClassTag, T](isDef: Ex => Boolean, f: Ex => T) = new Catcher[T] { - private def downcast(x: Throwable): Option[Ex] = - if (classTag[Ex].runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[Ex]) - else None - - def isDefinedAt(x: Throwable) = downcast(x) exists isDef - def apply(x: Throwable): T = f(downcast(x).get) - } - - def mkThrowableCatcher[T](isDef: Throwable => Boolean, f: Throwable => T) = mkCatcher[Throwable, T](isDef, f) - - implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassTag, T](pf: PartialFunction[Ex, T]): Catcher[T] = - mkCatcher(pf.isDefinedAt _, pf.apply _) - - /** !!! Not at all sure of every factor which goes into this, - * and/or whether we need multiple standard variations. - * @return true if `x` is $protectedExceptions otherwise false. - */ - def shouldRethrow(x: Throwable): Boolean = x match { - case _: ControlThrowable => true - case _: InterruptedException => true - // case _: java.lang.Error => true ? - case _ => false - } - - trait Described { - protected val name: String - private var _desc: String = "" - def desc = _desc - def withDesc(s: String): this.type = { - _desc = s - this - } - override def toString() = name + "(" + desc + ")" - } - - /** A container class for finally code. - * @group logic-container - */ - class Finally private[Exception](body: => Unit) extends Described { - protected val name = "Finally" - - def and(other: => Unit): Finally = new Finally({ body ; other }) - def invoke() { body } - } - - /** A container class for catch/finally logic. 
- * - * Pass a different value for rethrow if you want to probably - * unwisely allow catching control exceptions and other throwables - * which the rest of the world may expect to get through. - * @tparam T result type of bodies used in try and catch blocks - * @param pf Partial function used when applying catch logic to determine result value - * @param fin Finally logic which if defined will be invoked after catch logic - * @param rethrow Predicate on throwables determining when to rethrow a caught [[Throwable]] - * @group logic-container - */ - class Catch[+T]( - val pf: Catcher[T], - val fin: Option[Finally] = None, - val rethrow: Throwable => Boolean = shouldRethrow) - extends Described { - - protected val name = "Catch" - - /** Create a new Catch with additional exception handling logic. */ - def or[U >: T](pf2: Catcher[U]): Catch[U] = new Catch(pf orElse pf2, fin, rethrow) - def or[U >: T](other: Catch[U]): Catch[U] = or(other.pf) - - /** Apply this catch logic to the supplied body. */ - def apply[U >: T](body: => U): U = - try body - catch { - case x if rethrow(x) => throw x - case x if pf isDefinedAt x => pf(x) - } - finally fin foreach (_.invoke()) - - /** Create a new Catch container from this object and the supplied finally body. - * @param body The additional logic to apply after all existing finally bodies - */ - def andFinally(body: => Unit): Catch[T] = { - val appendedFin = fin map(_ and body) getOrElse new Finally(body) - new Catch(pf, Some(appendedFin), rethrow) - } - - /** Apply this catch logic to the supplied body, mapping the result - * into `Option[T]` - `None` if any exception was caught, `Some(T)` otherwise. - */ - def opt[U >: T](body: => U): Option[U] = toOption(Some(body)) - - /** Apply this catch logic to the supplied body, mapping the result - * into `Either[Throwable, T]` - `Left(exception)` if an exception was caught, - * `Right(T)` otherwise. - */ - def either[U >: T](body: => U): Either[Throwable, U] = toEither(Right(body)) - - /** Apply this catch logic to the supplied body, mapping the result - * into `Try[T]` - `Failure` if an exception was caught, `Success(T)` otherwise. - */ - def withTry[U >: T](body: => U): scala.util.Try[U] = toTry(Success(body)) - - /** Create a `Catch` object with the same `isDefinedAt` logic as this one, - * but with the supplied `apply` method replacing the current one. */ - def withApply[U](f: Throwable => U): Catch[U] = { - val pf2 = new Catcher[U] { - def isDefinedAt(x: Throwable) = pf isDefinedAt x - def apply(x: Throwable) = f(x) - } - new Catch(pf2, fin, rethrow) - } - - /** Convenience methods. */ - def toOption: Catch[Option[T]] = withApply(_ => None) - def toEither: Catch[Either[Throwable, T]] = withApply(Left(_)) - def toTry: Catch[scala.util.Try[T]] = withApply(x => Failure(x)) - } - - final val nothingCatcher: Catcher[Nothing] = mkThrowableCatcher(_ => false, throw _) - final def nonFatalCatcher[T]: Catcher[T] = mkThrowableCatcher({ case NonFatal(_) => true; case _ => false }, throw _) - final def allCatcher[T]: Catcher[T] = mkThrowableCatcher(_ => true, throw _) - - /** The empty `Catch` object. - * @group canned-behavior - **/ - final val noCatch: Catch[Nothing] = new Catch(nothingCatcher) withDesc "" - - /** A `Catch` object which catches everything. - * @group canned-behavior - **/ - final def allCatch[T]: Catch[T] = new Catch(allCatcher[T]) withDesc "" - - /** A `Catch` object which catches non-fatal exceptions. 
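The canned `Catch` objects above (`allCatch`, `nonFatalCatch`, ...) combine naturally with the `opt`/`either` converters and with `andFinally`; a small sketch, where the file name is hypothetical:
{{{
import scala.util.control.Exception._
import java.io.{BufferedReader, FileReader}

object CannedCatchDemo extends App {
  // allCatch.opt: swallow anything, yielding None on failure
  val firstLine: Option[String] = allCatch.opt {
    val in = new BufferedReader(new FileReader("maybe-missing.txt")) // hypothetical file
    try in.readLine() finally in.close()
  }

  // nonFatalCatch.either lets fatal errors (OutOfMemoryError, ...) keep propagating
  val parsed: Either[Throwable, Int] = nonFatalCatch either "42".toInt

  // andFinally appends cleanup that runs whether or not the body throws
  var closed = false
  val result = (nonFatalCatch[Int] andFinally { closed = true }) apply { "7".toInt }

  println((firstLine, parsed, result, closed)) // (..., Right(42), 7, true)
}
}}}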
- * @group canned-behavior - **/ - final def nonFatalCatch[T]: Catch[T] = new Catch(nonFatalCatcher[T]) withDesc "" - - /** Creates a `Catch` object which will catch any of the supplied exceptions. - * Since the returned `Catch` object has no specific logic defined and will simply - * rethrow the exceptions it catches, you will typically want to call `opt`, - * `either` or `withTry` on the return value, or assign custom logic by calling "withApply". - * - * Note that `Catch` objects automatically rethrow `ControlExceptions` and others - * which should only be caught in exceptional circumstances. If you really want - * to catch exactly what you specify, use `catchingPromiscuously` instead. - * @group composition-catch - */ - def catching[T](exceptions: Class[_]*): Catch[T] = - new Catch(pfFromExceptions(exceptions : _*)) withDesc (exceptions map (_.getName) mkString ", ") - - def catching[T](c: Catcher[T]): Catch[T] = new Catch(c) - - /** Creates a `Catch` object which will catch any of the supplied exceptions. - * Unlike "catching" which filters out those in shouldRethrow, this one will - * catch whatever you ask of it including $protectedExceptions. - * @group composition-catch-promiscuously - */ - def catchingPromiscuously[T](exceptions: Class[_]*): Catch[T] = catchingPromiscuously(pfFromExceptions(exceptions : _*)) - def catchingPromiscuously[T](c: Catcher[T]): Catch[T] = new Catch(c, None, _ => false) - - /** Creates a `Catch` object which catches and ignores any of the supplied exceptions. - * @group composition-catch - */ - def ignoring(exceptions: Class[_]*): Catch[Unit] = - catching(exceptions: _*) withApply (_ => ()) - - /** Creates a `Catch` object which maps all the supplied exceptions to `None`. - * @group composition-catch - */ - def failing[T](exceptions: Class[_]*): Catch[Option[T]] = - catching(exceptions: _*) withApply (_ => None) - - /** Creates a `Catch` object which maps all the supplied exceptions to the given value. - * @group composition-catch - */ - def failAsValue[T](exceptions: Class[_]*)(value: => T): Catch[T] = - catching(exceptions: _*) withApply (_ => value) - - class By[T,R](f: T => R) { - def by(x: T): R = f(x) - } - - /** Returns a partially constructed `Catch` object, which you must give - * an exception handler function as an argument to `by`. - * @example - * {{{ - * handling(classOf[MalformedURLException], classOf[NullPointerException]) by (_.printStackTrace) - * }}} - * @group dsl - */ - // TODO: Add return type - def handling[T](exceptions: Class[_]*) = { - def fun(f: Throwable => T) = catching(exceptions: _*) withApply f - new By[Throwable => T, Catch[T]](fun _) - } - - /** Returns a `Catch` object with no catch logic and the argument as the finally logic. - * @group composition-finally - */ - def ultimately[T](body: => Unit): Catch[T] = noCatch andFinally body - - /** Creates a `Catch` object which unwraps any of the supplied exceptions. 
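The DSL-style factories above compose into small declarative handlers; a sketch of `ignoring`, `handling ... by`, and `ultimately` (the handler and the sample bodies are illustrative, and type parameters are given explicitly to help inference):
{{{
import scala.util.control.Exception._

object DslDemo extends App {
  // ignoring: run a side-effecting body, discarding the listed exceptions
  ignoring(classOf[NumberFormatException]) {
    println("not a number".toInt) // never prints; the exception is swallowed
  }

  // handling ... by: map the listed exceptions through a handler function
  def log(t: Throwable): Int = { println(s"recovered from ${t.getClass.getSimpleName}"); -1 }
  val recover: Catch[Int] = handling[Int](classOf[NumberFormatException]) by (log(_))
  val n = recover("oops".toInt)

  // ultimately: no catch logic, only finally logic
  var released = false
  val m: Int = ultimately[Int] { released = true } apply (21 * 2)

  println((n, m, released)) // (-1, 42, true)
}
}}}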
- * @group composition-catch - */ - def unwrapping[T](exceptions: Class[_]*): Catch[T] = { - def unwrap(x: Throwable): Throwable = - if (wouldMatch(x, exceptions) && x.getCause != null) unwrap(x.getCause) - else x - - catching(exceptions: _*) withApply (x => throw unwrap(x)) - } - - /** Private **/ - private def wouldMatch(x: Throwable, classes: scala.collection.Seq[Class[_]]): Boolean = - classes exists (_ isAssignableFrom x.getClass) - - private def pfFromExceptions(exceptions: Class[_]*): PartialFunction[Throwable, Nothing] = - { case x if wouldMatch(x, exceptions) => throw x } -} diff --git a/tests/scala2-library/src/library/scala/util/control/NoStackTrace.scala b/tests/scala2-library/src/library/scala/util/control/NoStackTrace.scala deleted file mode 100644 index 3647af4ac388..000000000000 --- a/tests/scala2-library/src/library/scala/util/control/NoStackTrace.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.control - -/** A trait for exceptions which, for efficiency reasons, do not - * fill in the stack trace. Stack trace suppression can be disabled - * on a global basis via a system property wrapper in - * [[scala.sys.SystemProperties]]. - * - * @note Since JDK 1.7, a similar effect can be achieved with `class Ex extends Throwable(..., writableStackTrace = false)` - * - * @author Paul Phillips - * @since 2.8 - */ -trait NoStackTrace extends Throwable { - override def fillInStackTrace(): Throwable = - if (NoStackTrace.noSuppression) super.fillInStackTrace() - else this -} - -object NoStackTrace { - final def noSuppression = _noSuppression - - // two-stage init to make checkinit happy, since sys.SystemProperties.noTraceSuppression.value calls back into NoStackTrace.noSuppression - final private var _noSuppression = false - _noSuppression = sys.SystemProperties.noTraceSuppression.value -} diff --git a/tests/scala2-library/src/library/scala/util/control/NonFatal.scala b/tests/scala2-library/src/library/scala/util/control/NonFatal.scala deleted file mode 100644 index 9d3dfea07455..000000000000 --- a/tests/scala2-library/src/library/scala/util/control/NonFatal.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.control - -/** - * Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError` - * (for example, `OutOfMemoryError` and `StackOverflowError`, subclasses of `VirtualMachineError`), `ThreadDeath`, - * `LinkageError`, `InterruptedException`, `ControlThrowable`. - * - * Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by - * `NonFatal` (and would therefore be thrown). 
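Two quick sketches of the pieces removed here: `unwrapping` rethrowing the cause of a wrapper exception, and a control-flow signal mixing in `NoStackTrace` to skip stack-trace capture (both exception classes are invented for illustration):
{{{
import scala.util.control.{Exception => Exc, NoStackTrace}

object UnwrapAndNoStackTraceDemo extends App {
  // Hypothetical wrapper used by some application layer
  class AppException(cause: Throwable) extends RuntimeException(cause)

  val unwrapped = Exc.unwrapping(classOf[AppException])
  val caught =
    try unwrapped { throw new AppException(new IllegalArgumentException("boom")) }
    catch { case e: Throwable => e }
  println(caught.getClass.getName) // java.lang.IllegalArgumentException, not AppException

  // NoStackTrace: fillInStackTrace is skipped, which is cheap for control-flow exceptions
  case object Abort extends RuntimeException("abort") with NoStackTrace
  val depth = try throw Abort catch { case Abort => Abort.getStackTrace.length }
  println(depth) // 0, unless suppression is disabled globally (see scala.sys.SystemProperties)
}
}}}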
- * - * For example, all harmless Throwables can be caught by: - * {{{ - * try { - * // dangerous stuff - * } catch { - * case NonFatal(e) => log.error(e, "Something not that bad.") - * // or - * case e if NonFatal(e) => log.error(e, "Something not that bad.") - * } - * }}} - */ -object NonFatal { - /** - * Returns true if the provided `Throwable` is to be considered non-fatal, or false if it is to be considered fatal - */ - def apply(t: Throwable): Boolean = t match { - // VirtualMachineError includes OutOfMemoryError and other fatal errors - case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable => false - case _ => true - } - /** - * Returns Some(t) if NonFatal(t) == true, otherwise None - */ - def unapply(t: Throwable): Option[Throwable] = if (apply(t)) Some(t) else None -} diff --git a/tests/scala2-library/src/library/scala/util/control/TailCalls.scala b/tests/scala2-library/src/library/scala/util/control/TailCalls.scala deleted file mode 100644 index c7fefb1ebadb..000000000000 --- a/tests/scala2-library/src/library/scala/util/control/TailCalls.scala +++ /dev/null @@ -1,110 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.control - -/** Methods exported by this object implement tail calls via trampolining. - * Tail calling methods have to return their result using `done` or call the - * next method using `tailcall`. Both return a `TailRec` object. The result - * of evaluating a tailcalling function can be retrieved from a `Tailrec` - * value using method `result`. - * Implemented as described in "Stackless Scala with Free Monads" - * http://blog.higher-order.com/assets/trampolines.pdf - * - * Here's a usage example: - * {{{ - * import scala.util.control.TailCalls._ - * - * def isEven(xs: List[Int]): TailRec[Boolean] = - * if (xs.isEmpty) done(true) else tailcall(isOdd(xs.tail)) - * - * def isOdd(xs: List[Int]): TailRec[Boolean] = - * if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail)) - * - * isEven((1 to 100000).toList).result - * - * def fib(n: Int): TailRec[Int] = - * if (n < 2) done(n) else for { - * x <- tailcall(fib(n - 1)) - * y <- tailcall(fib(n - 2)) - * } yield (x + y) - * - * fib(40).result - * }}} - */ -object TailCalls { - - /** This class represents a tailcalling computation - */ - abstract class TailRec[+A] { - - /** Continue the computation with `f`. */ - final def map[B](f: A => B): TailRec[B] = - flatMap(a => Call(() => Done(f(a)))) - - /** Continue the computation with `f` and merge the trampolining - * of this computation with that of `f`. */ - final def flatMap[B](f: A => TailRec[B]): TailRec[B] = - this match { - case Done(a) => Call(() => f(a)) - case c@Call(_) => Cont(c, f) - // Take advantage of the monad associative law to optimize the size of the required stack - case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c.f(x) flatMap f) - } - - /** Returns either the next step of the tailcalling computation, - * or the result if there are no more steps. 
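The `NonFatal` extractor above is typically used as a catch-all that still lets fatal errors propagate; a minimal sketch:
{{{
import scala.util.control.NonFatal

object NonFatalDemo extends App {
  def safely[A](default: A)(body: => A): A =
    try body
    catch {
      // Matches ordinary exceptions, but not VirtualMachineError,
      // InterruptedException, LinkageError, ControlThrowable, ...
      case NonFatal(e) =>
        println(s"recovered from ${e.getClass.getSimpleName}")
        default
    }

  println(safely(-1)("123".toInt))  // 123
  println(safely(-1)("oops".toInt)) // prints the recovery message, then -1
}
}}}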
*/ - @annotation.tailrec final def resume: Either[() => TailRec[A], A] = this match { - case Done(a) => Right(a) - case Call(k) => Left(k) - case Cont(a, f) => a match { - case Done(v) => f(v).resume - case Call(k) => Left(() => k().flatMap(f)) - case Cont(b, g) => b.flatMap(x => g(x) flatMap f).resume - } - } - - /** Returns the result of the tailcalling computation. - */ - @annotation.tailrec final def result: A = this match { - case Done(a) => a - case Call(t) => t().result - case Cont(a, f) => a match { - case Done(v) => f(v).result - case Call(t) => t().flatMap(f).result - case Cont(b, g) => b.flatMap(x => g(x) flatMap f).result - } - } - } - - /** Internal class representing a tailcall */ - protected case class Call[A](rest: () => TailRec[A]) extends TailRec[A] - - /** Internal class representing the final result returned from a tailcalling - * computation */ - protected case class Done[A](value: A) extends TailRec[A] - - /** Internal class representing a continuation with function A => TailRec[B]. - * It is needed for the flatMap to be implemented. */ - protected case class Cont[A, B](a: TailRec[A], f: A => TailRec[B]) extends TailRec[B] - - /** Performs a tailcall - * @param rest the expression to be evaluated in the tailcall - * @return a `TailRec` object representing the expression `rest` - */ - def tailcall[A](rest: => TailRec[A]): TailRec[A] = Call(() => rest) - - /** Used to return final result from tailcalling computation - * @param `result` the result value - * @return a `TailRec` object representing a computation which immediately - * returns `result` - */ - def done[A](result: A): TailRec[A] = Done(result) - -} diff --git a/tests/scala2-library/src/library/scala/util/hashing/ByteswapHashing.scala b/tests/scala2-library/src/library/scala/util/hashing/ByteswapHashing.scala deleted file mode 100644 index 470479725bd4..000000000000 --- a/tests/scala2-library/src/library/scala/util/hashing/ByteswapHashing.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.hashing - - - - - - -/** A fast multiplicative hash by Phil Bagwell. - */ -final class ByteswapHashing[T] extends Hashing[T] { - - def hash(v: T) = byteswap32(v.##) - -} - - -object ByteswapHashing { - - private class Chained[T](h: Hashing[T]) extends Hashing[T] { - def hash(v: T) = byteswap32(h.hash(v)) - } - - /** Composes another `Hashing` with the Byteswap hash. - */ - def chain[T](h: Hashing[T]): Hashing[T] = new Chained(h) - -} diff --git a/tests/scala2-library/src/library/scala/util/hashing/Hashing.scala b/tests/scala2-library/src/library/scala/util/hashing/Hashing.scala deleted file mode 100644 index 884a0e23a7c8..000000000000 --- a/tests/scala2-library/src/library/scala/util/hashing/Hashing.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.hashing - -import scala.annotation.implicitNotFound - -/** `Hashing` is a trait whose instances each represent a strategy for hashing - * instances of a type. - * - * `Hashing`'s companion object defines a default hashing strategy for all - * objects - it calls their `##` method. 
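Beyond `result`, the `resume` method above lets a caller single-step the trampoline; a small sketch that counts the steps of a mutually recursive computation (the step-counting driver is just for illustration):
{{{
import scala.util.control.TailCalls._

object ResumeDemo extends App {
  def isEven(n: Int): TailRec[Boolean] =
    if (n == 0) done(true) else tailcall(isOdd(n - 1))
  def isOdd(n: Int): TailRec[Boolean] =
    if (n == 0) done(false) else tailcall(isEven(n - 1))

  // Drive the trampoline by hand: Left(step) means "more work to do",
  // Right(value) is the final answer.
  @annotation.tailrec
  def run[A](t: TailRec[A], steps: Int = 0): (A, Int) = t.resume match {
    case Right(a)   => (a, steps)
    case Left(more) => run(more(), steps + 1)
  }

  println(run(isEven(100000))) // (true,100000), without overflowing the stack
}
}}}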
- * - * Note: when using a custom `Hashing`, make sure to use it with the `Equiv` - * such that if any two objects are equal, then their hash codes must be equal. - * - * @since 2.10 - */ -@implicitNotFound(msg = "No implicit Hashing defined for ${T}.") -trait Hashing[T] extends Serializable { - def hash(x: T): Int -} - -object Hashing { - final class Default[T] extends Hashing[T] { - def hash(x: T) = x.## - } - - implicit def default[T]: Default[T] = new Default[T] - - def fromFunction[T](f: T => Int) = new Hashing[T] { - def hash(x: T) = f(x) - } -} diff --git a/tests/scala2-library/src/library/scala/util/hashing/MurmurHash3.scala b/tests/scala2-library/src/library/scala/util/hashing/MurmurHash3.scala deleted file mode 100644 index fa725903e319..000000000000 --- a/tests/scala2-library/src/library/scala/util/hashing/MurmurHash3.scala +++ /dev/null @@ -1,281 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util.hashing - -import java.lang.Integer.{ rotateLeft => rotl } - -private[hashing] class MurmurHash3 { - /** Mix in a block of data into an intermediate hash value. */ - final def mix(hash: Int, data: Int): Int = { - var h = mixLast(hash, data) - h = rotl(h, 13) - h * 5 + 0xe6546b64 - } - - /** May optionally be used as the last mixing step. Is a little bit faster than mix, - * as it does no further mixing of the resulting hash. For the last element this is not - * necessary as the hash is thoroughly mixed during finalization anyway. */ - final def mixLast(hash: Int, data: Int): Int = { - var k = data - - k *= 0xcc9e2d51 - k = rotl(k, 15) - k *= 0x1b873593 - - hash ^ k - } - - /** Finalize a hash to incorporate the length and make sure all bits avalanche. */ - final def finalizeHash(hash: Int, length: Int): Int = avalanche(hash ^ length) - - /** Force all bits of the hash to avalanche. Used for finalizing the hash. */ - private final def avalanche(hash: Int): Int = { - var h = hash - - h ^= h >>> 16 - h *= 0x85ebca6b - h ^= h >>> 13 - h *= 0xc2b2ae35 - h ^= h >>> 16 - - h - } - - /** Compute the hash of a product */ - final def productHash(x: Product, seed: Int): Int = { - val arr = x.productArity - // Case objects have the hashCode inlined directly into the - // synthetic hashCode method, but this method should still give - // a correct result if passed a case object. - if (arr == 0) { - x.productPrefix.hashCode - } - else { - var h = seed - var i = 0 - while (i < arr) { - h = mix(h, x.productElement(i).##) - i += 1 - } - finalizeHash(h, arr) - } - } - - /** Compute the hash of a string */ - final def stringHash(str: String, seed: Int): Int = { - var h = seed - var i = 0 - while (i + 1 < str.length) { - val data = (str.charAt(i) << 16) + str.charAt(i + 1) - h = mix(h, data) - i += 2 - } - if (i < str.length) h = mixLast(h, str.charAt(i).toInt) - finalizeHash(h, str.length) - } - - /** Compute a hash that is symmetric in its arguments - that is a hash - * where the order of appearance of elements does not matter. - * This is useful for hashing sets, for example. 
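A minimal sketch of pairing a custom `Hashing` with a matching `Equiv`, as the note above recommends (the case-insensitive strategy is only an example):
{{{
import scala.util.hashing.Hashing

object HashingDemo extends App {
  // Both the hashing strategy and the equivalence ignore case,
  // so equivalent keys always receive equal hash codes.
  val ciHash: Hashing[String] =
    Hashing.fromFunction((s: String) => s.toLowerCase.hashCode)
  val ciEquiv: Equiv[String] =
    Equiv.fromFunction((a: String, b: String) => a.equalsIgnoreCase(b))

  val (a, b) = ("Scala", "sCaLa")
  println(ciEquiv.equiv(a, b))              // true
  println(ciHash.hash(a) == ciHash.hash(b)) // true

  // The default strategy simply delegates to ##
  println(Hashing.default[String].hash("Scala") == "Scala".##) // true
}
}}}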
- */ - final def unorderedHash(xs: TraversableOnce[Any], seed: Int): Int = { - var a, b, n = 0 - var c = 1 - xs foreach { x => - val h = x.## - a += h - b ^= h - if (h != 0) c *= h - n += 1 - } - var h = seed - h = mix(h, a) - h = mix(h, b) - h = mixLast(h, c) - finalizeHash(h, n) - } - /** Compute a hash that depends on the order of its arguments. - */ - final def orderedHash(xs: TraversableOnce[Any], seed: Int): Int = { - var n = 0 - var h = seed - xs foreach { x => - h = mix(h, x.##) - n += 1 - } - finalizeHash(h, n) - } - - /** Compute the hash of an array. - */ - final def arrayHash[@specialized T](a: Array[T], seed: Int): Int = { - var h = seed - var i = 0 - while (i < a.length) { - h = mix(h, a(i).##) - i += 1 - } - finalizeHash(h, a.length) - } - - /** Compute the hash of a byte array. Faster than arrayHash, because - * it hashes 4 bytes at once. - */ - final def bytesHash(data: Array[Byte], seed: Int): Int = { - var len = data.length - var h = seed - - // Body - var i = 0 - while(len >= 4) { - var k = data(i + 0) & 0xFF - k |= (data(i + 1) & 0xFF) << 8 - k |= (data(i + 2) & 0xFF) << 16 - k |= (data(i + 3) & 0xFF) << 24 - - h = mix(h, k) - - i += 4 - len -= 4 - } - - // Tail - var k = 0 - if(len == 3) k ^= (data(i + 2) & 0xFF) << 16 - if(len >= 2) k ^= (data(i + 1) & 0xFF) << 8 - if(len >= 1) { - k ^= (data(i + 0) & 0xFF) - h = mixLast(h, k) - } - - // Finalization - finalizeHash(h, data.length) - } - - final def listHash(xs: scala.collection.immutable.List[_], seed: Int): Int = { - var n = 0 - var h = seed - var elems = xs - while (!elems.isEmpty) { - val head = elems.head - val tail = elems.tail - h = mix(h, head.##) - n += 1 - elems = tail - } - finalizeHash(h, n) - } -} - -/** - * An implementation of Austin Appleby's MurmurHash 3 algorithm - * (MurmurHash3_x86_32). This object contains methods that hash - * values of various types as well as means to construct `Hashing` - * objects. - * - * This algorithm is designed to generate well-distributed non-cryptographic - * hashes. It is designed to hash data in 32 bit chunks (ints). - * - * The mix method needs to be called at each step to update the intermediate - * hash value. For the last chunk to incorporate into the hash mixLast may - * be used instead, which is slightly faster. Finally finalizeHash needs to - * be called to compute the final hash value. - * - * This is based on the earlier MurmurHash3 code by Rex Kerr, but the - * MurmurHash3 algorithm was since changed by its creator Austin Appleby - * to remedy some weaknesses and improve performance. This represents the - * latest and supposedly final version of the algorithm (revision 136). 
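The ordered/unordered distinction above is directly observable through the public `MurmurHash3` front end; a short sketch (the seed is arbitrary):
{{{
import scala.util.hashing.MurmurHash3

object OrderedVsUnorderedDemo extends App {
  val xs   = List(1, 2, 3)
  val ys   = List(3, 2, 1)
  val seed = 0x12345678

  // Order-sensitive: permutations hash differently (with overwhelming probability)
  println(MurmurHash3.orderedHash(xs, seed) == MurmurHash3.orderedHash(ys, seed))     // false

  // Order-insensitive: suitable for sets and maps
  println(MurmurHash3.unorderedHash(xs, seed) == MurmurHash3.unorderedHash(ys, seed)) // true

  // Array and byte-array variants use the same mixing scheme
  println(MurmurHash3.arrayHash(Array(1, 2, 3), seed))
  println(MurmurHash3.bytesHash(Array[Byte](1, 2, 3), seed))
}
}}}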
- * - * @see [[https://github.com/aappleby/smhasher]] - */ -object MurmurHash3 extends MurmurHash3 { - final val arraySeed = 0x3c074a61 - final val stringSeed = 0xf7ca7fd2 - final val productSeed = 0xcafebabe - final val symmetricSeed = 0xb592f7ae - final val traversableSeed = 0xe73a8b15 - final val seqSeed = "Seq".hashCode - final val mapSeed = "Map".hashCode - final val setSeed = "Set".hashCode - - def arrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, arraySeed) - def bytesHash(data: Array[Byte]): Int = bytesHash(data, arraySeed) - def orderedHash(xs: TraversableOnce[Any]): Int = orderedHash(xs, symmetricSeed) - def productHash(x: Product): Int = productHash(x, productSeed) - def stringHash(x: String): Int = stringHash(x, stringSeed) - def unorderedHash(xs: TraversableOnce[Any]): Int = unorderedHash(xs, traversableSeed) - - private[scala] def wrappedArrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, seqSeed) - private[scala] def wrappedBytesHash(data: Array[Byte]): Int = bytesHash(data, seqSeed) - - /** To offer some potential for optimization. - */ - def seqHash(xs: scala.collection.Seq[_]): Int = xs match { - case xs: List[_] => listHash(xs, seqSeed) - case xs => orderedHash(xs, seqSeed) - } - - def mapHash(xs: scala.collection.Map[_, _]): Int = unorderedHash(xs, mapSeed) - def setHash(xs: scala.collection.Set[_]): Int = unorderedHash(xs, setSeed) - - class ArrayHashing[@specialized T] extends Hashing[Array[T]] { - def hash(a: Array[T]) = arrayHash(a) - } - - def arrayHashing[@specialized T] = new ArrayHashing[T] - - def bytesHashing = new Hashing[Array[Byte]] { - def hash(data: Array[Byte]) = bytesHash(data) - } - - def orderedHashing = new Hashing[TraversableOnce[Any]] { - def hash(xs: TraversableOnce[Any]) = orderedHash(xs) - } - - def productHashing = new Hashing[Product] { - def hash(x: Product) = productHash(x) - } - - def stringHashing = new Hashing[String] { - def hash(x: String) = stringHash(x) - } - - def unorderedHashing = new Hashing[TraversableOnce[Any]] { - def hash(xs: TraversableOnce[Any]) = unorderedHash(xs) - } - - /** All this trouble and foreach still appears faster. - * Leaving in place in case someone would like to investigate further. - */ - /** - def linearSeqHash(xs: scala.collection.LinearSeq[_], seed: Int): Int = { - var n = 0 - var h = seed - var elems = xs - while (elems.nonEmpty) { - h = mix(h, elems.head.##) - n += 1 - elems = elems.tail - } - finalizeHash(h, n) - } - - def indexedSeqHash(xs: scala.collection.IndexedSeq[_], seed: Int): Int = { - var n = 0 - var h = seed - val len = xs.length - while (n < len) { - h = mix(h, xs(n).##) - n += 1 - } - finalizeHash(h, n) - } - */ -} diff --git a/tests/scala2-library/src/library/scala/util/hashing/package.scala b/tests/scala2-library/src/library/scala/util/hashing/package.scala deleted file mode 100644 index 2c8e0154fc31..000000000000 --- a/tests/scala2-library/src/library/scala/util/hashing/package.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package util - - - - - - -package object hashing { - - /** Fast multiplicative hash with a nice distribution. - */ - def byteswap32(v: Int): Int = { - var hc = v * 0x9e3775cd - hc = java.lang.Integer.reverseBytes(hc) - hc * 0x9e3775cd - } - - /** Fast multiplicative hash with a nice distribution - * for 64-bit values. 
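A quick tour of the default-seeded convenience methods above, plus the `byteswap32` helper from the removed `hashing` package object:
{{{
import scala.util.hashing.{MurmurHash3, byteswap32}

object MurmurFrontEndDemo extends App {
  println(MurmurHash3.stringHash("hello"))
  println(MurmurHash3.productHash((1, "two", 3.0)))
  println(MurmurHash3.seqHash(List(1, 2, 3)))
  println(MurmurHash3.setHash(Set(1, 2, 3)) == MurmurHash3.setHash(Set(3, 2, 1))) // true: unordered

  // byteswap32: cheap bit scrambling, e.g. to decorrelate sequential keys
  println((0 to 4).map(byteswap32)) // very different-looking values for 0, 1, 2, 3, 4
}
}}}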
- */ - def byteswap64(v: Long): Long = { - var hc = v * 0x9e3775cd9e3775cdL - hc = java.lang.Long.reverseBytes(hc) - hc * 0x9e3775cd9e3775cdL - } - -} diff --git a/tests/scala2-library/src/library/scala/util/matching/Regex.scala b/tests/scala2-library/src/library/scala/util/matching/Regex.scala deleted file mode 100644 index 4822fe02b400..000000000000 --- a/tests/scala2-library/src/library/scala/util/matching/Regex.scala +++ /dev/null @@ -1,899 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -/** - * This package is concerned with regular expression (regex) matching against strings, - * with the main goal of pulling out information from those matches, or replacing - * them with something else. - * - * [[scala.util.matching.Regex]] is the class users instantiate to do regular expression matching. - * - * The companion object to [[scala.util.matching.Regex]] contains supporting members: - * * [[scala.util.matching.Regex.Match]] makes more information about a match available. - * * [[scala.util.matching.Regex.MatchIterator]] is used to iterate over matched strings. - * * [[scala.util.matching.Regex.MatchData]] is just a base trait for the above classes. - * * [[scala.util.matching.Regex.Groups]] extracts group from a [[scala.util.matching.Regex.Match]] - * without recomputing the match. - */ -package scala.util.matching - -import scala.collection.AbstractIterator -import java.util.regex.{ Pattern, Matcher } - -/** A regular expression is used to determine whether a string matches a pattern - * and, if it does, to extract or transform the parts that match. - * - * === Usage === - * This class delegates to the [[java.util.regex]] package of the Java Platform. - * See the documentation for [[java.util.regex.Pattern]] for details about - * the regular expression syntax for pattern strings. - * - * An instance of `Regex` represents a compiled regular expression pattern. - * Since compilation is expensive, frequently used `Regex`es should be constructed - * once, outside of loops and perhaps in a companion object. - * - * The canonical way to create a `Regex` is by using the method `r`, provided - * implicitly for strings: - * - * {{{ - * val date = raw"(\d{4})-(\d{2})-(\d{2})".r - * }}} - * - * Since escapes are not processed in multi-line string literals, using triple quotes - * avoids having to escape the backslash character, so that `"\\d"` can be written `"""\d"""`. - * The same result is achieved with certain interpolators, such as `raw"\d".r` or - * a custom interpolator `r"\d"` that also compiles the `Regex`. - * - * === Extraction === - * To extract the capturing groups when a `Regex` is matched, use it as - * an extractor in a pattern match: - * - * {{{ - * "2004-01-20" match { - * case date(year, month, day) => s"$year was a good year for PLs." - * } - * }}} - * - * To check only whether the `Regex` matches, ignoring any groups, - * use a sequence wildcard: - * - * {{{ - * "2004-01-20" match { - * case date(_*) => "It's a date!" - * } - * }}} - * - * That works because a `Regex` extractor produces a sequence of strings. - * Extracting only the year from a date could also be expressed with - * a sequence wildcard: - * - * {{{ - * "2004-01-20" match { - * case date(year, _*) => s"$year was a good year for PLs." - * } - * }}} - * - * In a pattern match, `Regex` normally matches the entire input. 
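The date examples above, condensed into one runnable snippet (the sample strings are illustrative):
{{{
object RegexExtractionDemo extends App {
  // Compile once; the raw interpolator avoids escaping backslashes
  val date = raw"(\d{4})-(\d{2})-(\d{2})".r

  val described = "2004-01-20" match {
    case date(year, month, day) => s"$year-$month-$day parsed"
    case date(_*)               => "a date, groups ignored"
    case _                      => "not a date"
  }
  println(described) // 2004-01-20 parsed

  // A string of the wrong shape falls through to the default case
  println("20-01-2004" match { case date(_*) => "date"; case _ => "not a date" }) // not a date
}
}}}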
- * However, an unanchored `Regex` finds the pattern anywhere - * in the input. - * - * {{{ - * val embeddedDate = date.unanchored - * "Date: 2004-01-20 17:25:18 GMT (10 years, 28 weeks, 5 days, 17 hours and 51 minutes ago)" match { - * case embeddedDate("2004", "01", "20") => "A Scala is born." - * } - * }}} - * - * === Find Matches === - * To find or replace matches of the pattern, use the various find and replace methods. - * For each method, there is a version for working with matched strings and - * another for working with `Match` objects. - * - * For example, pattern matching with an unanchored `Regex`, as in the previous example, - * can also be accomplished using `findFirstMatchIn`. The `findFirst` methods return an `Option` - * which is non-empty if a match is found, or `None` for no match: - * - * {{{ - * val dates = "Important dates in history: 2004-01-20, 1958-09-05, 2010-10-06, 2011-07-15" - * val firstDate = date.findFirstIn(dates).getOrElse("No date found.") - * val firstYear = for (m <- date.findFirstMatchIn(dates)) yield m.group(1) - * }}} - * - * To find all matches: - * - * {{{ - * val allYears = for (m <- date.findAllMatchIn(dates)) yield m.group(1) - * }}} - * - * To iterate over the matched strings, use `findAllIn`, which returns a special iterator - * that can be queried for the `MatchData` of the last match: - * - * {{{ - * val mi = date.findAllIn(dates) - * while (mi.hasNext) { - * val d = mi.next - * if (mi.group(1).toInt < 1960) println(s"$d: An oldie but goodie.") - * } - * }}} - * - * Although the `MatchIterator` returned by `findAllIn` is used like any `Iterator`, - * with alternating calls to `hasNext` and `next`, `hasNext` has the additional - * side effect of advancing the underlying matcher to the next unconsumed match. - * This effect is visible in the `MatchData` representing the "current match". - * - * {{{ - * val r = "(ab+c)".r - * val s = "xxxabcyyyabbczzz" - * r.findAllIn(s).start // 3 - * val mi = r.findAllIn(s) - * mi.hasNext // true - * mi.start // 3 - * mi.next() // "abc" - * mi.start // 3 - * mi.hasNext // true - * mi.start // 9 - * mi.next() // "abbc" - * }}} - * - * The example shows that methods on `MatchData` such as `start` will advance to - * the first match, if necessary. It also shows that `hasNext` will advance to - * the next unconsumed match, if `next` has already returned the current match. - * - * The current `MatchData` can be captured using the `matchData` method. - * Alternatively, `findAllMatchIn` returns an `Iterator[Match]`, where there - * is no interaction between the iterator and `Match` objects it has already produced. - * - * Note that `findAllIn` finds matches that don't overlap. (See [[findAllIn]] for more examples.) - * - * {{{ - * val num = raw"(\d+)".r - * val all = num.findAllIn("123").toList // List("123"), not List("123", "23", "3") - * }}} - * - * === Replace Text === - * Text replacement can be performed unconditionally or as a function of the current match: - * - * {{{ - * val redacted = date.replaceAllIn(dates, "XXXX-XX-XX") - * val yearsOnly = date.replaceAllIn(dates, m => m.group(1)) - * val months = (0 to 11).map { i => val c = Calendar.getInstance; c.set(2014, i, 1); f"$c%tb" } - * val reformatted = date.replaceAllIn(dates, _ match { case date(y,m,d) => f"${months(m.toInt - 1)} $d, $y" }) - * }}} - * - * Pattern matching the `Match` against the `Regex` that created it does not reapply the `Regex`. - * In the expression for `reformatted`, each `date` match is computed once. 
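The find/replace walkthrough above as one compact, runnable sketch:
{{{
object RegexFindReplaceDemo extends App {
  val date  = raw"(\d{4})-(\d{2})-(\d{2})".r
  val dates = "Important dates: 2004-01-20, 1958-09-05, 2010-10-06"

  println(date.findFirstIn(dates))                           // Some(2004-01-20)
  println(date.findFirstMatchIn(dates).map(_.group(1)))      // Some(2004)
  println(date.findAllMatchIn(dates).map(_.group(1)).toList) // List(2004, 1958, 2010)

  // Unanchored matching inside a larger string
  val embedded = date.unanchored
  println("Released on 2010-10-06." match { case embedded(y, _, _) => y; case _ => "?" }) // 2010

  // Unconditional and match-dependent replacement
  println(date.replaceAllIn(dates, "XXXX-XX-XX"))
  println(date.replaceAllIn(dates, m => m.group(1)))         // keep only the years
}
}}}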
But it is possible to apply a - * `Regex` to a `Match` resulting from a different pattern: - * - * {{{ - * val docSpree = """2011(?:-\d{2}){2}""".r - * val docView = date.replaceAllIn(dates, _ match { - * case docSpree() => "Historic doc spree!" - * case _ => "Something else happened" - * }) - * }}} - * - * @see [[java.util.regex.Pattern]] - * - * @author Thibaud Hottelier - * @author Philipp Haller - * @author Martin Odersky - * @version 1.1, 29/01/2008 - * - * @param pattern The compiled pattern - * @param groupNames A mapping from names to indices in capture groups - * - * @define replacementString - * In the replacement String, a dollar sign (`$`) followed by a number will be - * interpreted as a reference to a group in the matched pattern, with numbers - * 1 through 9 corresponding to the first nine groups, and 0 standing for the - * whole match. Any other character is an error. The backslash (`\`) character - * will be interpreted as an escape character and can be used to escape the - * dollar sign. Use `Regex.quoteReplacement` to escape these characters. - */ -@SerialVersionUID(-2094783597747625537L) -class Regex private[matching](val pattern: Pattern, groupNames: String*) extends Serializable { - outer => - - import Regex._ - - /** Compile a regular expression, supplied as a string, into a pattern that - * can be matched against inputs. - * - * If group names are supplied, they can be used this way: - * - * {{{ - * val namedDate = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") - * val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year" - * }}} - * - * Group names supplied to the constructor are preferred to inline group names - * when retrieving matched groups by name. Not all platforms support inline names. - * - * This constructor does not support options as flags, which must be - * supplied as inline flags in the pattern string: `(?idmsux-idmsux)`. - * - * @param regex The regular expression to compile. - * @param groupNames Names of capturing groups. - */ - def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*) - - /** Tries to match a [[java.lang.CharSequence]]. - * - * If the match succeeds, the result is a list of the matching - * groups (or a `null` element if a group did not match any input). - * If the pattern specifies no groups, then the result will be an empty list - * on a successful match. - * - * This method attempts to match the entire input by default; to find the next - * matching subsequence, use an unanchored `Regex`. 
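Group names supplied to the constructor, as described above, can then be used to extract by name; a brief sketch:
{{{
import scala.util.matching.Regex

object NamedGroupsDemo extends App {
  // Names map onto capture groups 1..n in order
  val namedDate = new Regex(raw"(\d{4})-(\d{2})-(\d{2})", "year", "month", "day")
  val dates     = "2011-07-15 and 2014-02-01"

  val years  = namedDate.findAllMatchIn(dates).map(_.group("year")).toList
  val months = namedDate.findAllMatchIn(dates).map(_.group("month")).toList

  println(years)  // List(2011, 2014)
  println(months) // List(07, 02)
}
}}}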
- * - * For example: - * - * {{{ - * val p1 = "ab*c".r - * val p1Matches = "abbbc" match { - * case p1() => true // no groups - * case _ => false - * } - * val p2 = "a(b*)c".r - * val p2Matches = "abbbc" match { - * case p2(_*) => true // any groups - * case _ => false - * } - * val numberOfB = "abbbc" match { - * case p2(b) => Some(b.length) // one group - * case _ => None - * } - * val p3 = "b*".r.unanchored - * val p3Matches = "abbbc" match { - * case p3() => true // find the b's - * case _ => false - * } - * val p4 = "a(b*)(c+)".r - * val p4Matches = "abbbcc" match { - * case p4(_*) => true // multiple groups - * case _ => false - * } - * val allGroups = "abbbcc" match { - * case p4(all @ _*) => all mkString "/" // "bbb/cc" - * case _ => "" - * } - * val cGroup = "abbbcc" match { - * case p4(_, c) => c - * case _ => "" - * } - * }}} - * - * @param s The string to match - * @return The matches - */ - def unapplySeq(s: CharSequence): Option[List[String]] = s match { - case null => None - case _ => - val m = pattern matcher s - if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group) - else None - } - - /** Tries to match the String representation of a [[scala.Char]]. - * - * If the match succeeds, the result is the first matching - * group if any groups are defined, or an empty Sequence otherwise. - * - * For example: - * - * {{{ - * val cat = "cat" - * // the case must consume the group to match - * val r = """(\p{Lower})""".r - * cat(0) match { case r(x) => true } - * cat(0) match { case r(_) => true } - * cat(0) match { case r(_*) => true } - * cat(0) match { case r() => true } // no match - * - * // there is no group to extract - * val r = """\p{Lower}""".r - * cat(0) match { case r(x) => true } // no match - * cat(0) match { case r(_) => true } // no match - * cat(0) match { case r(_*) => true } // matches - * cat(0) match { case r() => true } // matches - * - * // even if there are multiple groups, only one is returned - * val r = """((.))""".r - * cat(0) match { case r(_) => true } // matches - * cat(0) match { case r(_,_) => true } // no match - * }}} - * - * @param c The Char to match - * @return The match - */ - def unapplySeq(c: Char): Option[List[Char]] = { - val m = pattern matcher c.toString - if (runMatcher(m)) { - if (m.groupCount > 0) Some((m group 1).toList) else Some(Nil) - } else None - } - - /** Tries to match on a [[scala.util.matching.Regex.Match]]. - * - * A previously failed match results in None. - * - * If a successful match was made against the current pattern, then that result is used. - * - * Otherwise, this Regex is applied to the previously matched input, - * and the result of that match is used. - */ - def unapplySeq(m: Match): Option[List[String]] = - if (m == null || m.matched == null) None - else if (m.matcher.pattern == this.pattern) Some((1 to m.groupCount).toList map m.group) - else unapplySeq(m.matched) - - /** Tries to match target. 
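As the `unapplySeq(m: Match)` overload above notes, a `Match` can itself be matched: against the originating `Regex` the existing result is reused, while a different `Regex` re-examines the matched text. A short sketch (the log line is made up):
{{{
object MatchReuseDemo extends App {
  val date   = raw"(\d{4})-(\d{2})-(\d{2})".r
  val in2011 = raw"2011(?:-\d{2}){2}".r
  val text   = "Log: 2011-07-15, 2014-02-01"

  val tagged = date.findAllMatchIn(text).map {
    case in2011()      => "doc spree year"       // different Regex: the matched text is re-examined
    case date(y, _, _) => s"some other year: $y" // same Regex: groups reused, no re-computation
  }.toList

  println(tagged) // List(doc spree year, some other year: 2014)
}
}}}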
- * @param target The string to match - * @return The matches - */ - @deprecated("extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0") - def unapplySeq(target: Any): Option[List[String]] = target match { - case s: CharSequence => - val m = pattern matcher s - if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group) - else None - case m: Match => unapplySeq(m.matched) - case _ => None - } - - // @see UnanchoredRegex - protected def runMatcher(m: Matcher) = m.matches() - - /** Return all non-overlapping matches of this `Regex` in the given character - * sequence as a [[scala.util.matching.Regex.MatchIterator]], - * which is a special [[scala.collection.Iterator]] that returns the - * matched strings but can also be queried for more data about the last match, - * such as capturing groups and start position. - * - * A `MatchIterator` can also be converted into an iterator - * that returns objects of type [[scala.util.matching.Regex.Match]], - * such as is normally returned by `findAllMatchIn`. - * - * Where potential matches overlap, the first possible match is returned, - * followed by the next match that follows the input consumed by the - * first match: - * - * {{{ - * val hat = "hat[^a]+".r - * val hathaway = "hathatthattthatttt" - * val hats = hat.findAllIn(hathaway).toList // List(hath, hattth) - * val pos = hat.findAllMatchIn(hathaway).map(_.start).toList // List(0, 7) - * }}} - * - * To return overlapping matches, it is possible to formulate a regular expression - * with lookahead (`?=`) that does not consume the overlapping region. - * - * {{{ - * val madhatter = "(h)(?=(at[^a]+))".r - * val madhats = madhatter.findAllMatchIn(hathaway).map { - * case madhatter(x,y) => s"$x$y" - * }.toList // List(hath, hatth, hattth, hatttt) - * }}} - * - * Attempting to retrieve match information after exhausting the iterator - * results in [[java.lang.IllegalStateException]]. - * See [[scala.util.matching.Regex.MatchIterator]] for details. - * - * @param source The text to match against. - * @return A [[scala.util.matching.Regex.MatchIterator]] of matched substrings. - * @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}} - */ - def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames) - - /** Return all non-overlapping matches of this regexp in given character sequence as a - * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]]. - * - * @param source The text to match against. - * @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches. - * @example {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}} - */ - def findAllMatchIn(source: CharSequence): Iterator[Match] = { - val matchIterator = findAllIn(source) - new Iterator[Match] { - def hasNext = matchIterator.hasNext - def next: Match = { - matchIterator.next() - new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force - } - } - } - - /** Return an optional first matching string of this `Regex` in the given character sequence, - * or None if there is no match. - * - * @param source The text to match against. - * @return An [[scala.Option]] of the first matching string in the text. - * @example {{{"""\w+""".r findFirstIn "A simple example." 
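The overlap discussion above, condensed: `findAllIn` consumes input, so a zero-width lookahead is needed to report overlapping regions. A runnable sketch:
{{{
object OverlapDemo extends App {
  val hathaway = "hathatthattthatttt"

  // Non-overlapping: a potential match is skipped once its input has been consumed
  val hat = "hat[^a]+".r
  println(hat.findAllIn(hathaway).toList) // List(hath, hattth)

  // Overlapping: consume only "h" and capture the rest in a lookahead group
  val madhatter = "(h)(?=(at[^a]+))".r
  val overlapping =
    madhatter.findAllMatchIn(hathaway).map(m => m.group(1) + m.group(2)).toList
  println(overlapping) // List(hath, hatth, hattth, hatttt)
}
}}}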
foreach println // prints "A"}}} - */ - def findFirstIn(source: CharSequence): Option[String] = { - val m = pattern.matcher(source) - if (m.find) Some(m.group) else None - } - - /** Return an optional first match of this `Regex` in the given character sequence, - * or None if it does not exist. - * - * If the match is successful, the [[scala.util.matching.Regex.Match]] can be queried for - * more data. - * - * @param source The text to match against. - * @return A [[scala.Option]] of [[scala.util.matching.Regex.Match]] of the first matching string in the text. - * @example {{{("""[a-z]""".r findFirstMatchIn "A simple example.") map (_.start) // returns Some(2), the index of the first match in the text}}} - */ - def findFirstMatchIn(source: CharSequence): Option[Match] = { - val m = pattern.matcher(source) - if (m.find) Some(new Match(source, m, groupNames)) else None - } - - /** Return an optional match of this `Regex` at the beginning of the - * given character sequence, or None if it matches no prefix - * of the character sequence. - * - * Unlike `findFirstIn`, this method will only return a match at - * the beginning of the input. - * - * @param source The text to match against. - * @return A [[scala.Option]] of the matched prefix. - * @example {{{"""\p{Lower}""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}} - */ - def findPrefixOf(source: CharSequence): Option[String] = { - val m = pattern.matcher(source) - if (m.lookingAt) Some(m.group) else None - } - - /** Return an optional match of this `Regex` at the beginning of the - * given character sequence, or None if it matches no prefix - * of the character sequence. - * - * Unlike `findFirstMatchIn`, this method will only return a match at - * the beginning of the input. - * - * @param source The text to match against. - * @return A [[scala.Option]] of the [[scala.util.matching.Regex.Match]] of the matched string. - * @example {{{"""\w+""".r findPrefixMatchOf "A simple example." map (_.after) // returns Some(" simple example.")}}} - */ - def findPrefixMatchOf(source: CharSequence): Option[Match] = { - val m = pattern.matcher(source) - if (m.lookingAt) Some(new Match(source, m, groupNames)) else None - } - - /** Replaces all matches by a string. - * - * $replacementString - * - * @param target The string to match - * @param replacement The string that will replace each match - * @return The resulting string - * @example {{{"""\d+""".r replaceAllIn ("July 15", "") // returns "July "}}} - */ - def replaceAllIn(target: CharSequence, replacement: String): String = { - val m = pattern.matcher(target) - m.replaceAll(replacement) - } - - /** - * Replaces all matches using a replacer function. The replacer function takes a - * [[scala.util.matching.Regex.Match]] so that extra information can be obtained - * from the match. For example: - * - * {{{ - * import scala.util.matching.Regex - * val datePattern = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") - * val text = "From 2011-07-15 to 2011-07-17" - * val repl = datePattern replaceAllIn (text, m => s"${m group "month"}/${m group "day"}") - * }}} - * - * $replacementString - * - * @param target The string to match. - * @param replacer The function which maps a match to another string. - * @return The target string after replacements. 
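The prefix-oriented variants above only ever match at the start of the input; a quick sketch contrasting them with `findFirstIn`:
{{{
object PrefixDemo extends App {
  val word = raw"\w+".r
  val text = "A simple example."

  println(word.findFirstIn(text))    // Some(A)
  println(word.findPrefixOf(text))   // Some(A): the input happens to start with a word
  println(word.findPrefixOf("  hi")) // None: leading whitespace, so no prefix match
  println(word.findFirstIn("  hi"))  // Some(hi): findFirstIn searches anywhere

  // findPrefixMatchOf exposes the rest of the input via Match#after
  println(word.findPrefixMatchOf(text).map(_.after)) // Some( simple example.)
}
}}}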
- */ - def replaceAllIn(target: CharSequence, replacer: Match => String): String = { - val it = new Regex.MatchIterator(target, this, groupNames).replacementData - it foreach (md => it replace replacer(md)) - it.replaced - } - - /** - * Replaces some of the matches using a replacer function that returns an [[scala.Option]]. - * The replacer function takes a [[scala.util.matching.Regex.Match]] so that extra - * information can be obtained from the match. For example: - * - * {{{ - * import scala.util.matching.Regex._ - * - * val vars = Map("x" -> "a var", "y" -> """some $ and \ signs""") - * val text = "A text with variables %x, %y and %z." - * val varPattern = """%(\w+)""".r - * val mapper = (m: Match) => vars get (m group 1) map (quoteReplacement(_)) - * val repl = varPattern replaceSomeIn (text, mapper) - * }}} - * - * $replacementString - * - * @param target The string to match. - * @param replacer The function which optionally maps a match to another string. - * @return The target string after replacements. - */ - def replaceSomeIn(target: CharSequence, replacer: Match => Option[String]): String = { - val it = new Regex.MatchIterator(target, this, groupNames).replacementData - for (matchdata <- it ; replacement <- replacer(matchdata)) - it replace replacement - - it.replaced - } - - /** Replaces the first match by a string. - * - * $replacementString - * - * @param target The string to match - * @param replacement The string that will replace the match - * @return The resulting string - */ - def replaceFirstIn(target: CharSequence, replacement: String): String = { - val m = pattern.matcher(target) - m.replaceFirst(replacement) - } - - /** Splits the provided character sequence around matches of this regexp. - * - * @param toSplit The character sequence to split - * @return The array of strings computed by splitting the - * input around matches of this regexp - */ - def split(toSplit: CharSequence): Array[String] = - pattern.split(toSplit) - - /** Create a new Regex with the same pattern, but no requirement that - * the entire String matches in extractor patterns. - * - * Normally, matching on `date` behaves as though the pattern were - * enclosed in anchors, `"^pattern$"`. - * - * The unanchored `Regex` behaves as though those anchors were removed. - * - * Note that this method does not actually strip any matchers from the pattern. - * - * Calling `anchored` returns the original `Regex`. - * - * {{{ - * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored - * - * val date(year, month, day) = "Date 2011-07-15" // OK - * - * val copyright: String = "Date of this document: 2011-07-15" match { - * case date(year, month, day) => s"Copyright $year" // OK - * case _ => "No copyright" - * } - * }}} - * - * @return The new unanchored regex - */ - def unanchored: UnanchoredRegex = new Regex(pattern, groupNames: _*) with UnanchoredRegex { override def anchored = outer } - def anchored: Regex = this - - def regex: String = pattern.pattern - - /** The string defining the regular expression */ - override def toString = regex -} - -/** A [[Regex]] that finds the first match when used in a pattern match. - * - * @see [[Regex#unanchored]] - */ -trait UnanchoredRegex extends Regex { - override protected def runMatcher(m: Matcher) = m.find() - override def unanchored = this -} - -/** This object defines inner classes that describe - * regex matches and helper objects. - */ -object Regex { - - /** This class provides methods to access - * the details of a match. 
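A condensed, runnable version of the `replaceSomeIn` pattern above; note `quoteReplacement`, since replacement strings give `$` and `\` special meaning:
{{{
import scala.util.matching.Regex.{Match, quoteReplacement}

object ReplaceSomeDemo extends App {
  val vars = Map("x" -> "a var", "y" -> """some $ and \ signs""")
  val text = "A text with variables %x, %y and %z."
  val varPattern = raw"%(\w+)".r

  // Only substitute variables we actually know; unknown ones are left untouched
  val mapper = (m: Match) => vars.get(m.group(1)).map(quoteReplacement)

  println(varPattern.replaceSomeIn(text, mapper))
  // A text with variables a var, some $ and \ signs and %z.

  // replaceFirstIn and split round out the family
  println(varPattern.replaceFirstIn(text, "?")) // only %x is replaced
  println(varPattern.split(text).toList)        // the pieces between the variables
}
}}}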
- */ - trait MatchData { - - /** Basically, wraps a platform Matcher. */ - protected def matcher: Matcher - - /** The source from which the match originated */ - val source: CharSequence - - /** The names of the groups, or an empty sequence if none defined */ - val groupNames: Seq[String] - - /** The number of capturing groups in the pattern. - * (For a given successful match, some of those groups may not have matched any input.) - */ - def groupCount: Int - - /** The index of the first matched character, or -1 if nothing was matched */ - def start: Int - - /** The index of the first matched character in group `i`, - * or -1 if nothing was matched for that group. - */ - def start(i: Int): Int - - /** The index following the last matched character, or -1 if nothing was matched. */ - def end: Int - - /** The index following the last matched character in group `i`, - * or -1 if nothing was matched for that group. - */ - def end(i: Int): Int - - /** The matched string, or `null` if nothing was matched. */ - def matched: String = - if (start >= 0) source.subSequence(start, end).toString - else null - - /** The matched string in group `i`, - * or `null` if nothing was matched. - */ - def group(i: Int): String = - if (start(i) >= 0) source.subSequence(start(i), end(i)).toString - else null - - /** All capturing groups, i.e., not including group(0). */ - def subgroups: List[String] = (1 to groupCount).toList map group - - /** The char sequence before first character of match, - * or `null` if nothing was matched. - */ - def before: CharSequence = - if (start >= 0) source.subSequence(0, start) - else null - - /** The char sequence before first character of match in group `i`, - * or `null` if nothing was matched for that group. - */ - def before(i: Int): CharSequence = - if (start(i) >= 0) source.subSequence(0, start(i)) - else null - - /** Returns char sequence after last character of match, - * or `null` if nothing was matched. - */ - def after: CharSequence = - if (end >= 0) source.subSequence(end, source.length) - else null - - /** The char sequence after last character of match in group `i`, - * or `null` if nothing was matched for that group. - */ - def after(i: Int): CharSequence = - if (end(i) >= 0) source.subSequence(end(i), source.length) - else null - - private lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex - - /** Returns the group with the given name. - * - * Uses explicit group names when supplied; otherwise, - * queries the underlying implementation for inline named groups. - * Not all platforms support inline group names. - * - * @param id The group name - * @return The requested group - * @throws IllegalArgumentException if the requested group name is not defined - */ - def group(id: String): String = ( - if (groupNames.isEmpty) - matcher group id - else - nameToIndex.get(id) match { - case Some(index) => group(index) - case None => matcher group id - } - ) - - /** The matched string; equivalent to `matched.toString`. */ - override def toString = matched - } - - /** Provides information about a successful match. */ - class Match(val source: CharSequence, - protected[matching] val matcher: Matcher, - val groupNames: Seq[String]) extends MatchData { - - /** The index of the first matched character. */ - val start = matcher.start - - /** The index following the last matched character. */ - val end = matcher.end - - /** The number of subgroups. 
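The `MatchData` accessors above in action on a single successful match (the offsets in the comments are specific to this input):
{{{
object MatchDataDemo extends App {
  val date = raw"(\d{4})-(\d{2})-(\d{2})".r
  val text = "Released on 2011-07-15, apparently."

  for (m <- date.findFirstMatchIn(text)) {
    println(m.matched)              // 2011-07-15
    println((m.start, m.end))       // (12,22)
    println(m.subgroups)            // List(2011, 07, 15)
    println(m.group(2))             // 07
    println(m.before)               // "Released on "
    println(m.after)                // ", apparently."
    println((m.start(1), m.end(1))) // (12,16): bounds of the year group
  }
}
}}}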
*/ - def groupCount = matcher.groupCount - - private lazy val starts: Array[Int] = - ((0 to groupCount) map matcher.start).toArray - private lazy val ends: Array[Int] = - ((0 to groupCount) map matcher.end).toArray - - /** The index of the first matched character in group `i`. */ - def start(i: Int) = starts(i) - - /** The index following the last matched character in group `i`. */ - def end(i: Int) = ends(i) - - /** The match itself with matcher-dependent lazy vals forced, - * so that match is valid even once matcher is advanced. - */ - def force: this.type = { starts; ends; this } - } - - /** An extractor object for Matches, yielding the matched string. - * - * This can be used to help writing replacer functions when you - * are not interested in match data. For example: - * - * {{{ - * import scala.util.matching.Regex.Match - * """\w+""".r replaceAllIn ("A simple example.", _ match { case Match(s) => s.toUpperCase }) - * }}} - * - */ - object Match { - def unapply(m: Match): Some[String] = Some(m.matched) - } - - /** An extractor object that yields the groups in the match. Using this extractor - * rather than the original `Regex` ensures that the match is not recomputed. - * - * {{{ - * import scala.util.matching.Regex.Groups - * - * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r - * val text = "The doc spree happened on 2011-07-15." - * val day = date replaceAllIn(text, _ match { case Groups(_, month, day) => s"$month/$day" }) - * }}} - */ - object Groups { - def unapplySeq(m: Match): Option[Seq[String]] = if (m.groupCount > 0) Some(1 to m.groupCount map m.group) else None - } - - /** A class to step through a sequence of regex matches. - * - * This is an iterator that returns the matched strings. - * - * Queries about match data pertain to the current state of the underlying - * matcher, which is advanced by calling `hasNext` or `next`. - * - * When matches are exhausted, queries about match data will throw - * [[java.lang.IllegalStateException]]. - * - * @see [[java.util.regex.Matcher]] - */ - class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String]) - extends AbstractIterator[String] with Iterator[String] with MatchData { self => - - protected[Regex] val matcher = regex.pattern.matcher(source) - - // 0 = not yet matched, 1 = matched, 2 = advanced to match, 3 = no more matches - private[this] var nextSeen = 0 - - /** Return true if `next` will find a match. - * As a side effect, advance the underlying matcher if necessary; - * queries about the current match data pertain to the underlying matcher. - */ - def hasNext: Boolean = { - nextSeen match { - case 0 => nextSeen = if (matcher.find()) 1 else 3 - case 1 => () - case 2 => nextSeen = 0 ; hasNext - case 3 => () - } - nextSeen == 1 // otherwise, 3 - } - - /** The next matched substring of `source`. - * As a side effect, advance the underlying matcher if necessary. - */ - def next(): String = { - nextSeen match { - case 0 => if (!hasNext) throw new NoSuchElementException ; next() - case 1 => nextSeen = 2 - case 2 => nextSeen = 0 ; next() - case 3 => throw new NoSuchElementException - } - matcher.group - } - - /** Report emptiness. */ - override def toString = super[AbstractIterator].toString - - // ensure we're at a match - private[this] def ensure(): Unit = nextSeen match { - case 0 => if (!hasNext) throw new IllegalStateException - case 1 => () - case 2 => () - case 3 => throw new IllegalStateException - } - - /** The index of the first matched character. 
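The `Match` and `Groups` extractors above avoid recomputing a match inside a replacer function; a brief sketch:
{{{
import scala.util.matching.Regex.{Groups, Match}

object ExtractorDemo extends App {
  val date = raw"(\d{4})-(\d{2})-(\d{2})".r
  val text = "The doc spree happened on 2011-07-15."

  // Groups: reuse the existing match data, no re-matching against the Regex
  println(date.replaceAllIn(text, { case Groups(_, month, day) => s"$month/$day" }))
  // The doc spree happened on 07/15.

  // Match: when only the matched text matters
  println(raw"\w+".r.replaceAllIn("A simple example.", { case Match(s) => s.toUpperCase }))
  // A SIMPLE EXAMPLE.
}
}}}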
*/ - def start: Int = { ensure() ; matcher.start } - - /** The index of the first matched character in group `i`. */ - def start(i: Int): Int = { ensure() ; matcher.start(i) } - - /** The index of the last matched character. */ - def end: Int = { ensure() ; matcher.end } - - /** The index following the last matched character in group `i`. */ - def end(i: Int): Int = { ensure() ; matcher.end(i) } - - /** The number of subgroups. */ - def groupCount = { ensure() ; matcher.groupCount } - - /** Convert to an iterator that yields MatchData elements instead of Strings. */ - def matchData: Iterator[Match] = new AbstractIterator[Match] { - def hasNext = self.hasNext - def next = { self.next(); new Match(source, matcher, groupNames).force } - } - - /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support. */ - private[matching] def replacementData = new AbstractIterator[Match] with Replacement { - def matcher = self.matcher - def hasNext = self.hasNext - def next = { self.next(); new Match(source, matcher, groupNames).force } - } - } - - /** - * A trait able to build a string with replacements assuming it has a matcher. - * Meant to be mixed in with iterators. - */ - private[matching] trait Replacement { - protected def matcher: Matcher - - private val sb = new java.lang.StringBuffer - - def replaced = { - val newsb = new java.lang.StringBuffer(sb) - matcher.appendTail(newsb) - newsb.toString - } - - def replace(rs: String) = matcher.appendReplacement(sb, rs) - } - - /** Quotes strings to be used literally in regex patterns. - * - * All regex metacharacters in the input match themselves literally in the output. - * - * @example {{{List("US$", "CAN$").map(Regex.quote).mkString("|").r}}} - */ - def quote(text: String): String = Pattern quote text - - /** Quotes replacement strings to be used in replacement methods. - * - * Replacement methods give special meaning to backslashes (`\`) and - * dollar signs (`$`) in replacement strings, so they are not treated - * as literals. This method escapes these characters so the resulting - * string can be used as a literal replacement representing the input - * string. - * - * @param text The string one wishes to use as literal replacement. - * @return A string that can be used to replace matches with `text`. - * @example {{{"CURRENCY".r.replaceAllIn(input, Regex quoteReplacement "US$")}}} - */ - def quoteReplacement(text: String): String = Matcher quoteReplacement text -} diff --git a/tests/scala2-library/src/library/scala/volatile.scala b/tests/scala2-library/src/library/scala/volatile.scala deleted file mode 100644 index c612732329f2..000000000000 --- a/tests/scala2-library/src/library/scala/volatile.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.annotation.meta._ - -@field -class volatile extends scala.annotation.StaticAnnotation
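Finally, `Regex.quote` above lets strings containing metacharacters be matched literally, while `quoteReplacement` protects the replacement side; a tiny sketch:
{{{
import scala.util.matching.Regex

object QuoteDemo extends App {
  // "US$" and "C++" contain regex metacharacters; quote makes them literal
  val currencies = List("US$", "C++").map(Regex.quote).mkString("|").r
  val text = "Paid 5 US$ for a C++ book."
  println(currencies.findAllIn(text).toList) // List(US$, C++)

  // On the replacement side, '$' and '\' must be escaped with quoteReplacement
  println(raw"US\$$".r.replaceAllIn(text, Regex.quoteReplacement("US$ (about 4.6 EUR)")))
  // Paid 5 US$ (about 4.6 EUR) for a C++ book.
}
}}}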