Skip to content

Commit dcddc1f

Browse files
authored
Merge pull request #1326 from szeiger/wip/consistent-analysis-format
New analysis format
2 parents 133cb19 + 1d296b2 commit dcddc1f

File tree

13 files changed

+1942
-2
lines changed

13 files changed

+1942
-2
lines changed

.github/workflows/ci.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -74,10 +74,10 @@ jobs:
7474
if: ${{ github.event_name == 'pull_request' && matrix.jobtype == 4 }}
7575
shell: bash
7676
run: |
77-
sbt -v -Dfile.encoding=UTF-8 "-Dbenchmark.pattern=.*Scalac.*" "runBenchmarks"
77+
sbt -v -Dfile.encoding=UTF-8 "-Dbenchmark.pattern=.*Scalac.*" "zincBenchmarks/jmh:clean" "runBenchmarks"
7878
- name: Benchmark (Shapeless) against Develop Branch (5)
7979
if: ${{ github.event_name == 'pull_request' && matrix.jobtype == 5 }}
8080
shell: bash
8181
run: |
82-
sbt -v -Dfile.encoding=UTF-8 "-Dbenchmark.pattern=.*Shapeless.*" "runBenchmarks"
82+
sbt -v -Dfile.encoding=UTF-8 "-Dbenchmark.pattern=.*Shapeless.*" "zincBenchmarks/jmh:clean" "runBenchmarks"
8383
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,176 @@
1+
package xsbt
2+
3+
import java.io.File
4+
import java.util.concurrent.TimeUnit
5+
import scala.collection.mutable
6+
7+
import org.openjdk.jmh.annotations._
8+
import org.openjdk.jmh.infra.Blackhole
9+
import sbt.internal.inc.consistent._
10+
import sbt.internal.inc.{ Analysis, FileAnalysisStore }
11+
import sbt.io.IO
12+
import xsbti.compile.analysis.ReadWriteMappers
13+
import xsbti.compile.{ AnalysisContents, AnalysisStore }
14+
15+
/**
 * JMH benchmark comparing the read/write performance of the analysis
 * persistence formats: the classic binary and text `FileAnalysisStore`s and
 * the new `ConsistentFileAnalysisStore` (with and without sorting).
 *
 * Reference analysis files for the "compiler", "reflect" and "library" data
 * sets are copied into a scratch directory during setup; the write benchmarks
 * re-serialize the analyses cached from setup, the read benchmarks parse the
 * files written during setup.
 */
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(1)
@Threads(1)
@Warmup(iterations = 5)
@Measurement(iterations = 5)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
class AnalysisFormatBenchmark {

  // Scratch directory holding all benchmark files; deleted in tearDown.
  var temp: File = _
  // Names of the pre-recorded analysis data sets, expected as <set>.zip files.
  val sets = IndexedSeq("compiler", "reflect", "library")
  // Analyses parsed once during setup and reused by every write benchmark.
  var cached: Map[String, AnalysisContents] = _

  @Setup
  def setup(): Unit = {
    this.temp = IO.createTemporaryDirectory
    // Copy the checked-in reference files into the scratch directory so the
    // benchmarks never touch the originals.
    sets.foreach { s =>
      // NOTE(review): relative path assumes the JMH working directory is the
      // benchmark module root — confirm when moving this file.
      val f = new File("../../../test-data", s"${s}.zip")
      assert(f.exists())
      val f2 = new File(temp, f.getName)
      IO.copyFile(f, f2)
      assert(f2.exists())
    }
    // Parse the binary reference files once; all write benchmarks reuse this.
    this.cached = readAll("", FileAnalysisStore.binary(_))
    // Pre-write the reference files that the read benchmarks will parse.
    writeAll("-ref-text", FileAnalysisStore.text(_), cached)
    // writeAll("-ref-ctext", ConsistentFileAnalysisStore.text(_, ReadWriteMappers.getEmptyMappers), cached)
    writeAll(
      "-ref-cbin",
      ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers),
      cached
    )
    writeAll(
      "-ref-cbin-nosort",
      ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers, sort = false),
      cached
    )
    // Report on-disk sizes so format compactness can be compared alongside speed.
    println("Sizes:")
    temp.listFiles().foreach { p => println(s"$p: ${p.length()}") }
    val cbinTotal = temp.listFiles().filter(_.getName.endsWith("-cbin.zip")).map(_.length()).sum
    println(s"cbin total = $cbinTotal, ${cbinTotal / 1024}k")
    val cbinNoSortTotal =
      temp.listFiles().filter(_.getName.endsWith("-cbin-nosort.zip")).map(_.length()).sum
    println(s"cbin-nosort total = $cbinNoSortTotal, ${cbinNoSortTotal / 1024}k")
  }

  @TearDown
  def tearDown(): Unit = {
    if (temp != null) IO.delete(temp)
  }

  @Benchmark
  def readBinary(bh: Blackhole): Unit = bh.consume(readAll("", FileAnalysisStore.binary(_)))

  @Benchmark
  def readText(bh: Blackhole): Unit = bh.consume(readAll("-ref-text", FileAnalysisStore.text(_)))

  @Benchmark
  def readConsistentBinary(bh: Blackhole): Unit =
    bh.consume(
      readAll("-ref-cbin", ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers))
    )

  @Benchmark
  def writeBinary(bh: Blackhole): Unit =
    bh.consume(writeAll("-test-bin", FileAnalysisStore.binary(_), cached))

  @Benchmark
  def writeText(bh: Blackhole): Unit =
    bh.consume(writeAll("-test-text", FileAnalysisStore.text(_), cached))

  @Benchmark
  def writeConsistentBinary(bh: Blackhole): Unit =
    bh.consume(
      writeAll(
        "-test-cbin",
        ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers),
        cached
      )
    )

  @Benchmark
  def writeConsistentBinaryNoSort(bh: Blackhole): Unit =
    bh.consume(
      writeAll(
        "-test-cbin-nosort",
        ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers, sort = false),
        cached
      )
    )

  /** Serialization-only baseline: formats every analysis into a counting sink
   * (no I/O), isolating the cost of `ConsistentAnalysisFormat` itself. */
  @Benchmark
  def writeNull(bh: Blackhole): Unit = {
    cached.foreach {
      case (_, a) => // set name unused; `_` avoids an unused-binding warning
        val ser = new NullSerializer
        val af = new ConsistentAnalysisFormat(ReadWriteMappers.getEmptyMappers, sort = true)
        af.write(ser, a.getAnalysis, a.getMiniSetup)
        bh.consume(ser.count)
    }
  }

  /** Same as [[writeNull]] but without sorting, to measure the sort overhead. */
  @Benchmark
  def writeNullNoSort(bh: Blackhole): Unit = {
    cached.foreach {
      case (_, a) =>
        val ser = new NullSerializer
        val af = new ConsistentAnalysisFormat(ReadWriteMappers.getEmptyMappers, sort = false)
        af.write(ser, a.getAnalysis, a.getMiniSetup)
        bh.consume(ser.count)
    }
  }

  /** Reads every data set from `<set><suffix>.zip` using the given store factory. */
  def readAll(suffix: String, store: File => AnalysisStore): Map[String, AnalysisContents] =
    sets.iterator.map(s => (s, read(s, suffix, store))).toMap

  /** Writes every analysis in `map` to `<set><suffix>.zip` using the given store factory. */
  def writeAll(
      suffix: String,
      store: File => AnalysisStore,
      map: Map[String, AnalysisContents]
  ): Unit =
    map.foreach { case (s, a) => write(s, suffix, store, a) }

  def read(set: String, suffix: String, store: File => AnalysisStore): AnalysisContents = {
    val api = store((new File(temp, s"${set}${suffix}.zip"))).unsafeGet()
    // Sanity check that APIs were actually deserialized, not lazily skipped.
    assert(api.getAnalysis.asInstanceOf[Analysis].apis.internal.head._2.api() != null)
    api
  }

  def write(
      set: String,
      suffix: String,
      store: File => AnalysisStore,
      analysis: AnalysisContents
  ): Unit = {
    // Only analyses that store APIs exercise the full serialization path.
    assert(analysis.getMiniSetup.storeApis())
    val f = new File(temp, s"${set}${suffix}.zip")
    IO.delete(f)
    store(f).set(analysis)
    assert(f.exists())
  }
}
156+
157+
/**
 * A `Serializer` sink that discards all output and only counts how many
 * values and structural markers would be written. Used by the `writeNull*`
 * benchmarks to measure pure formatting cost without any I/O.
 */
class NullSerializer extends Serializer {
  // Strings already emitted. The original used a HashMap with key == value,
  // i.e. a set in disguise; a HashSet expresses the intent directly.
  private[this] val strings = mutable.HashSet.empty[String]
  private[this] var _count = 0
  /** Total number of counted write operations. */
  def count: Int = _count
  def startBlock(name: String): Unit = _count += 1
  def startArray(name: String, length: Int): Unit = _count += 1
  def endBlock(): Unit = _count += 1
  def endArray(): Unit = _count += 1
  // `add` returns true only on first insertion, so each distinct string is
  // counted once — mirroring the deduplication done by the real formats.
  def string(s: String): Unit = if (strings.add(s)) _count += 1
  def bool(b: Boolean): Unit = _count += 1
  def int(i: Int): Unit = _count += 1
  def byte(b: Byte): Unit = _count += 1
  def long(l: Long): Unit = _count += 1
  def end(): Unit = _count += 1
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
package sbt.internal.inc.consistent
2+
3+
import java.util.Arrays
4+
import scala.collection.{ MapLike, SetLike, SortedMap, SortedMapLike }
5+
import scala.collection.generic.{
6+
CanBuildFrom,
7+
GenericTraversableTemplate,
8+
MapFactory,
9+
SeqFactory,
10+
SetFactory,
11+
SortedMapFactory
12+
}
13+
14+
// some simple compatibility shims for 2.12 so we don't need to depend on collection-compat
15+
// Compatibility shims so code written against the Scala 2.13 collections API
// also compiles on 2.12 without depending on scala-collection-compat.
// NOTE(review): the implicit signatures below drive implicit resolution at
// every call site — keep them byte-for-byte stable.
object Compat {
  // 2.13's Factory emulated as a CanBuildFrom whose "source" type is Nothing
  // (the source collection is irrelevant when building from scratch).
  type Factory[-A, +C] = CanBuildFrom[Nothing, A, C]

  // Lets a 2.13-style call like `TreeMap` (a SortedMapFactory) be used where
  // a Factory[(K, V), CC[K, V]] is expected.
  implicit def sortedMapFactoryToCBF[CC[A, B] <: SortedMap[A, B] with SortedMapLike[
    A,
    B,
    CC[A, B]
  ], K: Ordering, V](f: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] =
    new f.SortedMapCanBuildFrom

  // Same bridge for unsorted map companions (e.g. Map, HashMap).
  implicit def mapFactoryToCBF[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]], K, V](
      f: MapFactory[CC]
  ): Factory[(K, V), CC[K, V]] =
    new f.MapCanBuildFrom

  // Bridge for sequence companions (e.g. List, Vector, ArrayBuffer).
  implicit def seqFactoryToCBF[CC[X] <: Seq[X] with GenericTraversableTemplate[X, CC], E](
      f: SeqFactory[CC]
  ): Factory[E, CC[E]] =
    new f.GenericCanBuildFrom

  // Bridge for set companions (e.g. Set, HashSet).
  implicit def setFactoryToCBF[CC[X] <: Set[X] with SetLike[X, CC[X]], E](f: SetFactory[CC])
      : Factory[E, CC[E]] =
    f.setCanBuildFrom

  // Provides 2.13's `factory.newBuilder` on the emulated Factory type.
  implicit class FactoryOps[-A, +C](private val factory: Factory[A, C]) {
    def newBuilder: scala.collection.mutable.Builder[A, C] = factory()
  }

  // 2.13's IterableOnce corresponds to 2.12's TraversableOnce.
  type IterableOnce[+E] = TraversableOnce[E]

  // Provides 2.13's `.iterator` on TraversableOnce. An Iterator is returned
  // as-is; anything else is assumed to be an Iterable.
  // NOTE(review): a TraversableOnce that is neither an Iterator nor an
  // Iterable would fail the cast at runtime — presumably never happens in
  // this codebase; verify against callers.
  implicit class IterableOnceOps[+E](private val it: IterableOnce[E]) {
    def iterator: Iterator[E] = it match {
      case it: Iterator[?] => it.asInstanceOf[Iterator[E]]
      case it => it.asInstanceOf[Iterable[E]].iterator
    }
  }

  // Provides 2.13's `sortInPlaceBy` on arrays of AnyRef via java.util.Arrays.
  implicit class ArrayOps[A <: AnyRef](private val a: Array[A]) {
    def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): Unit = Arrays.sort(a, ord on f)
  }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
package sbt.internal.inc.consistent
2+
3+
// Scala 2.13+ variant of the compatibility shims: the standard library
// already provides everything needed, so only the Factory alias remains
// (keeping call sites identical across both Scala versions).
object Compat {
  type Factory[-A, +C] = scala.collection.Factory[A, C]
}

0 commit comments

Comments
 (0)