add jmh benchmarking

This commit is contained in:
Jörg Prante 2020-10-21 11:30:12 +02:00
parent 9a15022520
commit 5a0fe36e18
8 changed files with 596 additions and 4 deletions

View file

@@ -1,3 +1,20 @@
// JMH benchmarking setup: a dedicated 'jmh' source set, the JMH
// dependencies, and a task to run the benchmarks.
sourceSets {
    jmh {
        java.srcDirs = ['src/jmh/java']
        resources.srcDirs = ['src/jmh/resources']
        // benchmarks compile against the production classes
        compileClasspath += sourceSets.main.runtimeClasspath
    }
}

dependencies {
    api project(':datastructures-common')
    // NOTE: the jmh dependencies must live inside the dependencies block;
    // in the original they dangled after its closing brace (stray '}').
    jmhImplementation 'org.openjdk.jmh:jmh-core:1.21'
    jmhAnnotationProcessor 'org.openjdk.jmh:jmh-generator-annprocess:1.21'
}

task jmh(type: JavaExec, group: 'jmh', dependsOn: jmhClasses) {
    main = 'org.openjdk.jmh.Main'
    classpath = sourceSets.jmh.compileClasspath + sourceSets.jmh.runtimeClasspath
}

classes.finalizedBy(jmhClasses)

View file

@@ -0,0 +1,84 @@
package org.xbib.datastructures.tiny.jmh;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.xbib.datastructures.tiny.TinySet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
/**
 * JMH throughput benchmark that measures bulk element insertion into
 * several JDK collection implementations and into {@link TinySet}.
 * Each benchmark method builds and returns a fresh collection so that
 * JMH consumes the result and the work cannot be dead-code-eliminated.
 */
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 10)
@Fork(1)
@Measurement(iterations = 10)
public class CollectionAdditionTest {

    // number of elements inserted per benchmark call
    private static final int COLLECTION_SIZE = 1_000_000;

    @Benchmark
    public List<Integer> arrayList() {
        List<Integer> result = new ArrayList<>();
        for (int value = 0; value < COLLECTION_SIZE; value++) {
            result.add(value);
        }
        return result;
    }

    @Benchmark
    public List<Integer> linkedList() {
        List<Integer> result = new LinkedList<>();
        for (int value = 0; value < COLLECTION_SIZE; value++) {
            result.add(value);
        }
        return result;
    }

    @Benchmark
    public Set<Integer> hashSet() {
        Set<Integer> result = new HashSet<>();
        for (int value = 0; value < COLLECTION_SIZE; value++) {
            result.add(value);
        }
        return result;
    }

    @Benchmark
    public Set<Integer> linkedHashSet() {
        Set<Integer> result = new LinkedHashSet<>();
        for (int value = 0; value < COLLECTION_SIZE; value++) {
            result.add(value);
        }
        return result;
    }

    @Benchmark
    public Set<Integer> treeSet() {
        Set<Integer> result = new TreeSet<>();
        for (int value = 0; value < COLLECTION_SIZE; value++) {
            result.add(value);
        }
        return result;
    }

    @Benchmark
    public Set<Integer> tinySet() {
        TinySet.Builder<Integer> builder = TinySet.builder();
        for (int value = 0; value < COLLECTION_SIZE; value++) {
            builder.add(value);
        }
        return builder.build();
    }
}

View file

@@ -0,0 +1,97 @@
package org.xbib.datastructures.tiny.jmh;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.xbib.datastructures.tiny.TinySet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.TreeSet;
/**
 * JMH throughput benchmark that measures element removal from several
 * JDK collection implementations and from the {@link TinySet} builder.
 *
 * <p>The collections are re-populated before <em>every</em> benchmark
 * invocation ({@code Level.Invocation}): removal empties them, so with the
 * original per-trial setup every invocation after the first measured
 * removal from an already-empty collection. Per-invocation setup has
 * measurable overhead, but it is the only way to make each measurement
 * remove from a full collection.</p>
 */
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 10)
@Fork(1)
@Measurement(iterations = 10)
public class CollectionRemovalTest {

    // number of elements present before each removal run
    private static final int COLLECTION_SIZE = 25_000;

    private ArrayList<Integer> arrayList;

    private LinkedList<Integer> linkedList;

    private HashSet<Integer> hashSet;

    private LinkedHashSet<Integer> linkedHashSet;

    private TreeSet<Integer> treeSet;

    private TinySet.Builder<Integer> tinySet;

    @Setup(Level.Invocation)
    public void setup() {
        arrayList = new ArrayList<>();
        linkedList = new LinkedList<>();
        hashSet = new HashSet<>();
        linkedHashSet = new LinkedHashSet<>();
        treeSet = new TreeSet<>();
        tinySet = TinySet.builder();
        for (int i = 0; i < COLLECTION_SIZE; i++) {
            arrayList.add(i);
            linkedList.add(i);
            hashSet.add(i);
            linkedHashSet.add(i);
            treeSet.add(i);
            tinySet.add(i);
        }
    }

    @Benchmark
    public void arrayList(Blackhole bh) {
        // remove from the highest index downwards: removing index i from a
        // shrinking list with an ascending index would throw
        // IndexOutOfBoundsException once i reaches the current size
        for (int i = COLLECTION_SIZE - 1; i >= 0; i--) {
            bh.consume(arrayList.remove(i));
        }
    }

    @Benchmark
    public void linkedList(Blackhole bh) {
        // descending index for the same reason as arrayList above
        for (int i = COLLECTION_SIZE - 1; i >= 0; i--) {
            bh.consume(linkedList.remove(i));
        }
    }

    @Benchmark
    public void hashSet(Blackhole bh) {
        // Set.remove(Object) removes by value, so ascending order is fine;
        // results are fed to the Blackhole so the JIT cannot eliminate them
        for (int i = 0; i < COLLECTION_SIZE; i++) {
            bh.consume(hashSet.remove(i));
        }
    }

    @Benchmark
    public void linkedHashSet(Blackhole bh) {
        for (int i = 0; i < COLLECTION_SIZE; i++) {
            bh.consume(linkedHashSet.remove(i));
        }
    }

    @Benchmark
    public void treeSet(Blackhole bh) {
        for (int i = 0; i < COLLECTION_SIZE; i++) {
            bh.consume(treeSet.remove(i));
        }
    }

    @Benchmark
    public void tinySet(Blackhole bh) {
        // NOTE(review): removal goes through the builder, not a built
        // TinySet — presumably TinySet itself is immutable; confirm
        for (int i = 0; i < COLLECTION_SIZE; i++) {
            bh.consume(tinySet.remove(i));
        }
    }
}

View file

@@ -0,0 +1,100 @@
package org.xbib.datastructures.tiny.jmh;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.xbib.datastructures.tiny.TinySet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.TreeSet;
/**
 * JMH throughput benchmark that measures element retrieval: indexed
 * {@code get} for the list types, full iteration for the set types
 * (including {@link TinySet}). Collections are populated once per trial;
 * retrieval does not mutate them, so per-trial setup is sufficient.
 */
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 10)
@Fork(1)
@Measurement(iterations = 10)
public class CollectionRetrievalTest {

    // number of elements each collection holds
    private static final int COLLECTION_SIZE = 25_000;

    private ArrayList<Integer> arrayList;

    private LinkedList<Integer> linkedList;

    private HashSet<Integer> hashSet;

    private LinkedHashSet<Integer> linkedHashSet;

    private TreeSet<Integer> treeSet;

    private TinySet<Integer> tinySet;

    @Setup(Level.Trial)
    public void setup() {
        arrayList = new ArrayList<>();
        linkedList = new LinkedList<>();
        hashSet = new HashSet<>();
        linkedHashSet = new LinkedHashSet<>();
        treeSet = new TreeSet<>();
        TinySet.Builder<Integer> builder = TinySet.builder();
        for (int value = 0; value < COLLECTION_SIZE; value++) {
            arrayList.add(value);
            linkedList.add(value);
            hashSet.add(value);
            linkedHashSet.add(value);
            treeSet.add(value);
            builder.add(value);
        }
        tinySet = builder.build();
    }

    @Benchmark
    public void arrayList(Blackhole bh) {
        for (int index = 0; index < COLLECTION_SIZE; index++) {
            bh.consume(arrayList.get(index));
        }
    }

    @Benchmark
    public void linkedList(Blackhole bh) {
        for (int index = 0; index < COLLECTION_SIZE; index++) {
            bh.consume(linkedList.get(index));
        }
    }

    @Benchmark
    public void hashSetIterator(Blackhole bh) {
        for (Integer value : hashSet) {
            bh.consume(value);
        }
    }

    @Benchmark
    public void linkedHashSetIterator(Blackhole bh) {
        for (Integer value : linkedHashSet) {
            bh.consume(value);
        }
    }

    @Benchmark
    public void treeSetIterator(Blackhole bh) {
        for (Integer value : treeSet) {
            bh.consume(value);
        }
    }

    @Benchmark
    public void tinySetIterator(Blackhole bh) {
        for (Integer value : tinySet) {
            bh.consume(value);
        }
    }
}

View file

@@ -0,0 +1,83 @@
package org.xbib.datastructures.tiny.jmh;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.xbib.datastructures.tiny.TinyMap;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
/**
 * JMH throughput benchmark that measures bulk {@code put} into several
 * JDK map implementations and into the {@link TinyMap} builder. Each
 * benchmark method builds and returns a fresh map so that JMH consumes
 * the result and the work cannot be dead-code-eliminated.
 */
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 10)
@Fork(value = 1)
@Measurement(iterations = 10)
public class MapAdditionTest {

    // number of entries inserted per benchmark call
    private static final int COLLECTION_SIZE = 1_000_000;

    @Benchmark
    public Map<Long, String> hashMap() {
        Map<Long, String> result = new HashMap<>();
        for (long key = 0L; key < COLLECTION_SIZE; key++) {
            result.put(key, String.valueOf(key));
        }
        return result;
    }

    @Benchmark
    public Map<Long, String> linkedHashMap() {
        Map<Long, String> result = new LinkedHashMap<>();
        for (long key = 0L; key < COLLECTION_SIZE; key++) {
            result.put(key, String.valueOf(key));
        }
        return result;
    }

    @Benchmark
    public Map<Long, String> treeMap() {
        Map<Long, String> result = new TreeMap<>();
        for (long key = 0L; key < COLLECTION_SIZE; key++) {
            result.put(key, String.valueOf(key));
        }
        return result;
    }

    @Benchmark
    public Map<Long, String> synchronizedHashMap() {
        Map<Long, String> result = Collections.synchronizedMap(new HashMap<>());
        for (long key = 0L; key < COLLECTION_SIZE; key++) {
            result.put(key, String.valueOf(key));
        }
        return result;
    }

    @Benchmark
    public Map<Long, String> concurrentHashMap() {
        ConcurrentHashMap<Long, String> result = new ConcurrentHashMap<>();
        for (long key = 0L; key < COLLECTION_SIZE; key++) {
            result.put(key, String.valueOf(key));
        }
        return result;
    }

    @Benchmark
    public Map<Long, String> tinyMap() {
        TinyMap.Builder<Long, String> builder = TinyMap.builder();
        for (long key = 0L; key < COLLECTION_SIZE; key++) {
            builder.put(key, String.valueOf(key));
        }
        return builder.build();
    }
}

View file

@@ -0,0 +1,105 @@
package org.xbib.datastructures.tiny.jmh;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.xbib.datastructures.tiny.TinyMap;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
/**
 * JMH throughput benchmark that measures key removal from several JDK
 * map implementations and from the {@link TinyMap} builder.
 *
 * <p>The maps are re-populated before <em>every</em> benchmark invocation
 * ({@code Level.Invocation}): removal empties them, so with the original
 * per-trial setup every invocation after the first measured
 * {@code remove} calls on already-empty maps (all returning null).
 * Per-invocation setup has measurable overhead, but it is the only way to
 * make each measurement remove from a full map.</p>
 */
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 10)
@Fork(value = 1)
@Measurement(iterations = 10)
public class MapRemovalTest {

    // number of entries present before each removal run
    private static final int COLLECTION_SIZE = 25_000;

    private HashMap<Long, String> hashMap;

    private LinkedHashMap<Long, String> linkedHashMap;

    private TreeMap<Long, String> treeMap;

    private ConcurrentHashMap<Long, String> concurrentHashMap;

    private Map<Long, String> synchronizedHashMap;

    private TinyMap.Builder<Long, String> tinyMap;

    @Setup(Level.Invocation)
    public void setup() {
        hashMap = new HashMap<>();
        linkedHashMap = new LinkedHashMap<>();
        treeMap = new TreeMap<>();
        concurrentHashMap = new ConcurrentHashMap<>();
        synchronizedHashMap = new HashMap<>();
        tinyMap = TinyMap.builder();
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            hashMap.put(i, String.valueOf(i));
            linkedHashMap.put(i, String.valueOf(i));
            treeMap.put(i, String.valueOf(i));
            concurrentHashMap.put(i, String.valueOf(i));
            synchronizedHashMap.put(i, String.valueOf(i));
            tinyMap.put(i, String.valueOf(i));
        }
        // populate the backing map first, then wrap: avoids paying the
        // synchronization cost during setup
        synchronizedHashMap = Collections.synchronizedMap(synchronizedHashMap);
    }

    @Benchmark
    public void hashMap(Blackhole bh) {
        for (long i = 0L; i < COLLECTION_SIZE; i++) {
            String s = hashMap.remove(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void linkedHashMap(Blackhole bh) {
        for (long i = 0L; i < COLLECTION_SIZE; i++) {
            String s = linkedHashMap.remove(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void treeMap(Blackhole bh) {
        for (long i = 0L; i < COLLECTION_SIZE; i++) {
            String s = treeMap.remove(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void synchronizedHashMap(Blackhole bh) {
        for (long i = 0L; i < COLLECTION_SIZE; i++) {
            String s = synchronizedHashMap.remove(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void concurrentHashMap(Blackhole bh) {
        for (long i = 0L; i < COLLECTION_SIZE; i++) {
            String s = concurrentHashMap.remove(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void tinyMap(Blackhole bh) {
        // NOTE(review): removal goes through the builder, not a built
        // TinyMap — presumably TinyMap itself is immutable; confirm
        for (long i = 0L; i < COLLECTION_SIZE; i++) {
            String s = tinyMap.remove(i);
            bh.consume(s);
        }
    }
}

View file

@@ -0,0 +1,106 @@
package org.xbib.datastructures.tiny.jmh;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.xbib.datastructures.tiny.TinyMap;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
/**
 * JMH throughput benchmark that measures {@code get} by key across
 * several JDK map implementations and a built {@link TinyMap}. Maps are
 * populated once per trial; retrieval does not mutate them, so per-trial
 * setup is sufficient.
 */
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 10)
@Fork(1)
@Measurement(iterations = 10)
public class MapRetrievalTest {

    // number of entries each map holds
    private static final int COLLECTION_SIZE = 25_000;

    private HashMap<Long, String> hashMap;

    private LinkedHashMap<Long, String> linkedHashMap;

    private TreeMap<Long, String> treeMap;

    private ConcurrentHashMap<Long, String> concurrentHashMap;

    private Map<Long, String> synchronizedHashMap;

    private TinyMap<Long, String> tinyMap;

    @Setup(Level.Trial)
    public void setup() {
        hashMap = new HashMap<>();
        linkedHashMap = new LinkedHashMap<>();
        treeMap = new TreeMap<>();
        concurrentHashMap = new ConcurrentHashMap<>();
        synchronizedHashMap = new HashMap<>();
        TinyMap.Builder<Long, String> tinyMapBuilder = TinyMap.builder();
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            hashMap.put(i, String.valueOf(i));
            linkedHashMap.put(i, String.valueOf(i));
            treeMap.put(i, String.valueOf(i));
            concurrentHashMap.put(i, String.valueOf(i));
            synchronizedHashMap.put(i, String.valueOf(i));
            tinyMapBuilder.put(i, String.valueOf(i));
        }
        synchronizedHashMap = Collections.synchronizedMap(synchronizedHashMap);
        tinyMap = tinyMapBuilder.build();
    }

    @Benchmark
    public void hashMap(Blackhole bh) {
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            String s = hashMap.get(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void linkedHashMap(Blackhole bh) {
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            String s = linkedHashMap.get(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void treeMap(Blackhole bh) {
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            String s = treeMap.get(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void synchronizedHashMap(Blackhole bh) {
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            String s = synchronizedHashMap.get(i);
            bh.consume(s);
        }
    }

    @Benchmark
    public void concurrentHashMap(Blackhole bh) {
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            String s = concurrentHashMap.get(i);
            bh.consume(s);
        }
    }

    // renamed from the typo "tinyhMap" for consistency with the sibling
    // benchmark classes; benchmark methods are invoked only by JMH, so no
    // caller breaks
    @Benchmark
    public void tinyMap(Blackhole bh) {
        for (long i = 0; i < COLLECTION_SIZE; i++) {
            String s = tinyMap.get(i);
            bh.consume(s);
        }
    }
}

View file

@@ -36,9 +36,9 @@ artifacts {
archives sourcesJar, javadocJar
}
tasks.withType(JavaCompile) {
options.compilerArgs << '-Xlint:all,-exports'
}
//tasks.withType(JavaCompile) {
// options.compilerArgs << '-Xlint:all'
//}
javadoc {
options.addStringOption('Xdoclint:none', '-quiet')