Compare commits
No commits in common. "4960f8cdb41a975c0e0bc3c1f028c0f7575feb7e" and "7c7dbace613afa2e6e68f7a94929832fee77cc4d" have entirely different histories.
4960f8cdb4 ... 7c7dbace61
532 changed files with 12357 additions and 2423 deletions
build.gradle (15 changed lines)

@@ -1,18 +1,23 @@
plugins {
    id "checkstyle"
    id "pmd"
    id 'maven-publish'
    id 'signing'
    id "io.github.gradle-nexus.publish-plugin" version "2.0.0-rc-1"
    id "com.github.spotbugs" version "6.0.0-beta.3"
    id "org.cyclonedx.bom" version "1.7.4"
    id "org.xbib.gradle.plugin.asciidoctor" version "3.0.0"
}

wrapper {
    gradleVersion = libs.versions.gradle.get()
    distributionType = Wrapper.DistributionType.BIN
    distributionType = Wrapper.DistributionType.ALL
}

ext {
    name = rootProject.name
    user = 'joerg'
    name = 'datastructures'
    description = 'Data structures for Java'
    inceptionYear = '2012'
    url = 'https://xbib.org/' + user + '/' + name

@@ -26,10 +31,16 @@ ext {
}

subprojects {
    //apply from: rootProject.file('gradle/ide/idea.gradle')
    apply from: rootProject.file('gradle/repositories/maven.gradle')
    apply from: rootProject.file('gradle/compile/java.gradle')
    apply from: rootProject.file('gradle/test/junit5.gradle')
    apply from: rootProject.file('gradle/documentation/asciidoc.gradle')
    apply from: rootProject.file('gradle/quality/checkstyle.gradle')
    apply from: rootProject.file('gradle/quality/pmd.gradle')
    //apply from: rootProject.file('gradle/quality/spotbugs.gradle')
    apply from: rootProject.file('gradle/publish/maven.gradle')
}
apply from: rootProject.file('gradle/publish/sonatype.gradle')
apply from: rootProject.file('gradle/publish/forgejo.gradle')
apply from: rootProject.file('gradle/quality/cyclonedx.gradle')
@@ -1,8 +1,12 @@
import org.xbib.config.ConfigLogger;
import org.xbib.config.NullConfigLogger;
import org.xbib.config.SystemConfigLogger;
import org.xbib.settings.SettingsLoader;

module org.xbib.config {
    requires transitive org.xbib.settings.datastructures;
    requires java.logging;
    exports org.xbib.config;
    uses ConfigLogger;
    uses SettingsLoader;
    provides ConfigLogger with NullConfigLogger, SystemConfigLogger;
}
@@ -16,8 +16,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.xbib.settings.Settings;
import org.xbib.settings.SettingsBuilder;

@@ -31,7 +29,7 @@ public class ConfigLoader {

    private static final Map<ConfigParams, Settings> map = new HashMap<>();

    private static final Logger logger = Logger.getLogger(ConfigLoader.class.getName());
    private ConfigLogger logger;

    private ConfigLoader() {
    }

@@ -40,6 +38,11 @@ public class ConfigLoader {
        return new ConfigLoader();
    }

    public ConfigLoader withLogger(ConfigLogger logger) {
        this.logger = logger;
        return this;
    }

    public synchronized Settings load(ConfigParams configParams) throws ConfigException {
        map.computeIfAbsent(configParams, p -> internalLoad(p)
                .replacePropertyPlaceholders()

@@ -182,9 +185,13 @@ public class ConfigLoader {
        int pos = settingsFileName.lastIndexOf('.');
        String suffix = (pos > 0 ? settingsFileName.substring(pos + 1) : "").toLowerCase(Locale.ROOT);
        Path path = Paths.get(settingsFileName);
        logger.log(Level.INFO, "trying " + path);
        if (logger != null) {
            logger.info("trying " + path);
        }
        if (Files.exists(path)) {
            logger.log(Level.INFO, "found path: " + path);
            if (logger != null) {
                logger.info("found path: " + path);
            }
            System.setProperty("config.path", path.getParent().toString());
            try {
                InputStream inputStream = Files.newInputStream(path);

@@ -209,7 +216,9 @@ public class ConfigLoader {
                params.directoryName + '-' + fileNameWithoutSuffix + suffix : fileNameWithoutSuffix + suffix;
        InputStream inputStream = classLoader.getResourceAsStream(path);
        if (inputStream != null) {
            logger.log(Level.INFO, "found resource: " + path);
            if (logger != null) {
                logger.info("found resource: " + path);
            }
            SettingsBuilder streamSettings = createSettingsFromStream(inputStream, suffix);
            if (streamSettings != null) {
                settings.put(streamSettings.build());

@@ -222,7 +231,9 @@ public class ConfigLoader {
    private SettingsBuilder createSettingsFromStream(InputStream inputStream,
                                                     String suffix) throws ConfigException {
        if (inputStream == null) {
            logger.log(Level.WARNING, "unable to open input stream");
            if (logger != null) {
                logger.error("unable to open input stream");
            }
            return null;
        }
        return createSettingsFromReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8), suffix);

@@ -231,7 +242,9 @@ public class ConfigLoader {
    private SettingsBuilder createSettingsFromReader(Reader reader,
                                                     String suffix) throws ConfigException {
        if (reader == null) {
            logger.log(Level.WARNING, "unable to open reader");
            if (logger != null) {
                logger.error("unable to open reader");
            }
            return null;
        }
        SettingsLoader settingsLoader = SettingsLoaderService.getInstance().loaderFromResource(suffix);

@@ -245,7 +258,9 @@ public class ConfigLoader {
            }
            return settings;
        } else {
            logger.log(Level.WARNING, "suffix is invalid: " + suffix);
            if (logger != null) {
                logger.error("suffix is invalid: " + suffix);
            }
        }
        return null;
    }
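The hunks above replace ConfigLoader's static java.util.logging logger with an optional, injectable ConfigLogger, so loading is only logged when a caller supplies a logger. A minimal usage sketch; the static factory name getInstance() and the ConfigParams construction are assumptions, since the diff only shows "return new ConfigLoader();" and the withLogger/load signatures:

import org.xbib.config.ConfigLoader;
import org.xbib.config.ConfigParams;
import org.xbib.config.SystemConfigLogger;
import org.xbib.settings.Settings;

public class ConfigLoaderExample {
    public static void main(String[] args) throws Exception {
        // getInstance() is an assumed factory name; the diff only shows such a method returning new ConfigLoader().
        Settings settings = ConfigLoader.getInstance()
                .withLogger(new SystemConfigLogger()) // omit this call to load silently; the logger field then stays null
                .load(new ConfigParams());            // ConfigParams setup is illustrative, not taken from the diff
        System.out.println(settings);
    }
}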
config/src/main/java/org/xbib/config/ConfigLogger.java (new file, 10 lines)

@@ -0,0 +1,10 @@
package org.xbib.config;

public interface ConfigLogger {

    void info(String string);

    void warn(String string);

    void error(String message);
}
config/src/main/java/org/xbib/config/NullConfigLogger.java (new file, 19 lines)

@@ -0,0 +1,19 @@
package org.xbib.config;

public class NullConfigLogger implements ConfigLogger {

    public NullConfigLogger() {
    }

    @Override
    public void info(String string) {
    }

    @Override
    public void warn(String message) {
    }

    @Override
    public void error(String message) {
    }
}
config/src/main/java/org/xbib/config/SystemConfigLogger.java (new file, 22 lines)

@@ -0,0 +1,22 @@
package org.xbib.config;

public class SystemConfigLogger implements ConfigLogger {

    public SystemConfigLogger() {
    }

    @Override
    public void info(String string) {
        System.err.println("info: " + string);
    }

    @Override
    public void warn(String message) {
        System.err.println("warning: " + message);
    }

    @Override
    public void error(String message) {
        System.err.println("error: " + message);
    }
}
config/src/main/java/org/xbib/config/package-info.java (new file, 4 lines)

@@ -0,0 +1,4 @@
/**
 * Classes for configuration setup.
 */
package org.xbib.config;
@@ -0,0 +1,4 @@
/**
 * Test classes for config.
 */
package org.xbib.config.test;
@@ -0,0 +1,2 @@
org.xbib.config.NullConfigLogger
org.xbib.config.SystemConfigLogger
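This two-line provider file, together with the uses/provides directives added to the module descriptor, registers both ConfigLogger implementations with java.util.ServiceLoader on the class path and the module path. A sketch of how a consumer could discover one; the fallback-selection logic here is illustrative and not taken from this repository:

import java.util.ServiceLoader;
import org.xbib.config.ConfigLogger;
import org.xbib.config.NullConfigLogger;

public class ConfigLoggerDiscovery {
    public static void main(String[] args) {
        // Take the first registered ConfigLogger, falling back to the no-op implementation.
        ConfigLogger logger = ServiceLoader.load(ConfigLogger.class)
                .findFirst()
                .orElseGet(NullConfigLogger::new);
        logger.info("using " + logger.getClass().getName());
    }
}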
@@ -39,14 +39,13 @@ public class GeneratorTest {
        StringWriter writer = new StringWriter();
        Generator gen = new Generator(writer);
        gen.keys(Arrays.asList("a", "b", "c"));
        gen.writeKeys();
        for (int i = 0; i < 1; i++) {
            gen.write("val" + i);
            gen.write("\"Hello, World\"");
            gen.write("hey look a line seperator \n");
        }
        gen.close();
        assertEquals("a,b,c,\nval0,\"\"\"Hello, World\"\"\",\"hey look a line seperator \n\"\n", writer.toString());
        assertEquals("val0,\"\"\"Hello, World\"\"\",\"hey look a line seperator \n\"\n", writer.toString());
    }

    @Test
@@ -695,7 +695,7 @@ public abstract class Ordering<T> implements Comparator<T> {
     * only when the resulting list may need further modification, or may contain {@code null}. The
     * input is not modified. The returned list has random access.
     *
     * <p>Tthis method does not discard elements that are
     * <p>Unlike {@link java.util.Sets#newTreeSet(Iterable)}, this method does not discard elements that are
     * duplicates according to the comparator. The sort performed is <i>stable</i>, meaning that such
     * elements will appear in the returned list in the same order they appeared in {@code elements}.
     *
@@ -716,7 +716,7 @@ public abstract class Ordering<T> implements Comparator<T> {
     * Returns an <b>immutable</b> list containing {@code elements} sorted by this ordering. The input
     * is not modified.
     *
     * <p>This method does not discard elements that are
     * <p>Unlike {@link java.util.Sets#newTreeSet(Iterable)}, this method does not discard elements that are
     * duplicates according to the comparator. The sort performed is <i>stable</i>, meaning that such
     * elements will appear in the returned list in the same order they appeared in {@code elements}.
     *
@@ -735,6 +735,10 @@ public abstract class Ordering<T> implements Comparator<T> {
     * Returns {@code true} if each element in {@code iterable} after the first is greater than or
     * equal to the element that preceded it, according to this ordering. Note that this is always
     * true when the iterable has fewer than two elements.
     *
     * <p><b>Java 8 users:</b> Use the equivalent {@link java.util.Comparators#isInOrder(Iterable, Comparator)}
     * instead, since the rest of {@code Ordering} is mostly obsolete (as explained in the class
     * documentation).
     */
    public boolean isOrdered(Iterable<? extends T> iterable) {
        Iterator<? extends T> it = iterable.iterator();
@@ -755,6 +759,10 @@ public abstract class Ordering<T> implements Comparator<T> {
     * Returns {@code true} if each element in {@code iterable} after the first is <i>strictly</i>
     * greater than the element that preceded it, according to this ordering. Note that this is always
     * true when the iterable has fewer than two elements.
     *
     * <p><b>Java 8 users:</b> Use the equivalent {@link java.util.Comparators#isInStrictOrder(Iterable,
     * Comparator)} instead, since the rest of {@code Ordering} is mostly obsolete (as explained in
     * the class documentation).
     */
    public boolean isStrictlyOrdered(Iterable<? extends T> iterable) {
        Iterator<? extends T> it = iterable.iterator();
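The new Javadoc describes isOrdered and isStrictlyOrdered as pairwise checks over consecutive elements, trivially true for fewer than two elements. A self-contained sketch of that contract written against a plain Comparator; this illustrates the documented semantics and is not code from the repository:

import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

public class OrderingCheckSketch {

    // True when every element is >= its predecessor under the comparator (the non-strict variant).
    static <T> boolean isOrdered(Iterable<? extends T> iterable, Comparator<? super T> comparator) {
        Iterator<? extends T> it = iterable.iterator();
        if (!it.hasNext()) {
            return true; // fewer than two elements is always ordered
        }
        T prev = it.next();
        while (it.hasNext()) {
            T next = it.next();
            if (comparator.compare(prev, next) > 0) {
                return false;
            }
            prev = next;
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(isOrdered(List.of(1, 2, 2, 3), Comparator.<Integer>naturalOrder())); // true
        System.out.println(isOrdered(List.of(3, 1), Comparator.<Integer>naturalOrder()));       // false
    }
}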
datastructures-multi/build.gradle (new file, 3 lines)

@@ -0,0 +1,3 @@
dependencies {
    api project(':datastructures-immutable')
}
datastructures-multi/src/main/java/module-info.java (new file, 5 lines)

@@ -0,0 +1,5 @@
module org.xbib.datastructures.multi {
    exports org.xbib.datastructures.multi;
    requires org.xbib.datastructures.api;
    requires org.xbib.datastructures.immutable;
}
File diff suppressed because it is too large
@ -0,0 +1,321 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.io.InvalidObjectException;
|
||||
import java.io.ObjectStreamException;
|
||||
import java.io.Serializable;
|
||||
import java.util.ConcurrentModificationException;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.ObjIntConsumer;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
/**
|
||||
* Basic implementation of {@code Multiset<E>} backed by an instance of {@code Map<E, Count>}.
|
||||
*
|
||||
* <p>For serialization to work, the subclass must specify explicit {@code readObject} and {@code
|
||||
* writeObject} methods.
|
||||
*/
|
||||
abstract class AbstractMapBasedMultiset<E extends Object> extends AbstractMultiset<E>
|
||||
implements Serializable {
|
||||
// TODO(lowasser): consider overhauling this back to Map<E, Integer>
|
||||
private transient Map<E, Count> backingMap;
|
||||
|
||||
/*
|
||||
* Cache the size for efficiency. Using a long lets us avoid the need for
|
||||
* overflow checking and ensures that size() will function correctly even if
|
||||
* the multiset had once been larger than Integer.MAX_VALUE.
|
||||
*/
|
||||
private transient long size;
|
||||
|
||||
/** Standard constructor. */
|
||||
protected AbstractMapBasedMultiset(Map<E, Count> backingMap) {
|
||||
checkArgument(backingMap.isEmpty());
|
||||
this.backingMap = backingMap;
|
||||
}
|
||||
|
||||
/** Used during deserialization only. The backing map must be empty. */
|
||||
void setBackingMap(Map<E, Count> backingMap) {
|
||||
this.backingMap = backingMap;
|
||||
}
|
||||
|
||||
// Required Implementations
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>Invoking {@link Entry#getCount} on an entry in the returned set always returns the
|
||||
* current count of that element in the multiset, as opposed to the count at the time the entry
|
||||
* was retrieved.
|
||||
*/
|
||||
@Override
|
||||
public Set<Entry<E>> entrySet() {
|
||||
return super.entrySet();
|
||||
}
|
||||
|
||||
@Override
|
||||
Iterator<E> elementIterator() {
|
||||
final Iterator<Map.Entry<E, Count>> backingEntries = backingMap.entrySet().iterator();
|
||||
return new Iterator<E>() {
|
||||
@CheckForNull Map.Entry<E, Count> toRemove;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return backingEntries.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public E next() {
|
||||
final Map.Entry<E, Count> mapEntry = backingEntries.next();
|
||||
toRemove = mapEntry;
|
||||
return mapEntry.getKey();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkState(toRemove != null, "no calls to next() since the last call to remove()");
|
||||
size -= toRemove.getValue().getAndSet(0);
|
||||
backingEntries.remove();
|
||||
toRemove = null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
Iterator<Entry<E>> entryIterator() {
|
||||
final Iterator<Map.Entry<E, Count>> backingEntries = backingMap.entrySet().iterator();
|
||||
return new Iterator<Entry<E>>() {
|
||||
@CheckForNull Map.Entry<E, Count> toRemove;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return backingEntries.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Entry<E> next() {
|
||||
final Map.Entry<E, Count> mapEntry = backingEntries.next();
|
||||
toRemove = mapEntry;
|
||||
return new Multisets.AbstractEntry<E>() {
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public E getElement() {
|
||||
return mapEntry.getKey();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getCount() {
|
||||
Count count = mapEntry.getValue();
|
||||
if (count == null || count.get() == 0) {
|
||||
Count frequency = backingMap.get(getElement());
|
||||
if (frequency != null) {
|
||||
return frequency.get();
|
||||
}
|
||||
}
|
||||
return (count == null) ? 0 : count.get();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkState(toRemove != null, "no calls to next() since the last call to remove()");
|
||||
size -= toRemove.getValue().getAndSet(0);
|
||||
backingEntries.remove();
|
||||
toRemove = null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forEachEntry(ObjIntConsumer<? super E> action) {
|
||||
checkNotNull(action);
|
||||
backingMap.forEach((element, count) -> action.accept(element, count.get()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
for (Count frequency : backingMap.values()) {
|
||||
frequency.set(0);
|
||||
}
|
||||
backingMap.clear();
|
||||
size = 0L;
|
||||
}
|
||||
|
||||
@Override
|
||||
int distinctElements() {
|
||||
return backingMap.size();
|
||||
}
|
||||
|
||||
// Optimizations - Query Operations
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return Ints.saturatedCast(size);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<E> iterator() {
|
||||
return new MapBasedMultisetIterator();
|
||||
}
|
||||
|
||||
/*
|
||||
* Not subclassing AbstractMultiset$MultisetIterator because next() needs to
|
||||
* retrieve the Map.Entry<E, Count> entry, which can then be used for
|
||||
* a more efficient remove() call.
|
||||
*/
|
||||
private class MapBasedMultisetIterator implements Iterator<E> {
|
||||
final Iterator<Map.Entry<E, Count>> entryIterator;
|
||||
@CheckForNull Map.Entry<E, Count> currentEntry;
|
||||
int occurrencesLeft;
|
||||
boolean canRemove;
|
||||
|
||||
MapBasedMultisetIterator() {
|
||||
this.entryIterator = backingMap.entrySet().iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return occurrencesLeft > 0 || entryIterator.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public E next() {
|
||||
if (occurrencesLeft == 0) {
|
||||
currentEntry = entryIterator.next();
|
||||
occurrencesLeft = currentEntry.getValue().get();
|
||||
}
|
||||
occurrencesLeft--;
|
||||
canRemove = true;
|
||||
/*
|
||||
* requireNonNull is safe because occurrencesLeft starts at 0, forcing us to initialize
|
||||
* currentEntry above. After that, we never clear it.
|
||||
*/
|
||||
return requireNonNull(currentEntry).getKey();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkRemove(canRemove);
|
||||
/*
|
||||
* requireNonNull is safe because canRemove is set to true only after we initialize
|
||||
* currentEntry (which we never subsequently clear).
|
||||
*/
|
||||
int frequency = requireNonNull(currentEntry).getValue().get();
|
||||
if (frequency <= 0) {
|
||||
throw new ConcurrentModificationException();
|
||||
}
|
||||
if (currentEntry.getValue().addAndGet(-1) == 0) {
|
||||
entryIterator.remove();
|
||||
}
|
||||
size--;
|
||||
canRemove = false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int count(@CheckForNull Object element) {
|
||||
Count frequency = Maps.safeGet(backingMap, element);
|
||||
return (frequency == null) ? 0 : frequency.get();
|
||||
}
|
||||
|
||||
// Optional Operations - Modification Operations
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* @throws IllegalArgumentException if the call would result in more than {@link
|
||||
* Integer#MAX_VALUE} occurrences of {@code element} in this multiset.
|
||||
*/
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public int add(@ParametricNullness E element, int occurrences) {
|
||||
if (occurrences == 0) {
|
||||
return count(element);
|
||||
}
|
||||
checkArgument(occurrences > 0, "occurrences cannot be negative: %s", occurrences);
|
||||
Count frequency = backingMap.get(element);
|
||||
int oldCount;
|
||||
if (frequency == null) {
|
||||
oldCount = 0;
|
||||
backingMap.put(element, new Count(occurrences));
|
||||
} else {
|
||||
oldCount = frequency.get();
|
||||
long newCount = (long) oldCount + (long) occurrences;
|
||||
checkArgument(newCount <= Integer.MAX_VALUE, "too many occurrences: %s", newCount);
|
||||
frequency.add(occurrences);
|
||||
}
|
||||
size += occurrences;
|
||||
return oldCount;
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public int remove(@CheckForNull Object element, int occurrences) {
|
||||
if (occurrences == 0) {
|
||||
return count(element);
|
||||
}
|
||||
checkArgument(occurrences > 0, "occurrences cannot be negative: %s", occurrences);
|
||||
Count frequency = backingMap.get(element);
|
||||
if (frequency == null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
int oldCount = frequency.get();
|
||||
|
||||
int numberRemoved;
|
||||
if (oldCount > occurrences) {
|
||||
numberRemoved = occurrences;
|
||||
} else {
|
||||
numberRemoved = oldCount;
|
||||
backingMap.remove(element);
|
||||
}
|
||||
|
||||
frequency.add(-numberRemoved);
|
||||
size -= numberRemoved;
|
||||
return oldCount;
|
||||
}
|
||||
|
||||
// Roughly a 33% performance improvement over AbstractMultiset.setCount().
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public int setCount(@ParametricNullness E element, int count) {
|
||||
checkNonnegative(count, "count");
|
||||
|
||||
Count existingCounter;
|
||||
int oldCount;
|
||||
if (count == 0) {
|
||||
existingCounter = backingMap.remove(element);
|
||||
oldCount = getAndSet(existingCounter, count);
|
||||
} else {
|
||||
existingCounter = backingMap.get(element);
|
||||
oldCount = getAndSet(existingCounter, count);
|
||||
|
||||
if (existingCounter == null) {
|
||||
backingMap.put(element, new Count(count));
|
||||
}
|
||||
}
|
||||
|
||||
size += (count - oldCount);
|
||||
return oldCount;
|
||||
}
|
||||
|
||||
private static int getAndSet(@CheckForNull Count i, int count) {
|
||||
if (i == null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return i.getAndSet(count);
|
||||
}
|
||||
|
||||
// Don't allow default serialization.
|
||||
@GwtIncompatible // java.io.ObjectStreamException
|
||||
private void readObjectNoData() throws ObjectStreamException {
|
||||
throw new InvalidObjectException("Stream data required");
|
||||
}
|
||||
|
||||
@GwtIncompatible // not needed in emulated source.
|
||||
private static final long serialVersionUID = -2250766705698539974L;
|
||||
}
|
|
@ -0,0 +1,295 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.util.AbstractCollection;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.Spliterator;
|
||||
import java.util.Spliterators;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
import org.xbib.datastructures.api.Multiset;
|
||||
import org.xbib.datastructures.api.SetMultimap;
|
||||
|
||||
/**
|
||||
* A skeleton {@code Multimap} implementation, not necessarily in terms of a {@code Map}.
|
||||
*/
|
||||
abstract class AbstractMultimap<K extends Object, V extends Object>
|
||||
implements Multimap<K, V> {
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return size() == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsValue(Object value) {
|
||||
for (Collection<V> collection : asMap().values()) {
|
||||
if (collection.contains(value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsEntry(Object key, Object value) {
|
||||
Collection<V> collection = asMap().get(key);
|
||||
return collection != null && collection.contains(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(Object key, Object value) {
|
||||
Collection<V> collection = asMap().get(key);
|
||||
return collection != null && collection.remove(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean put(K key, V value) {
|
||||
return get(key).add(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean putAll(K key, Iterable<? extends V> values) {
|
||||
Objects.requireNonNull(values);
|
||||
// make sure we only call values.iterator() once
|
||||
// and we only call get(key) if values is nonempty
|
||||
if (values instanceof Collection<? extends V> valueCollection) {
|
||||
return !valueCollection.isEmpty() && get(key).addAll(valueCollection);
|
||||
} else {
|
||||
Iterator<? extends V> valueItr = values.iterator();
|
||||
return valueItr.hasNext() && addAll(get(key), valueItr);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean putAll(Multimap<? extends K, ? extends V> multimap) {
|
||||
boolean changed = false;
|
||||
for (Entry<? extends K, ? extends V> entry : multimap.entries()) {
|
||||
changed |= put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return changed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<V> replaceValues(K key, Iterable<? extends V> values) {
|
||||
Objects.requireNonNull(values);
|
||||
Collection<V> result = removeAll(key);
|
||||
putAll(key, values);
|
||||
return result;
|
||||
}
|
||||
|
||||
private transient Collection<Entry<K, V>> entries;
|
||||
|
||||
@Override
|
||||
public Collection<Entry<K, V>> entries() {
|
||||
Collection<Entry<K, V>> result = entries;
|
||||
return (result == null) ? entries = createEntries() : result;
|
||||
}
|
||||
|
||||
abstract Collection<Entry<K, V>> createEntries();
|
||||
|
||||
class Entries extends MultimapsEntries<K, V> {
|
||||
@Override
|
||||
Multimap<K, V> multimap() {
|
||||
return AbstractMultimap.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Entry<K, V>> iterator() {
|
||||
return entryIterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Spliterator<Entry<K, V>> spliterator() {
|
||||
return entrySpliterator();
|
||||
}
|
||||
}
|
||||
|
||||
class EntrySet extends Entries implements Set<Entry<K, V>> {
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return hashCodeImpl(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
return equalsImpl(this, obj);
|
||||
}
|
||||
}
|
||||
|
||||
abstract Iterator<Entry<K, V>> entryIterator();
|
||||
|
||||
Spliterator<Entry<K, V>> entrySpliterator() {
|
||||
return Spliterators.spliterator(
|
||||
entryIterator(), size(), (this instanceof SetMultimap) ? Spliterator.DISTINCT : 0);
|
||||
}
|
||||
|
||||
private transient Set<K> keySet;
|
||||
|
||||
@Override
|
||||
public Set<K> keySet() {
|
||||
Set<K> result = keySet;
|
||||
return (result == null) ? keySet = createKeySet() : result;
|
||||
}
|
||||
|
||||
abstract Set<K> createKeySet();
|
||||
|
||||
private transient Multiset<K> keys;
|
||||
|
||||
@Override
|
||||
public Multiset<K> keys() {
|
||||
Multiset<K> result = keys;
|
||||
return (result == null) ? keys = createKeys() : result;
|
||||
}
|
||||
|
||||
abstract Multiset<K> createKeys();
|
||||
|
||||
private transient Collection<V> values;
|
||||
|
||||
@Override
|
||||
public Collection<V> values() {
|
||||
Collection<V> result = values;
|
||||
return (result == null) ? values = createValues() : result;
|
||||
}
|
||||
|
||||
abstract Collection<V> createValues();
|
||||
|
||||
class Values extends AbstractCollection<V> {
|
||||
@Override
|
||||
public Iterator<V> iterator() {
|
||||
return valueIterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Spliterator<V> spliterator() {
|
||||
return valueSpliterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return AbstractMultimap.this.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
return AbstractMultimap.this.containsValue(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
AbstractMultimap.this.clear();
|
||||
}
|
||||
}
|
||||
|
||||
Iterator<V> valueIterator() {
|
||||
return valueIterator(entries().iterator());
|
||||
}
|
||||
|
||||
Spliterator<V> valueSpliterator() {
|
||||
return Spliterators.spliterator(valueIterator(), size(), 0);
|
||||
}
|
||||
|
||||
private transient Map<K, Collection<V>> asMap;
|
||||
|
||||
@Override
|
||||
public Map<K, Collection<V>> asMap() {
|
||||
Map<K, Collection<V>> result = asMap;
|
||||
return (result == null) ? asMap = createAsMap() : result;
|
||||
}
|
||||
|
||||
abstract Map<K, Collection<V>> createAsMap();
|
||||
|
||||
// Comparison and hashing
|
||||
|
||||
@Override
|
||||
public boolean equals(Object object) {
|
||||
return equalsImpl(this, object);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the hash code for this multimap.
|
||||
*
|
||||
* <p>The hash code of a multimap is defined as the hash code of the map view, as returned by
|
||||
* {@link Multimap#asMap}.
|
||||
*
|
||||
* @see Map#hashCode
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return asMap().hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a string representation of the multimap, generated by calling {@code toString} on the
|
||||
* map returned by {@link Multimap#asMap}.
|
||||
*
|
||||
* @return a string representation of the multimap
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return asMap().toString();
|
||||
}
|
||||
|
||||
private static <K extends Object, V extends Object> Iterator<V> valueIterator(Iterator<Entry<K, V>> entryIterator) {
|
||||
return new TransformedIterator<Entry<K, V>, V>(entryIterator) {
|
||||
@Override
|
||||
V transform(Entry<K, V> entry) {
|
||||
return entry.getValue();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static <T extends Object> boolean addAll(
|
||||
Collection<T> addTo, Iterator<? extends T> iterator) {
|
||||
Objects.requireNonNull(addTo);
|
||||
Objects.requireNonNull(iterator);
|
||||
boolean wasModified = false;
|
||||
while (iterator.hasNext()) {
|
||||
wasModified |= addTo.add(iterator.next());
|
||||
}
|
||||
return wasModified;
|
||||
}
|
||||
|
||||
/** An implementation for {@link Set#hashCode()}. */
|
||||
static int hashCodeImpl(Set<?> s) {
|
||||
int hashCode = 0;
|
||||
for (Object o : s) {
|
||||
hashCode += o != null ? o.hashCode() : 0;
|
||||
|
||||
hashCode = ~~hashCode;
|
||||
// Needed to deal with unusual integer overflow in GWT.
|
||||
}
|
||||
return hashCode;
|
||||
}
|
||||
|
||||
/** An implementation for {@link Set#equals(Object)}. */
|
||||
private static boolean equalsImpl(Set<?> s, Object object) {
|
||||
if (s == object) {
|
||||
return true;
|
||||
}
|
||||
if (object instanceof Set) {
|
||||
Set<?> o = (Set<?>) object;
|
||||
|
||||
try {
|
||||
return s.size() == o.size() && s.containsAll(o);
|
||||
} catch (NullPointerException | ClassCastException ignored) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static boolean equalsImpl(Multimap<?, ?> multimap, Object object) {
|
||||
if (object == multimap) {
|
||||
return true;
|
||||
}
|
||||
if (object instanceof Multimap) {
|
||||
Multimap<?, ?> that = (Multimap<?, ?>) object;
|
||||
return multimap.asMap().equals(that.asMap());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,234 @@
|
|||
/*
|
||||
* Copyright (C) 2007 The Guava Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.xbib.datastructures.multi;
|
||||
|
||||
import com.google.common.annotations.GwtCompatible;
|
||||
import com.google.errorprone.annotations.CanIgnoreReturnValue;
|
||||
import com.google.errorprone.annotations.concurrent.LazyInit;
|
||||
import com.google.j2objc.annotations.WeakOuter;
|
||||
import java.util.AbstractCollection;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Set;
|
||||
import javax.annotation.CheckForNull;
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.xbib.datastructures.api.Multiset;
|
||||
import static com.google.common.collect.Multisets.setCountImpl;
|
||||
|
||||
/**
|
||||
* This class provides a skeletal implementation of the {@link Multiset} interface. A new multiset
|
||||
* implementation can be created easily by extending this class and implementing the {@link
|
||||
* Multiset#entrySet()} method, plus optionally overriding {@link #add(Object, int)} and {@link
|
||||
* #remove(Object, int)} to enable modifications to the multiset.
|
||||
*
|
||||
* <p>The {@link #count} and {@link #size} implementations all iterate across the set returned by
|
||||
* {@link Multiset#entrySet()}, as do many methods acting on the set returned by {@link
|
||||
* #elementSet()}. Override those methods for better performance.
|
||||
*
|
||||
* @author Kevin Bourrillion
|
||||
* @author Louis Wasserman
|
||||
*/
|
||||
@GwtCompatible
|
||||
@ElementTypesAreNonnullByDefault
|
||||
abstract class AbstractMultiset<E extends @Nullable Object> extends AbstractCollection<E>
|
||||
implements Multiset<E> {
|
||||
// Query Operations
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return entrySet().isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(@CheckForNull Object element) {
|
||||
return count(element) > 0;
|
||||
}
|
||||
|
||||
// Modification Operations
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public final boolean add(@ParametricNullness E element) {
|
||||
add(element, 1);
|
||||
return true;
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public int add(@ParametricNullness E element, int occurrences) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public final boolean remove(@CheckForNull Object element) {
|
||||
return remove(element, 1) > 0;
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public int remove(@CheckForNull Object element, int occurrences) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public int setCount(@ParametricNullness E element, int count) {
|
||||
return setCountImpl(this, element, count);
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public boolean setCount(@ParametricNullness E element, int oldCount, int newCount) {
|
||||
return setCountImpl(this, element, oldCount, newCount);
|
||||
}
|
||||
|
||||
// Bulk Operations
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>This implementation is highly efficient when {@code elementsToAdd} is itself a {@link
|
||||
* Multiset}.
|
||||
*/
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public final boolean addAll(Collection<? extends E> elementsToAdd) {
|
||||
return Multisets.addAllImpl(this, elementsToAdd);
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public final boolean removeAll(Collection<?> elementsToRemove) {
|
||||
return Multisets.removeAllImpl(this, elementsToRemove);
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public final boolean retainAll(Collection<?> elementsToRetain) {
|
||||
return Multisets.retainAllImpl(this, elementsToRetain);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract void clear();
|
||||
|
||||
// Views
|
||||
|
||||
@LazyInit @CheckForNull private transient Set<E> elementSet;
|
||||
|
||||
@Override
|
||||
public Set<E> elementSet() {
|
||||
Set<E> result = elementSet;
|
||||
if (result == null) {
|
||||
elementSet = result = createElementSet();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new instance of this multiset's element set, which will be returned by {@link
|
||||
* #elementSet()}.
|
||||
*/
|
||||
Set<E> createElementSet() {
|
||||
return new ElementSet();
|
||||
}
|
||||
|
||||
@WeakOuter
|
||||
class ElementSet extends Multisets.ElementSet<E> {
|
||||
@Override
|
||||
Multiset<E> multiset() {
|
||||
return AbstractMultiset.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<E> iterator() {
|
||||
return elementIterator();
|
||||
}
|
||||
}
|
||||
|
||||
abstract Iterator<E> elementIterator();
|
||||
|
||||
@LazyInit @CheckForNull private transient Set<Entry<E>> entrySet;
|
||||
|
||||
@Override
|
||||
public Set<Entry<E>> entrySet() {
|
||||
Set<Entry<E>> result = entrySet;
|
||||
if (result == null) {
|
||||
entrySet = result = createEntrySet();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@WeakOuter
|
||||
class EntrySet extends Multisets.EntrySet<E> {
|
||||
@Override
|
||||
Multiset<E> multiset() {
|
||||
return AbstractMultiset.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Entry<E>> iterator() {
|
||||
return entryIterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return distinctElements();
|
||||
}
|
||||
}
|
||||
|
||||
Set<Entry<E>> createEntrySet() {
|
||||
return new EntrySet();
|
||||
}
|
||||
|
||||
abstract Iterator<Entry<E>> entryIterator();
|
||||
|
||||
abstract int distinctElements();
|
||||
|
||||
// Object methods
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>This implementation returns {@code true} if {@code object} is a multiset of the same size
|
||||
* and if, for each element, the two multisets have the same count.
|
||||
*/
|
||||
@Override
|
||||
public final boolean equals(@CheckForNull Object object) {
|
||||
return Multisets.equalsImpl(this, object);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>This implementation returns the hash code of {@link Multiset#entrySet()}.
|
||||
*/
|
||||
@Override
|
||||
public final int hashCode() {
|
||||
return entrySet().hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>This implementation returns the result of invoking {@code toString} on {@link
|
||||
* Multiset#entrySet()}.
|
||||
*/
|
||||
@Override
|
||||
public final String toString() {
|
||||
return entrySet().toString();
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,126 @@
package org.xbib.datastructures.multi;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.xbib.datastructures.api.Multimap;
import org.xbib.datastructures.api.SetMultimap;

/**
 * Basic implementation of the {@link SetMultimap} interface. It's a wrapper around {@link
 * AbstractMapBasedMultimap} that converts the returned collections into {@code Sets}. The {@link
 * #createCollection} method must return a {@code Set}.
 */
abstract class AbstractSetMultimap<K extends Object, V extends Object>
        extends AbstractMapBasedMultimap<K, V> implements SetMultimap<K, V> {

    /**
     * Creates a new multimap that uses the provided map.
     *
     * @param map place to store the mapping from each key to its corresponding values
     */
    protected AbstractSetMultimap(Map<K, Collection<V>> map) {
        super(map);
    }

    @Override
    abstract Set<V> createCollection();

    @Override
    Set<V> createUnmodifiableEmptyCollection() {
        return Collections.emptySet();
    }

    @Override
    <E extends Object> Collection<E> unmodifiableCollectionSubclass(
            Collection<E> collection) {
        return Collections.unmodifiableSet((Set<E>) collection);
    }

    @Override
    Collection<V> wrapCollection(K key, Collection<V> collection) {
        return new WrappedSet(key, (Set<V>) collection);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Because a {@code SetMultimap} has unique values for a given key, this method returns a
     * {@link Set}, instead of the {@link Collection} specified in the {@link Multimap} interface.
     */
    @Override
    public Set<V> get(K key) {
        return (Set<V>) super.get(key);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Because a {@code SetMultimap} has unique values for a given key, this method returns a
     * {@link Set}, instead of the {@link Collection} specified in the {@link Multimap} interface.
     */
    @Override
    public Set<Entry<K, V>> entries() {
        return (Set<Entry<K, V>>) super.entries();
    }

    /**
     * {@inheritDoc}
     *
     * <p>Because a {@code SetMultimap} has unique values for a given key, this method returns a
     * {@link Set}, instead of the {@link Collection} specified in the {@link Multimap} interface.
     */
    @Override
    public Set<V> removeAll(Object key) {
        return (Set<V>) super.removeAll(key);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Because a {@code SetMultimap} has unique values for a given key, this method returns a
     * {@link Set}, instead of the {@link Collection} specified in the {@link Multimap} interface.
     *
     * <p>Any duplicates in {@code values} will be stored in the multimap once.
     */
    @Override
    public Set<V> replaceValues(K key, Iterable<? extends V> values) {
        return (Set<V>) super.replaceValues(key, values);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Though the method signature doesn't say so explicitly, the returned map has {@link Set}
     * values.
     */
    @Override
    public Map<K, Collection<V>> asMap() {
        return super.asMap();
    }

    /**
     * Stores a key-value pair in the multimap.
     *
     * @param key key to store in the multimap
     * @param value value to store in the multimap
     * @return {@code true} if the method increased the size of the multimap, or {@code false} if the
     *     multimap already contained the key-value pair
     */
    @Override
    public boolean put(K key, V value) {
        return super.put(key, value);
    }

    /**
     * Compares the specified object to this multimap for equality.
     *
     * <p>Two {@code SetMultimap} instances are equal if, for each key, they contain the same values.
     * Equality does not depend on the ordering of keys or values.
     */
    @Override
    public boolean equals(Object object) {
        return super.equals(object);
    }
}
@@ -0,0 +1,8 @@
package org.xbib.datastructures.multi;

/**
 * A dummy superclass of {@link ImmutableMultimap} that can be instanceof'd without ProGuard
 * retaining additional implementation details of {@link ImmutableMultimap}.
 */
abstract class BaseImmutableMultimap<K, V> extends AbstractMultimap<K, V> {
}
@ -0,0 +1,426 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InvalidObjectException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.util.Collection;
|
||||
import java.util.Comparator;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collector;
|
||||
import java.util.stream.Stream;
|
||||
import org.xbib.datastructures.api.ListMultimap;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
|
||||
/**
|
||||
* A {@link ListMultimap} whose contents will never change, with many other important properties
|
||||
* detailed at {@link ImmutableCollection}.
|
||||
*
|
||||
*/
|
||||
public class ImmutableListMultimap<K, V> extends ImmutableMultimap<K, V>
|
||||
implements ListMultimap<K, V> {
|
||||
/**
|
||||
* Returns a {@link Collector} that accumulates elements into an {@code ImmutableListMultimap}
|
||||
* whose keys and values are the result of applying the provided mapping functions to the input
|
||||
* elements.
|
||||
*
|
||||
* <p>For streams with defined encounter order (as defined in the Ordering section of the {@link
|
||||
* java.util.stream} Javadoc), that order is preserved, but entries are <a
|
||||
* href="ImmutableMultimap.html#iteration">grouped by key</a>.
|
||||
*
|
||||
* <p>Example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* static final Multimap<Character, String> FIRST_LETTER_MULTIMAP =
|
||||
* Stream.of("banana", "apple", "carrot", "asparagus", "cherry")
|
||||
* .collect(toImmutableListMultimap(str -> str.charAt(0), str -> str.substring(1)));
|
||||
*
|
||||
* // is equivalent to
|
||||
*
|
||||
* static final Multimap<Character, String> FIRST_LETTER_MULTIMAP =
|
||||
* new ImmutableListMultimap.Builder<Character, String>()
|
||||
* .put('b', "anana")
|
||||
* .putAll('a', "pple", "sparagus")
|
||||
* .putAll('c', "arrot", "herry")
|
||||
* .build();
|
||||
* }</pre>
|
||||
*
|
||||
* @since 21.0
|
||||
*/
|
||||
public static <T extends Object, K, V>
|
||||
Collector<T, ?, ImmutableListMultimap<K, V>> toImmutableListMultimap(
|
||||
Function<? super T, ? extends K> keyFunction,
|
||||
Function<? super T, ? extends V> valueFunction) {
|
||||
return CollectCollectors.toImmutableListMultimap(keyFunction, valueFunction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@code Collector} accumulating entries into an {@code ImmutableListMultimap}. Each
|
||||
* input element is mapped to a key and a stream of values, each of which are put into the
|
||||
* resulting {@code Multimap}, in the encounter order of the stream and the encounter order of the
|
||||
* streams of values.
|
||||
*
|
||||
* <p>Example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* static final ImmutableListMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
|
||||
* Stream.of("banana", "apple", "carrot", "asparagus", "cherry")
|
||||
* .collect(
|
||||
* flatteningToImmutableListMultimap(
|
||||
* str -> str.charAt(0),
|
||||
* str -> str.substring(1).chars().mapToObj(c -> (char) c));
|
||||
*
|
||||
* // is equivalent to
|
||||
*
|
||||
* static final ImmutableListMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
|
||||
* ImmutableListMultimap.<Character, Character>builder()
|
||||
* .putAll('b', Arrays.asList('a', 'n', 'a', 'n', 'a'))
|
||||
* .putAll('a', Arrays.asList('p', 'p', 'l', 'e'))
|
||||
* .putAll('c', Arrays.asList('a', 'r', 'r', 'o', 't'))
|
||||
* .putAll('a', Arrays.asList('s', 'p', 'a', 'r', 'a', 'g', 'u', 's'))
|
||||
* .putAll('c', Arrays.asList('h', 'e', 'r', 'r', 'y'))
|
||||
* .build();
|
||||
* }
|
||||
* }</pre>
|
||||
*
|
||||
* @since 21.0
|
||||
*/
|
||||
public static <T extends Object, K, V>
|
||||
Collector<T, ?, ImmutableListMultimap<K, V>> flatteningToImmutableListMultimap(
|
||||
Function<? super T, ? extends K> keyFunction,
|
||||
Function<? super T, ? extends Stream<? extends V>> valuesFunction) {
|
||||
return CollectCollectors.flatteningToImmutableListMultimap(keyFunction, valuesFunction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the empty multimap.
|
||||
*
|
||||
* <p><b>Performance note:</b> the instance returned is a singleton.
|
||||
*/
|
||||
// Casting is safe because the multimap will never hold any elements.
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <K, V> ImmutableListMultimap<K, V> of() {
|
||||
return (ImmutableListMultimap<K, V>) EmptyImmutableListMultimap.INSTANCE;
|
||||
}
|
||||
|
||||
/** Returns an immutable multimap containing a single entry. */
|
||||
public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1) {
|
||||
Builder<K, V> builder = ImmutableListMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/** Returns an immutable multimap containing the given entries, in order. */
|
||||
public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2) {
|
||||
Builder<K, V> builder = ImmutableListMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/** Returns an immutable multimap containing the given entries, in order. */
|
||||
public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
|
||||
Builder<K, V> builder = ImmutableListMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
builder.put(k3, v3);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/** Returns an immutable multimap containing the given entries, in order. */
|
||||
public static <K, V> ImmutableListMultimap<K, V> of(
|
||||
K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
|
||||
Builder<K, V> builder = ImmutableListMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
builder.put(k3, v3);
|
||||
builder.put(k4, v4);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/** Returns an immutable multimap containing the given entries, in order. */
|
||||
public static <K, V> ImmutableListMultimap<K, V> of(
|
||||
K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
|
||||
Builder<K, V> builder = ImmutableListMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
builder.put(k3, v3);
|
||||
builder.put(k4, v4);
|
||||
builder.put(k5, v5);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
// looking for of() with > 5 entries? Use the builder instead.
|
||||
|
||||
/**
|
||||
* Returns a new builder. The generated builder is equivalent to the builder created by the {@link
|
||||
* Builder} constructor.
|
||||
*/
|
||||
public static <K, V> Builder<K, V> builder() {
|
||||
return new Builder<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* A builder for creating immutable {@code ListMultimap} instances, especially {@code public
|
||||
* static final} multimaps ("constant multimaps"). Example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
|
||||
* new ImmutableListMultimap.Builder<String, Integer>()
|
||||
* .put("one", 1)
|
||||
* .putAll("several", 1, 2, 3)
|
||||
* .putAll("many", 1, 2, 3, 4, 5)
|
||||
* .build();
|
||||
* }</pre>
|
||||
*
|
||||
* <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
|
||||
* multiple multimaps in series. Each multimap contains the key-value mappings in the previously
|
||||
* created multimaps.
|
||||
*/
|
||||
public static final class Builder<K, V> extends ImmutableMultimap.Builder<K, V> {
|
||||
/**
|
||||
* Creates a new builder. The returned builder is equivalent to the builder generated by {@link
|
||||
* ImmutableListMultimap#builder}.
|
||||
*/
|
||||
public Builder() {}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> put(K key, V value) {
|
||||
super.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
|
||||
super.put(entry);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
|
||||
super.putAll(entries);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
|
||||
super.putAll(key, values);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(K key, V... values) {
|
||||
super.putAll(key, values);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
|
||||
super.putAll(multimap);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
Builder<K, V> combine(ImmutableMultimap.Builder<K, V> other) {
|
||||
super.combine(other);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
|
||||
super.orderKeysBy(keyComparator);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
|
||||
super.orderValuesBy(valueComparator);
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Returns a newly-created immutable list multimap. */
|
||||
@Override
|
||||
public ImmutableListMultimap<K, V> build() {
|
||||
return (ImmutableListMultimap<K, V>) super.build();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the same mappings as {@code multimap}. The generated
|
||||
* multimap's key and value orderings correspond to the iteration ordering of the {@code
|
||||
* multimap.asMap()} view.
|
||||
*
|
||||
* <p>Despite the method name, this method attempts to avoid actually copying the data when it is
|
||||
* safe to do so. The exact circumstances under which a copy will or will not be performed are
|
||||
* undocumented and subject to change.
|
||||
*
|
||||
* @throws NullPointerException if any key or value in {@code multimap} is null
|
||||
*/
|
||||
public static <K, V> ImmutableListMultimap<K, V> copyOf(
|
||||
Multimap<? extends K, ? extends V> multimap) {
|
||||
if (multimap.isEmpty()) {
|
||||
return of();
|
||||
}
|
||||
|
||||
// TODO(lowasser): copy ImmutableSetMultimap by using asList() on the sets
|
||||
if (multimap instanceof ImmutableListMultimap) {
|
||||
@SuppressWarnings("unchecked") // safe since multimap is not writable
|
||||
ImmutableListMultimap<K, V> kvMultimap = (ImmutableListMultimap<K, V>) multimap;
|
||||
if (!kvMultimap.isPartialView()) {
|
||||
return kvMultimap;
|
||||
}
|
||||
}
|
||||
|
||||
return fromMapEntries(multimap.asMap().entrySet(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the specified entries. The returned multimap iterates
|
||||
* over keys in the order they were first encountered in the input, and the values for each key
|
||||
* are iterated in the order they were encountered.
|
||||
*
|
||||
* @throws NullPointerException if any key, value, or entry is null
|
||||
*/
|
||||
public static <K, V> ImmutableListMultimap<K, V> copyOf(
|
||||
Iterable<? extends Entry<? extends K, ? extends V>> entries) {
|
||||
return new Builder<K, V>().putAll(entries).build();
|
||||
}
|
||||
|
||||
/** Creates an ImmutableListMultimap from an asMap.entrySet. */
|
||||
static <K, V> ImmutableListMultimap<K, V> fromMapEntries(
|
||||
Collection<? extends Entry<? extends K, ? extends Collection<? extends V>>> mapEntries,
|
||||
Comparator<? super V> valueComparator) {
|
||||
if (mapEntries.isEmpty()) {
|
||||
return of();
|
||||
}
|
||||
ImmutableMap.Builder<K, ImmutableList<V>> builder =
|
||||
new ImmutableMap.Builder<>(mapEntries.size());
|
||||
int size = 0;
|
||||
|
||||
for (Entry<? extends K, ? extends Collection<? extends V>> entry : mapEntries) {
|
||||
K key = entry.getKey();
|
||||
Collection<? extends V> values = entry.getValue();
|
||||
ImmutableList<V> list =
|
||||
(valueComparator == null)
|
||||
? ImmutableList.copyOf(values)
|
||||
: ImmutableList.sortedCopyOf(valueComparator, values);
|
||||
if (!list.isEmpty()) {
|
||||
builder.put(key, list);
|
||||
size += list.size();
|
||||
}
|
||||
}
|
||||
|
||||
return new ImmutableListMultimap<>(builder.buildOrThrow(), size);
|
||||
}
|
||||
|
||||
ImmutableListMultimap(ImmutableMap<K, ImmutableList<V>> map, int size) {
|
||||
super(map, size);
|
||||
}
|
||||
|
||||
// views
|
||||
|
||||
/**
|
||||
* Returns an immutable list of the values for the given key. If no mappings in the multimap have
|
||||
* the provided key, an empty immutable list is returned. The values are in the same order as the
|
||||
* parameters used to build this multimap.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableList<V> get(K key) {
|
||||
// This cast is safe as its type is known in constructor.
|
||||
ImmutableList<V> list = (ImmutableList<V>) map.get(key);
|
||||
return (list == null) ? ImmutableList.<V>of() : list;
|
||||
}
|
||||
|
||||
private transient ImmutableListMultimap<V, K> inverse;
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>Because an inverse of a list multimap can contain multiple pairs with the same key and
|
||||
* value, this method returns an {@code ImmutableListMultimap} rather than the {@code
|
||||
* ImmutableMultimap} specified in the {@code ImmutableMultimap} class.
|
||||
*
|
||||
*/
|
||||
@Override
|
||||
public ImmutableListMultimap<V, K> inverse() {
|
||||
ImmutableListMultimap<V, K> result = inverse;
|
||||
return (result == null) ? (inverse = invert()) : result;
|
||||
}
|
||||
|
||||
private ImmutableListMultimap<V, K> invert() {
|
||||
Builder<V, K> builder = builder();
|
||||
for (Entry<K, V> entry : entries()) {
|
||||
builder.put(entry.getValue(), entry.getKey());
|
||||
}
|
||||
ImmutableListMultimap<V, K> invertedMultimap = builder.build();
|
||||
invertedMultimap.inverse = this;
|
||||
return invertedMultimap;
|
||||
}
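// Usage sketch for inverse(): each (key, value) pair is re-added as (value, key),
// and because invert() stores a back-reference, inverse().inverse() hands back
// the original instance without building a third multimap.
//
//   ImmutableListMultimap<String, Integer> m = ImmutableListMultimap.of("a", 1, "b", 1);
//   ImmutableListMultimap<Integer, String> inv = m.inverse();   // {1=[a, b]}
//   boolean same = (inv.inverse() == m);                        // true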
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final ImmutableList<V> removeAll(Object key) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final ImmutableList<V> replaceValues(K key, Iterable<? extends V> values) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* @serialData number of distinct keys, and then for each distinct key: the key, the number of
|
||||
* values for that key, and the key's values
|
||||
*/
|
||||
private void writeObject(ObjectOutputStream stream) throws IOException {
|
||||
stream.defaultWriteObject();
|
||||
Serialization.writeMultimap(this, stream);
|
||||
}
|
||||
|
||||
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
|
||||
stream.defaultReadObject();
|
||||
int keyCount = stream.readInt();
|
||||
if (keyCount < 0) {
|
||||
throw new InvalidObjectException("Invalid key count " + keyCount);
|
||||
}
|
||||
ImmutableMap.Builder<Object, ImmutableList<Object>> builder = ImmutableMap.builder();
|
||||
int tmpSize = 0;
|
||||
|
||||
for (int i = 0; i < keyCount; i++) {
|
||||
Object key = stream.readObject();
|
||||
int valueCount = stream.readInt();
|
||||
if (valueCount <= 0) {
|
||||
throw new InvalidObjectException("Invalid value count " + valueCount);
|
||||
}
|
||||
|
||||
ImmutableList.Builder<Object> valuesBuilder = ImmutableList.builder();
|
||||
for (int j = 0; j < valueCount; j++) {
|
||||
valuesBuilder.add(stream.readObject());
|
||||
}
|
||||
builder.put(key, valuesBuilder.build());
|
||||
tmpSize += valueCount;
|
||||
}
|
||||
|
||||
ImmutableMap<Object, ImmutableList<Object>> tmpMap;
|
||||
try {
|
||||
tmpMap = builder.buildOrThrow();
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw (InvalidObjectException) new InvalidObjectException(e.getMessage()).initCause(e);
|
||||
}
|
||||
|
||||
FieldSettersHolder.MAP_FIELD_SETTER.set(this, tmpMap);
|
||||
FieldSettersHolder.SIZE_FIELD_SETTER.set(this, tmpSize);
|
||||
}
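// Round-trip sketch for the serialized form above, using plain java.io object
// streams (exception handling and try-with-resources omitted for brevity):
//
//   ImmutableListMultimap<String, Integer> m = ImmutableListMultimap.of("a", 1, "a", 2);
//   ByteArrayOutputStream bytes = new ByteArrayOutputStream();
//   ObjectOutputStream out = new ObjectOutputStream(bytes);
//   out.writeObject(m);
//   out.flush();
//   Object copy = new ObjectInputStream(
//       new ByteArrayInputStream(bytes.toByteArray())).readObject();
//   // copy is an equal ImmutableListMultimap, rebuilt by readObject above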
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
}
@ -0,0 +1,742 @@
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.io.InvalidObjectException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Comparator;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.Spliterator;
|
||||
import java.util.function.BiConsumer;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
import org.xbib.datastructures.api.SetMultimap;
|
||||
import org.xbib.datastructures.immutable.order.Ordering;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
/**
|
||||
* A {@link Multimap} whose contents will never change, with many other important properties
|
||||
* detailed at {@link ImmutableCollection}.
|
||||
*
|
||||
* <p><b>Warning:</b> avoid <i>direct</i> usage of {@link ImmutableMultimap} as a type (as with
|
||||
* {@link Multimap} itself). Prefer subtypes such as {@link ImmutableSetMultimap} or {@link
|
||||
* ImmutableListMultimap}, which have well-defined {@link #equals} semantics, thus avoiding a common
|
||||
* source of bugs and confusion.
|
||||
*
|
||||
* <p><b>Note:</b> every {@link ImmutableMultimap} offers an {@link #inverse} view, so there is no
|
||||
* need for a distinct {@code ImmutableBiMultimap} type.
|
||||
*
|
||||
* <p><a id="iteration"></a>
|
||||
*
|
||||
* <p><b>Key-grouped iteration.</b> All view collections follow the same iteration order. In all
|
||||
* current implementations, the iteration order always keeps multiple entries with the same key
|
||||
* together. Any creation method that would customarily respect insertion order (such as {@link
|
||||
* #copyOf(Multimap)}) instead preserves key-grouped order by inserting entries for an existing key
|
||||
* immediately after the last entry having that key.
|
||||
*/
|
||||
public abstract class ImmutableMultimap<K, V> extends BaseImmutableMultimap<K, V>
|
||||
implements Serializable {
|
||||
|
||||
/**
|
||||
* Returns an empty multimap.
|
||||
*
|
||||
* <p><b>Performance note:</b> the instance returned is a singleton.
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> of() {
|
||||
return ImmutableListMultimap.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing a single entry.
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> of(K k1, V v1) {
|
||||
return ImmutableListMultimap.of(k1, v1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in order.
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> of(K k1, V v1, K k2, V v2) {
|
||||
return ImmutableListMultimap.of(k1, v1, k2, v2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in the "key-grouped" insertion
|
||||
* order described in the <a href="#iteration">class documentation</a>.
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
|
||||
return ImmutableListMultimap.of(k1, v1, k2, v2, k3, v3);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in the "key-grouped" insertion
|
||||
* order described in the <a href="#iteration">class documentation</a>.
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
|
||||
return ImmutableListMultimap.of(k1, v1, k2, v2, k3, v3, k4, v4);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in the "key-grouped" insertion
|
||||
* order described in the <a href="#iteration">class documentation</a>.
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> of(
|
||||
K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
|
||||
return ImmutableListMultimap.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5);
|
||||
}
|
||||
|
||||
// looking for of() with > 5 entries? Use the builder instead.
|
||||
|
||||
/**
|
||||
* Returns a new builder. The generated builder is equivalent to the builder created by the {@link
|
||||
* Builder} constructor.
|
||||
*/
|
||||
public static <K, V> Builder<K, V> builder() {
|
||||
return new Builder<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* A builder for creating immutable multimap instances, especially {@code public static final}
|
||||
* multimaps ("constant multimaps"). Example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
|
||||
* new ImmutableMultimap.Builder<String, Integer>()
|
||||
* .put("one", 1)
|
||||
* .putAll("several", 1, 2, 3)
|
||||
* .putAll("many", 1, 2, 3, 4, 5)
|
||||
* .build();
|
||||
* }</pre>
|
||||
*
|
||||
* <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
|
||||
* multiple multimaps in series. Each multimap contains the key-value mappings in the previously
|
||||
* created multimaps.
|
||||
*
|
||||
*/
|
||||
public static class Builder<K, V> {
|
||||
final Map<K, Collection<V>> builderMap;
|
||||
Comparator<? super K> keyComparator;
|
||||
Comparator<? super V> valueComparator;
|
||||
|
||||
/**
|
||||
* Creates a new builder. The returned builder is equivalent to the builder generated by {@link
|
||||
* ImmutableMultimap#builder}.
|
||||
*/
|
||||
public Builder() {
|
||||
this.builderMap = new LinkedHashMap<>();
|
||||
}
|
||||
|
||||
Collection<V> newMutableValueCollection() {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a key-value mapping to the built multimap.
|
||||
*/
|
||||
public Builder<K, V> put(K key, V value) {
|
||||
checkEntryNotNull(key, value);
|
||||
Collection<V> valueCollection = builderMap.get(key);
|
||||
if (valueCollection == null) {
|
||||
builderMap.put(key, valueCollection = newMutableValueCollection());
|
||||
}
|
||||
valueCollection.add(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an entry to the built multimap.
|
||||
*/
|
||||
public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
|
||||
return put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds entries to the built multimap.
|
||||
*/
|
||||
public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
|
||||
for (Entry<? extends K, ? extends V> entry : entries) {
|
||||
put(entry);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores a collection of values with the same key in the built multimap.
|
||||
*
|
||||
* @throws NullPointerException if {@code key}, {@code values}, or any element in {@code values}
|
||||
* is null. The builder is left in an invalid state.
|
||||
*/
|
||||
public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
|
||||
if (key == null) {
|
||||
throw new NullPointerException("null key in entry: null=" + toString(values.iterator()));
|
||||
}
|
||||
Collection<V> valueCollection = builderMap.get(key);
|
||||
if (valueCollection != null) {
|
||||
for (V value : values) {
|
||||
checkEntryNotNull(key, value);
|
||||
valueCollection.add(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
Iterator<? extends V> valuesItr = values.iterator();
|
||||
if (!valuesItr.hasNext()) {
|
||||
return this;
|
||||
}
|
||||
valueCollection = newMutableValueCollection();
|
||||
while (valuesItr.hasNext()) {
|
||||
V value = valuesItr.next();
|
||||
checkEntryNotNull(key, value);
|
||||
valueCollection.add(value);
|
||||
}
|
||||
builderMap.put(key, valueCollection);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores an array of values with the same key in the built multimap.
|
||||
*
|
||||
* @throws NullPointerException if the key or any value is null. The builder is left in an
|
||||
* invalid state.
|
||||
*/
|
||||
public Builder<K, V> putAll(K key, V... values) {
|
||||
return putAll(key, Arrays.asList(values));
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores another multimap's entries in the built multimap. The generated multimap's key and
|
||||
* value orderings correspond to the iteration ordering of the {@code multimap.asMap()} view,
|
||||
* with new keys and values following any existing keys and values.
|
||||
*
|
||||
* @throws NullPointerException if any key or value in {@code multimap} is null. The builder is
|
||||
* left in an invalid state.
|
||||
*/
|
||||
public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
|
||||
for (Entry<? extends K, ? extends Collection<? extends V>> entry :
|
||||
multimap.asMap().entrySet()) {
|
||||
putAll(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the ordering of the generated multimap's keys.
|
||||
*/
|
||||
public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
|
||||
this.keyComparator = Objects.requireNonNull(keyComparator);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the ordering of the generated multimap's values for each key.
|
||||
*/
|
||||
public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
|
||||
this.valueComparator = Objects.requireNonNull(valueComparator);
|
||||
return this;
|
||||
}
|
||||
|
||||
Builder<K, V> combine(Builder<K, V> other) {
|
||||
for (Entry<K, Collection<V>> entry : other.builderMap.entrySet()) {
|
||||
putAll(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a newly-created immutable multimap.
|
||||
*/
|
||||
public ImmutableMultimap<K, V> build() {
|
||||
Collection<Entry<K, Collection<V>>> mapEntries = builderMap.entrySet();
|
||||
if (keyComparator != null) {
|
||||
mapEntries = Ordering.from(keyComparator).<K>onKeys().immutableSortedCopy(mapEntries);
|
||||
}
|
||||
return ImmutableListMultimap.fromMapEntries(mapEntries, valueComparator);
|
||||
}
|
||||
|
||||
|
||||
private static String toString(Iterator<?> iterator) {
|
||||
StringBuilder sb = new StringBuilder().append('[');
|
||||
boolean first = true;
|
||||
while (iterator.hasNext()) {
|
||||
if (!first) {
|
||||
sb.append(", ");
|
||||
}
|
||||
first = false;
|
||||
sb.append(iterator.next());
|
||||
}
|
||||
return sb.append(']').toString();
|
||||
}
|
||||
}
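// Builder sketch, assuming the usual java.util imports: orderKeysBy() sorts the
// keys of the built multimap and orderValuesBy() sorts the values within each
// key; both are applied when build() runs.
//
//   ImmutableMultimap<String, Integer> m =
//       ImmutableMultimap.<String, Integer>builder()
//           .putAll("b", 2, 1)
//           .putAll("a", 3)
//           .orderKeysBy(Comparator.naturalOrder())
//           .orderValuesBy(Comparator.naturalOrder())
//           .build();
//   // keys iterate as "a", "b"; the values for "b" iterate as 1, 2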
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the same mappings as {@code multimap}, in the
|
||||
* "key-grouped" iteration order described in the class documentation.
|
||||
*
|
||||
* <p>Despite the method name, this method attempts to avoid actually copying the data when it is
|
||||
* safe to do so. The exact circumstances under which a copy will or will not be performed are
|
||||
* undocumented and subject to change.
|
||||
*
|
||||
* @throws NullPointerException if any key or value in {@code multimap} is null
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> copyOf(Multimap<? extends K, ? extends V> multimap) {
|
||||
if (multimap instanceof ImmutableMultimap) {
|
||||
@SuppressWarnings("unchecked") // safe since multimap is not writable
|
||||
ImmutableMultimap<K, V> kvMultimap = (ImmutableMultimap<K, V>) multimap;
|
||||
if (!kvMultimap.isPartialView()) {
|
||||
return kvMultimap;
|
||||
}
|
||||
}
|
||||
return ImmutableListMultimap.copyOf(multimap);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the specified entries. The returned multimap iterates
|
||||
* over keys in the order they were first encountered in the input, and the values for each key
|
||||
* are iterated in the order they were encountered.
|
||||
*
|
||||
* @throws NullPointerException if any key, value, or entry is null
|
||||
*/
|
||||
public static <K, V> ImmutableMultimap<K, V> copyOf(
|
||||
Iterable<? extends Entry<? extends K, ? extends V>> entries) {
|
||||
return ImmutableListMultimap.copyOf(entries);
|
||||
}
|
||||
|
||||
final transient ImmutableMap<K, ? extends ImmutableCollection<V>> map;
|
||||
final transient int size;
|
||||
|
||||
// These constants allow the deserialization code to set final fields. This
|
||||
// holder class makes sure they are not initialized unless an instance is
|
||||
// deserialized.
|
||||
static class FieldSettersHolder {
|
||||
static final Serialization.FieldSetter<ImmutableMultimap> MAP_FIELD_SETTER =
|
||||
Serialization.getFieldSetter(ImmutableMultimap.class, "map");
|
||||
static final Serialization.FieldSetter<ImmutableMultimap> SIZE_FIELD_SETTER =
|
||||
Serialization.getFieldSetter(ImmutableMultimap.class, "size");
|
||||
}
|
||||
|
||||
ImmutableMultimap(ImmutableMap<K, ? extends ImmutableCollection<V>> map, int size) {
|
||||
this.map = map;
|
||||
this.size = size;
|
||||
}
|
||||
|
||||
// mutators (not supported)
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
// DoNotCall wants this to be final, but we want to override it to return more specific types.
|
||||
// Inheritance is closed, and all subtypes are @DoNotCall, so this is safe to suppress.
|
||||
@SuppressWarnings("DoNotCall")
|
||||
public ImmutableCollection<V> removeAll(Object key) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
// DoNotCall wants this to be final, but we want to override it to return more specific types.
|
||||
// Inheritance is closed, and all subtypes are @DoNotCall, so this is safe to suppress.
|
||||
@SuppressWarnings("DoNotCall")
|
||||
public ImmutableCollection<V> replaceValues(K key, Iterable<? extends V> values) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final void clear() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable collection of the values for the given key. If no mappings in the multimap
|
||||
* have the provided key, an empty immutable collection is returned. The values are in the same
|
||||
* order as the parameters used to build this multimap.
|
||||
*/
|
||||
@Override
|
||||
public abstract ImmutableCollection<V> get(K key);
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap which is the inverse of this one. For every key-value mapping in
|
||||
* the original, the result will have a mapping with key and value reversed.
|
||||
*/
|
||||
public abstract ImmutableMultimap<V, K> inverse();
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final boolean put(K key, V value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final boolean putAll(K key, Iterable<? extends V> values) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final boolean putAll(Multimap<? extends K, ? extends V> multimap) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final boolean remove(Object key, Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@code true} if this immutable multimap's implementation contains references to
|
||||
* user-created objects that aren't accessible via this multimap's methods. This is generally used
|
||||
* to determine whether {@code copyOf} implementations should make an explicit copy to avoid
|
||||
* memory leaks.
|
||||
*/
|
||||
boolean isPartialView() {
|
||||
return map.isPartialView();
|
||||
}
|
||||
|
||||
// accessors
|
||||
|
||||
@Override
|
||||
public boolean containsKey(Object key) {
|
||||
return map.containsKey(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsValue(Object value) {
|
||||
return value != null && super.containsValue(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
// views
|
||||
|
||||
/**
|
||||
* Returns an immutable set of the distinct keys in this multimap, in the same order as they
|
||||
* appear in this multimap.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableSet<K> keySet() {
|
||||
return map.keySet();
|
||||
}
|
||||
|
||||
@Override
|
||||
Set<K> createKeySet() {
|
||||
throw new AssertionError("unreachable");
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable map that associates each key with its corresponding values in the
|
||||
* multimap. Keys and values appear in the same order as in this multimap.
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked") // a widening cast
|
||||
public ImmutableMap<K, Collection<V>> asMap() {
|
||||
return (ImmutableMap) map;
|
||||
}
|
||||
|
||||
@Override
|
||||
Map<K, Collection<V>> createAsMap() {
|
||||
throw new AssertionError("should never be called");
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable collection of all key-value pairs in the multimap.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableCollection<Entry<K, V>> entries() {
|
||||
return (ImmutableCollection<Entry<K, V>>) super.entries();
|
||||
}
|
||||
|
||||
@Override
|
||||
ImmutableCollection<Entry<K, V>> createEntries() {
|
||||
return new EntryCollection<>(this);
|
||||
}
|
||||
|
||||
private static class EntryCollection<K, V> extends ImmutableCollection<Entry<K, V>> {
|
||||
final ImmutableMultimap<K, V> multimap;
|
||||
|
||||
EntryCollection(ImmutableMultimap<K, V> multimap) {
|
||||
this.multimap = multimap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnmodifiableIterator<Entry<K, V>> iterator() {
|
||||
return multimap.entryIterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isPartialView() {
|
||||
return multimap.isPartialView();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return multimap.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object object) {
|
||||
if (object instanceof Entry<?, ?> entry) {
|
||||
return multimap.containsEntry(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
UnmodifiableIterator<Entry<K, V>> entryIterator() {
|
||||
return new UnmodifiableIterator<Entry<K, V>>() {
|
||||
final Iterator<? extends Entry<K, ? extends ImmutableCollection<V>>> asMapItr =
|
||||
map.entrySet().iterator();
|
||||
K currentKey = null;
|
||||
Iterator<V> valueItr = emptyIterator();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return valueItr.hasNext() || asMapItr.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Entry<K, V> next() {
|
||||
if (!valueItr.hasNext()) {
|
||||
Entry<K, ? extends ImmutableCollection<V>> entry = asMapItr.next();
|
||||
currentKey = entry.getKey();
|
||||
valueItr = entry.getValue().iterator();
|
||||
}
|
||||
/*
|
||||
* requireNonNull is safe: The first call to this method always enters the !hasNext() case
|
||||
* and populates currentKey, after which it's never cleared.
|
||||
*/
|
||||
return new ImmutableEntry<>(requireNonNull(currentKey), valueItr.next());
|
||||
}
|
||||
};
|
||||
}
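// Iteration sketch: the iterator above walks the asMap() view one key at a time,
// so entries() always yields all pairs with the same key consecutively
// ("key-grouped" order, see the class documentation).
//
//   ImmutableMultimap<String, Integer> m = ImmutableMultimap.of("a", 1, "b", 2, "a", 3);
//   for (Map.Entry<String, Integer> e : m.entries()) {
//     // visits a=1, a=3, b=2 - the second "a" entry is grouped with the first
//   }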
|
||||
|
||||
@Override
|
||||
Spliterator<Entry<K, V>> entrySpliterator() {
|
||||
return CollectSpliterators.flatMap(
|
||||
asMap().entrySet().spliterator(),
|
||||
keyToValueCollectionEntry -> {
|
||||
K key = keyToValueCollectionEntry.getKey();
|
||||
Collection<V> valueCollection = keyToValueCollectionEntry.getValue();
|
||||
return CollectSpliterators.map(
|
||||
valueCollection.spliterator(), (V value) -> Maps.immutableEntry(key, value));
|
||||
},
|
||||
Spliterator.SIZED | (this instanceof SetMultimap ? Spliterator.DISTINCT : 0),
|
||||
size());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forEach(BiConsumer<? super K, ? super V> action) {
|
||||
Objects.requireNonNull(action);
|
||||
asMap()
|
||||
.forEach(
|
||||
(key, valueCollection) -> valueCollection.forEach(value -> action.accept(key, value)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing all the keys in this multimap, in the same order and
|
||||
* with the same frequencies as they appear in this multimap; to get only a single occurrence of
|
||||
* each key, use {@link #keySet}.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableMultiset<K> keys() {
|
||||
return (ImmutableMultiset<K>) super.keys();
|
||||
}
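// Usage sketch contrasting keys() and keySet(): keys() is a multiset that keeps
// one occurrence per mapping, while keySet() collapses to the distinct keys.
//
//   ImmutableMultimap<String, Integer> m = ImmutableMultimap.of("a", 1, "a", 2, "b", 3);
//   m.keys().count("a");   // 2
//   m.keySet().size();     // 2 - just "a" and "b"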
|
||||
|
||||
@Override
|
||||
ImmutableMultiset<K> createKeys() {
|
||||
return new Keys();
|
||||
}
|
||||
|
||||
@SuppressWarnings("serial") // Uses writeReplace, not default serialization
|
||||
class Keys extends ImmutableMultiset<K> {
|
||||
@Override
|
||||
public boolean contains(Object object) {
|
||||
return containsKey(object);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int count(Object element) {
|
||||
Collection<V> values = map.get(element);
|
||||
return (values == null) ? 0 : values.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ImmutableSet<K> elementSet() {
|
||||
return keySet();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return ImmutableMultimap.this.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
ImmutableMultiset.Entry<K> getEntry(int index) {
|
||||
Entry<K, ? extends Collection<V>> entry = map.entrySet().asList().get(index);
|
||||
return Multisets.immutableEntry(entry.getKey(), entry.getValue().size());
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isPartialView() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
Object writeReplace() {
|
||||
return new KeysSerializedForm(ImmutableMultimap.this);
|
||||
}
|
||||
|
||||
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
|
||||
throw new InvalidObjectException("Use KeysSerializedForm");
|
||||
}
|
||||
}
|
||||
|
||||
private static final class KeysSerializedForm implements Serializable {
|
||||
final ImmutableMultimap<?, ?> multimap;
|
||||
|
||||
KeysSerializedForm(ImmutableMultimap<?, ?> multimap) {
|
||||
this.multimap = multimap;
|
||||
}
|
||||
|
||||
Object readResolve() {
|
||||
return multimap.keys();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable collection of the values in this multimap. Its iterator traverses the
|
||||
* values for the first key, the values for the second key, and so on.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableCollection<V> values() {
|
||||
return (ImmutableCollection<V>) super.values();
|
||||
}
|
||||
|
||||
@Override
|
||||
ImmutableCollection<V> createValues() {
|
||||
return new Values<>(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
UnmodifiableIterator<V> valueIterator() {
|
||||
return new UnmodifiableIterator<V>() {
|
||||
final Iterator<? extends ImmutableCollection<V>> valueCollectionItr = map.values().iterator();
|
||||
Iterator<V> valueItr = Iterators.emptyIterator();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return valueItr.hasNext() || valueCollectionItr.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public V next() {
|
||||
if (!valueItr.hasNext()) {
|
||||
valueItr = valueCollectionItr.next().iterator();
|
||||
}
|
||||
return valueItr.next();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static final class Values<K, V> extends ImmutableCollection<V> {
|
||||
private final transient ImmutableMultimap<K, V> multimap;
|
||||
|
||||
Values(ImmutableMultimap<K, V> multimap) {
|
||||
this.multimap = multimap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object object) {
|
||||
return multimap.containsValue(object);
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnmodifiableIterator<V> iterator() {
|
||||
return multimap.valueIterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
int copyIntoArray(Object[] dst, int offset) {
|
||||
for (ImmutableCollection<V> valueCollection : multimap.map.values()) {
|
||||
offset = valueCollection.copyIntoArray(dst, offset);
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return multimap.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isPartialView() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
}
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
|
||||
private static void checkEntryNotNull(Object key, Object value) {
|
||||
if (key == null) {
|
||||
throw new NullPointerException("null key in entry: null=" + value);
|
||||
} else if (value == null) {
|
||||
throw new NullPointerException("null value in entry: " + key + "=null");
|
||||
}
|
||||
}
|
||||
|
||||
private static <T extends Object> UnmodifiableIterator<T> emptyIterator() {
|
||||
return emptyListIterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the empty iterator.
|
||||
*
|
||||
* <p>The {@link Iterable} equivalent of this method is {@link ImmutableSet#of()}.
|
||||
*/
|
||||
// Casting to any type is safe since there are no actual elements.
|
||||
@SuppressWarnings("unchecked")
|
||||
static <T extends Object> UnmodifiableListIterator<T> emptyListIterator() {
|
||||
return (UnmodifiableListIterator<T>) ImmutableCollection.ArrayItr.EMPTY;
|
||||
}
|
||||
}
@ -0,0 +1,592 @@
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.io.InvalidObjectException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.ToIntFunction;
|
||||
import java.util.stream.Collector;
|
||||
import org.xbib.datastructures.api.Multiset;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
/**
|
||||
* A {@link Multiset} whose contents will never change, with many other important properties
|
||||
* detailed at {@link ImmutableCollection}.
|
||||
*
|
||||
* <p><b>Grouped iteration.</b> In all current implementations, duplicate elements always appear
|
||||
* consecutively when iterating. Elements iterate in order by the <i>first</i> appearance of that
|
||||
* element when the multiset was created.
|
||||
*
|
||||
*/
|
||||
@SuppressWarnings("serial") // we're overriding default serialization
|
||||
public abstract class ImmutableMultiset<E> extends ImmutableCollection<E>
|
||||
implements Multiset<E> {
|
||||
|
||||
/**
|
||||
* Returns a {@code Collector} that accumulates the input elements into a new {@code
|
||||
* ImmutableMultiset}. Elements iterate in order by the <i>first</i> appearance of that element in
|
||||
* encounter order.
|
||||
*
|
||||
* @since 21.0
|
||||
*/
|
||||
public static <E> Collector<E, ?, ImmutableMultiset<E>> toImmutableMultiset() {
|
||||
return CollectCollectors.toImmutableMultiset(Function.identity(), e -> 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@code Collector} that accumulates elements into an {@code ImmutableMultiset} whose
|
||||
* elements are the result of applying {@code elementFunction} to the inputs, with counts equal to
|
||||
* the result of applying {@code countFunction} to the inputs.
|
||||
*
|
||||
* <p>If the mapped elements contain duplicates (according to {@link Object#equals}), the first
|
||||
* occurrence in encounter order appears in the resulting multiset, with count equal to the sum of
|
||||
* the outputs of {@code countFunction.applyAsInt(t)} for each {@code t} mapped to that element.
|
||||
*
|
||||
* @since 22.0
|
||||
*/
|
||||
public static <T extends Object, E>
|
||||
Collector<T, ?, ImmutableMultiset<E>> toImmutableMultiset(
|
||||
Function<? super T, ? extends E> elementFunction,
|
||||
ToIntFunction<? super T> countFunction) {
|
||||
return CollectCollectors.toImmutableMultiset(elementFunction, countFunction);
|
||||
}
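// Collector sketch, assuming java.util.stream imports: the no-argument collector
// counts each stream element once, the two-argument form maps elements through
// elementFunction and takes their weight from countFunction.
//
//   ImmutableMultiset<String> letters =
//       Stream.of("a", "b", "a").collect(ImmutableMultiset.toImmutableMultiset());
//   letters.count("a");   // 2
//
//   ImmutableMultiset<String> weighted =
//       Stream.of("a", "bb", "a")
//           .collect(ImmutableMultiset.toImmutableMultiset(s -> s, String::length));
//   weighted.count("a");    // 2 - the two length-1 occurrences add up
//   weighted.count("bb");   // 2 - its length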
|
||||
|
||||
/**
|
||||
* Returns the empty immutable multiset.
|
||||
*
|
||||
* <p><b>Performance note:</b> the instance returned is a singleton.
|
||||
*/
|
||||
@SuppressWarnings("unchecked") // all supported methods are covariant
|
||||
public static <E> ImmutableMultiset<E> of() {
|
||||
return (ImmutableMultiset<E>) RegularImmutableMultiset.EMPTY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing a single element.
|
||||
*
|
||||
* @throws NullPointerException if {@code element} is null
|
||||
* @since 6.0 (source-compatible since 2.0)
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> of(E element) {
|
||||
return copyFromElements(element);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in order.
|
||||
*
|
||||
* @throws NullPointerException if any element is null
|
||||
* @since 6.0 (source-compatible since 2.0)
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> of(E e1, E e2) {
|
||||
return copyFromElements(e1, e2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in the "grouped iteration order"
|
||||
* described in the class documentation.
|
||||
*
|
||||
* @throws NullPointerException if any element is null
|
||||
* @since 6.0 (source-compatible since 2.0)
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> of(E e1, E e2, E e3) {
|
||||
return copyFromElements(e1, e2, e3);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in the "grouped iteration order"
|
||||
* described in the class documentation.
|
||||
*
|
||||
* @throws NullPointerException if any element is null
|
||||
* @since 6.0 (source-compatible since 2.0)
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> of(E e1, E e2, E e3, E e4) {
|
||||
return copyFromElements(e1, e2, e3, e4);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in the "grouped iteration order"
|
||||
* described in the class documentation.
|
||||
*
|
||||
* @throws NullPointerException if any element is null
|
||||
* @since 6.0 (source-compatible since 2.0)
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> of(E e1, E e2, E e3, E e4, E e5) {
|
||||
return copyFromElements(e1, e2, e3, e4, e5);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in the "grouped iteration order"
|
||||
* described in the class documentation.
|
||||
*
|
||||
* @throws NullPointerException if any element is null
|
||||
* @since 6.0 (source-compatible since 2.0)
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> of(E e1, E e2, E e3, E e4, E e5, E e6, E... others) {
|
||||
return new Builder<E>().add(e1).add(e2).add(e3).add(e4).add(e5).add(e6).add(others).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in the "grouped iteration order"
|
||||
* described in the class documentation.
|
||||
*
|
||||
* @throws NullPointerException if any of {@code elements} is null
|
||||
* @since 6.0
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> copyOf(E[] elements) {
|
||||
return copyFromElements(elements);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in the "grouped iteration order"
|
||||
* described in the class documentation.
|
||||
*
|
||||
* @throws NullPointerException if any of {@code elements} is null
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> copyOf(Iterable<? extends E> elements) {
|
||||
if (elements instanceof ImmutableMultiset) {
|
||||
@SuppressWarnings("unchecked") // all supported methods are covariant
|
||||
ImmutableMultiset<E> result = (ImmutableMultiset<E>) elements;
|
||||
if (!result.isPartialView()) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
Multiset<? extends E> multiset =
|
||||
(elements instanceof Multiset)
|
||||
? Multisets.cast(elements)
|
||||
: LinkedHashMultiset.create(elements);
|
||||
|
||||
return copyFromEntries(multiset.entrySet());
|
||||
}
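// Usage sketch for copyOf(): duplicates are kept as counts, and equal elements
// iterate consecutively in the order of their first appearance.
//
//   ImmutableMultiset<String> m = ImmutableMultiset.copyOf(List.of("a", "b", "a"));
//   m.count("a");   // 2
//   m.size();       // 3 - total occurrences, not distinct elements
//   // iteration order: a, a, b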
|
||||
|
||||
/**
|
||||
* Returns an immutable multiset containing the given elements, in the "grouped iteration order"
|
||||
* described in the class documentation.
|
||||
*
|
||||
* @throws NullPointerException if any of {@code elements} is null
|
||||
*/
|
||||
public static <E> ImmutableMultiset<E> copyOf(Iterator<? extends E> elements) {
|
||||
Multiset<E> multiset = LinkedHashMultiset.create();
|
||||
Iterators.addAll(multiset, elements);
|
||||
return copyFromEntries(multiset.entrySet());
|
||||
}
|
||||
|
||||
private static <E> ImmutableMultiset<E> copyFromElements(E... elements) {
|
||||
Multiset<E> multiset = LinkedHashMultiset.create();
|
||||
Collections.addAll(multiset, elements);
|
||||
return copyFromEntries(multiset.entrySet());
|
||||
}
|
||||
|
||||
static <E> ImmutableMultiset<E> copyFromEntries(
|
||||
Collection<? extends Entry<? extends E>> entries) {
|
||||
if (entries.isEmpty()) {
|
||||
return of();
|
||||
} else {
|
||||
return RegularImmutableMultiset.create(entries);
|
||||
}
|
||||
}
|
||||
|
||||
ImmutableMultiset() {}
|
||||
|
||||
@Override
|
||||
public UnmodifiableIterator<E> iterator() {
|
||||
final Iterator<Entry<E>> entryIterator = entrySet().iterator();
|
||||
return new UnmodifiableIterator<E>() {
|
||||
int remaining;
|
||||
E element;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return (remaining > 0) || entryIterator.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public E next() {
|
||||
if (remaining <= 0) {
|
||||
Entry<E> entry = entryIterator.next();
|
||||
element = entry.getElement();
|
||||
remaining = entry.getCount();
|
||||
}
|
||||
remaining--;
|
||||
/*
|
||||
* requireNonNull is safe because `remaining` starts at 0, forcing us to initialize
|
||||
* `element` above. After that, we never clear it.
|
||||
*/
|
||||
return requireNonNull(element);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private transient ImmutableList<E> asList;
|
||||
|
||||
@Override
|
||||
public ImmutableList<E> asList() {
|
||||
ImmutableList<E> result = asList;
|
||||
return (result == null) ? asList = super.asList() : result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object object) {
|
||||
return count(object) > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the collection unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final int add(E element, int occurrences) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the collection unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final int remove(Object element, int occurrences) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the collection unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final int setCount(E element, int count) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the collection unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final boolean setCount(E element, int oldCount, int newCount) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
int copyIntoArray(Object[] dst, int offset) {
|
||||
for (Multiset.Entry<E> entry : entrySet()) {
|
||||
Arrays.fill(dst, offset, offset + entry.getCount(), entry.getElement());
|
||||
offset += entry.getCount();
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object object) {
|
||||
return Multisets.equalsImpl(this, object);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Sets.hashCodeImpl(entrySet());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return entrySet().toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract ImmutableSet<E> elementSet();
|
||||
|
||||
private transient ImmutableSet<Entry<E>> entrySet;
|
||||
|
||||
@Override
|
||||
public ImmutableSet<Entry<E>> entrySet() {
|
||||
ImmutableSet<Entry<E>> es = entrySet;
|
||||
return (es == null) ? (entrySet = createEntrySet()) : es;
|
||||
}
|
||||
|
||||
private ImmutableSet<Entry<E>> createEntrySet() {
|
||||
return isEmpty() ? ImmutableSet.<Entry<E>>of() : new EntrySet();
|
||||
}
|
||||
|
||||
abstract Entry<E> getEntry(int index);
|
||||
|
||||
private final class EntrySet extends IndexedImmutableSet<Entry<E>> {
|
||||
@Override
|
||||
boolean isPartialView() {
|
||||
return ImmutableMultiset.this.isPartialView();
|
||||
}
|
||||
|
||||
@Override
|
||||
Entry<E> get(int index) {
|
||||
return getEntry(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return elementSet().size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
if (o instanceof Entry) {
|
||||
Entry<?> entry = (Entry<?>) o;
|
||||
if (entry.getCount() <= 0) {
|
||||
return false;
|
||||
}
|
||||
int count = count(entry.getElement());
|
||||
return count == entry.getCount();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return ImmutableMultiset.this.hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
Object writeReplace() {
|
||||
return new EntrySetSerializedForm<E>(ImmutableMultiset.this);
|
||||
}
|
||||
|
||||
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
|
||||
throw new InvalidObjectException("Use EntrySetSerializedForm");
|
||||
}
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
}
|
||||
|
||||
static class EntrySetSerializedForm<E> implements Serializable {
|
||||
final ImmutableMultiset<E> multiset;
|
||||
|
||||
EntrySetSerializedForm(ImmutableMultiset<E> multiset) {
|
||||
this.multiset = multiset;
|
||||
}
|
||||
|
||||
Object readResolve() {
|
||||
return multiset.entrySet();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
Object writeReplace() {
|
||||
return new SerializedForm(this);
|
||||
}
|
||||
|
||||
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
|
||||
throw new InvalidObjectException("Use SerializedForm");
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new builder. The generated builder is equivalent to the builder created by the {@link
|
||||
* Builder} constructor.
|
||||
*/
|
||||
public static <E> Builder<E> builder() {
|
||||
return new Builder<E>();
|
||||
}
|
||||
|
||||
/**
|
||||
* A builder for creating immutable multiset instances, especially {@code public static final}
|
||||
* multisets ("constant multisets"). Example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* public static final ImmutableMultiset<Bean> BEANS =
|
||||
* new ImmutableMultiset.Builder<Bean>()
|
||||
* .addCopies(Bean.COCOA, 4)
|
||||
* .addCopies(Bean.GARDEN, 6)
|
||||
* .addCopies(Bean.RED, 8)
|
||||
* .addCopies(Bean.BLACK_EYED, 10)
|
||||
* .build();
|
||||
* }</pre>
|
||||
*
|
||||
* <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
|
||||
* multiple multisets in series.
|
||||
*
|
||||
* @since 2.0
|
||||
*/
|
||||
public static class Builder<E> extends ImmutableCollection.Builder<E> {
|
||||
final Multiset<E> contents;
|
||||
|
||||
/**
|
||||
* Creates a new builder. The returned builder is equivalent to the builder generated by {@link
|
||||
* ImmutableMultiset#builder}.
|
||||
*/
|
||||
public Builder() {
|
||||
this(LinkedHashMultiset.<E>create());
|
||||
}
|
||||
|
||||
Builder(Multiset<E> contents) {
|
||||
this.contents = contents;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds {@code element} to the {@code ImmutableMultiset}.
|
||||
*
|
||||
* @param element the element to add
|
||||
* @return this {@code Builder} object
|
||||
* @throws NullPointerException if {@code element} is null
|
||||
*/
|
||||
@Override
|
||||
public Builder<E> add(E element) {
|
||||
contents.add(Objects.requireNonNull(element));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds each element of {@code elements} to the {@code ImmutableMultiset}.
|
||||
*
|
||||
* @param elements the elements to add
|
||||
* @return this {@code Builder} object
|
||||
* @throws NullPointerException if {@code elements} is null or contains a null element
|
||||
*/
|
||||
@Override
|
||||
public Builder<E> add(E... elements) {
|
||||
super.add(elements);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a number of occurrences of an element to this {@code ImmutableMultiset}.
|
||||
*
|
||||
* @param element the element to add
|
||||
* @param occurrences the number of occurrences of the element to add. May be zero, in which
|
||||
* case no change will be made.
|
||||
* @return this {@code Builder} object
|
||||
* @throws NullPointerException if {@code element} is null
|
||||
* @throws IllegalArgumentException if {@code occurrences} is negative, or if this operation
|
||||
* would result in more than {@link Integer#MAX_VALUE} occurrences of the element
|
||||
*/
|
||||
public Builder<E> addCopies(E element, int occurrences) {
|
||||
contents.add(Objects.requireNonNull(element), occurrences);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds or removes the necessary occurrences of an element such that the element attains the
|
||||
* desired count.
|
||||
*
|
||||
* @param element the element to add or remove occurrences of
|
||||
* @param count the desired count of the element in this multiset
|
||||
* @return this {@code Builder} object
|
||||
* @throws NullPointerException if {@code element} is null
|
||||
* @throws IllegalArgumentException if {@code count} is negative
|
||||
*/
|
||||
public Builder<E> setCount(E element, int count) {
|
||||
contents.setCount(Objects.requireNonNull(element), count);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds each element of {@code elements} to the {@code ImmutableMultiset}.
|
||||
*
|
||||
* @param elements the {@code Iterable} to add to the {@code ImmutableMultiset}
|
||||
* @return this {@code Builder} object
|
||||
* @throws NullPointerException if {@code elements} is null or contains a null element
|
||||
*/
|
||||
@Override
|
||||
public Builder<E> addAll(Iterable<? extends E> elements) {
|
||||
if (elements instanceof Multiset) {
|
||||
Multiset<? extends E> multiset = Multisets.cast(elements);
|
||||
multiset.forEachEntry((e, n) -> contents.add(Objects.requireNonNull(e), n));
|
||||
} else {
|
||||
super.addAll(elements);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds each element of {@code elements} to the {@code ImmutableMultiset}.
|
||||
*
|
||||
* @param elements the elements to add to the {@code ImmutableMultiset}
|
||||
* @return this {@code Builder} object
|
||||
* @throws NullPointerException if {@code elements} is null or contains a null element
|
||||
*/
|
||||
@Override
|
||||
public Builder<E> addAll(Iterator<? extends E> elements) {
|
||||
super.addAll(elements);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a newly-created {@code ImmutableMultiset} based on the contents of the {@code
|
||||
* Builder}.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableMultiset<E> build() {
|
||||
return copyOf(contents);
|
||||
}
|
||||
|
||||
ImmutableMultiset<E> buildJdkBacked() {
|
||||
if (contents.isEmpty()) {
|
||||
return of();
|
||||
}
|
||||
return JdkBackedImmutableMultiset.create(contents.entrySet());
|
||||
}
|
||||
}
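// Builder sketch: addCopies() adds occurrences on top of whatever is already in
// the builder, while setCount() overwrites the current count of an element.
//
//   ImmutableMultiset<String> beans =
//       new ImmutableMultiset.Builder<String>()
//           .add("cocoa")
//           .addCopies("garden", 3)
//           .setCount("cocoa", 5)
//           .build();
//   beans.count("cocoa");    // 5 - setCount replaced the single earlier add
//   beans.count("garden");   // 3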
|
||||
|
||||
static final class ElementSet<E> extends ImmutableSet.Indexed<E> {
|
||||
private final List<Entry<E>> entries;
|
||||
// TODO(cpovirk): @Weak?
|
||||
private final Multiset<E> delegate;
|
||||
|
||||
ElementSet(List<Entry<E>> entries, Multiset<E> delegate) {
|
||||
this.entries = entries;
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
E get(int index) {
|
||||
return entries.get(index).getElement();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object object) {
|
||||
return delegate.contains(object);
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isPartialView() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return entries.size();
|
||||
}
|
||||
}
|
||||
|
||||
static final class SerializedForm implements Serializable {
|
||||
final Object[] elements;
|
||||
final int[] counts;
|
||||
|
||||
// "extends Object" works around https://github.com/typetools/checker-framework/issues/3013
|
||||
SerializedForm(Multiset<? extends Object> multiset) {
|
||||
int distinct = multiset.entrySet().size();
|
||||
elements = new Object[distinct];
|
||||
counts = new int[distinct];
|
||||
int i = 0;
|
||||
for (Entry<? extends Object> entry : multiset.entrySet()) {
|
||||
elements[i] = entry.getElement();
|
||||
counts[i] = entry.getCount();
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
Object readResolve() {
|
||||
LinkedHashMultiset<Object> multiset = LinkedHashMultiset.create(elements.length);
|
||||
for (int i = 0; i < elements.length; i++) {
|
||||
multiset.add(elements[i], counts[i]);
|
||||
}
|
||||
return ImmutableMultiset.copyOf(multiset);
|
||||
}
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
}
|
||||
}
@ -0,0 +1,619 @@
package org.xbib.datastructures.multi;
|
||||
|
||||
//import com.google.common.base.MoreObjects;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InvalidObjectException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Comparator;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collector;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
import org.xbib.datastructures.api.SetMultimap;
|
||||
import org.xbib.datastructures.immutable.order.Ordering;
|
||||
|
||||
/**
|
||||
* A {@link SetMultimap} whose contents will never change, with many other important properties
|
||||
* detailed at {@link ImmutableCollection}.
|
||||
*
|
||||
* <p><b>Warning:</b> As in all {@link SetMultimap}s, do not modify either a key <i>or a value</i>
|
||||
* of a {@code ImmutableSetMultimap} in a way that affects its {@link Object#equals} behavior.
|
||||
* Undefined behavior and bugs will result.
|
||||
*
|
||||
* <p>See the Guava User Guide article on <a href=
|
||||
* "https://github.com/google/guava/wiki/ImmutableCollectionsExplained">immutable collections</a>.
|
||||
*/
|
||||
public class ImmutableSetMultimap<K, V> extends ImmutableMultimap<K, V>
|
||||
implements SetMultimap<K, V> {
|
||||
/**
|
||||
* Returns a {@link Collector} that accumulates elements into an {@code ImmutableSetMultimap}
|
||||
* whose keys and values are the result of applying the provided mapping functions to the input
|
||||
* elements.
|
||||
*
|
||||
* <p>For streams with defined encounter order (as defined in the Ordering section of the {@link
|
||||
* java.util.stream} Javadoc), that order is preserved, but entries are <a
|
||||
* href="ImmutableMultimap.html#iteration">grouped by key</a>.
|
||||
*
|
||||
* <p>Example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* static final Multimap<Character, String> FIRST_LETTER_MULTIMAP =
|
||||
* Stream.of("banana", "apple", "carrot", "asparagus", "cherry")
|
||||
* .collect(toImmutableSetMultimap(str -> str.charAt(0), str -> str.substring(1)));
|
||||
*
|
||||
* // is equivalent to
|
||||
*
|
||||
* static final Multimap<Character, String> FIRST_LETTER_MULTIMAP =
|
||||
* new ImmutableSetMultimap.Builder<Character, String>()
|
||||
* .put('b', "anana")
|
||||
* .putAll('a', "pple", "sparagus")
|
||||
* .putAll('c', "arrot", "herry")
|
||||
* .build();
|
||||
* }</pre>
|
||||
*/
|
||||
public static <T extends Object, K, V>
|
||||
Collector<T, ?, ImmutableSetMultimap<K, V>> toImmutableSetMultimap(
|
||||
Function<? super T, ? extends K> keyFunction,
|
||||
Function<? super T, ? extends V> valueFunction) {
|
||||
Objects.requireNonNull(keyFunction, "keyFunction");
|
||||
Objects.requireNonNull(valueFunction, "valueFunction");
|
||||
return Collector.of(
|
||||
ImmutableSetMultimap::<K, V>builder,
|
||||
(builder, t) -> builder.put(keyFunction.apply(t), valueFunction.apply(t)),
|
||||
Builder::combine,
|
||||
Builder::build);
|
||||
}
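// Collector sketch, assuming java.util.stream imports: because the result is a
// SetMultimap, a (key, value) pair produced more than once by the stream is
// stored only once per key.
//
//   ImmutableSetMultimap<Integer, String> byLength =
//       Stream.of("ant", "bee", "ant", "cow")
//           .collect(ImmutableSetMultimap.toImmutableSetMultimap(String::length, s -> s));
//   byLength.get(3);   // {ant, bee, cow} - the duplicate "ant" collapses to one entry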
|
||||
|
||||
/**
|
||||
* Returns a {@code Collector} accumulating entries into an {@code ImmutableSetMultimap}. Each
|
||||
* input element is mapped to a key and a stream of values, each of which are put into the
|
||||
* resulting {@code Multimap}, in the encounter order of the stream and the encounter order of the
|
||||
* streams of values.
|
||||
*
|
||||
* <p>Example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* static final ImmutableSetMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
|
||||
* Stream.of("banana", "apple", "carrot", "asparagus", "cherry")
|
||||
* .collect(
|
||||
* flatteningToImmutableSetMultimap(
|
||||
* str -> str.charAt(0),
|
||||
* str -> str.substring(1).chars().mapToObj(c -> (char) c)));
|
||||
*
|
||||
* // is equivalent to
|
||||
*
|
||||
* static final ImmutableSetMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
|
||||
* ImmutableSetMultimap.<Character, Character>builder()
|
||||
* .putAll('b', Arrays.asList('a', 'n', 'a', 'n', 'a'))
|
||||
* .putAll('a', Arrays.asList('p', 'p', 'l', 'e'))
|
||||
* .putAll('c', Arrays.asList('a', 'r', 'r', 'o', 't'))
|
||||
* .putAll('a', Arrays.asList('s', 'p', 'a', 'r', 'a', 'g', 'u', 's'))
|
||||
* .putAll('c', Arrays.asList('h', 'e', 'r', 'r', 'y'))
|
||||
* .build();
|
||||
*
|
||||
* // after deduplication, the resulting multimap is equivalent to
|
||||
*
|
||||
* static final ImmutableSetMultimap<Character, Character> FIRST_LETTER_MULTIMAP =
|
||||
* ImmutableSetMultimap.<Character, Character>builder()
|
||||
* .putAll('b', Arrays.asList('a', 'n'))
|
||||
* .putAll('a', Arrays.asList('p', 'l', 'e', 's', 'a', 'r', 'g', 'u'))
|
||||
* .putAll('c', Arrays.asList('a', 'r', 'o', 't', 'h', 'e', 'y'))
|
||||
* .build();
|
||||
* }</pre>
|
||||
*
|
||||
*/
|
||||
public static <T extends Object, K, V>
|
||||
Collector<T, ?, ImmutableSetMultimap<K, V>> flatteningToImmutableSetMultimap(
|
||||
Function<? super T, ? extends K> keyFunction,
|
||||
Function<? super T, ? extends Stream<? extends V>> valuesFunction) {
|
||||
Objects.requireNonNull(keyFunction);
|
||||
Objects.requireNonNull(valuesFunction);
|
||||
return Collectors.collectingAndThen(
|
||||
flatteningToMultimap(
|
||||
input -> Objects.requireNonNull(keyFunction.apply(input)),
|
||||
input -> valuesFunction.apply(input).peek(Objects::requireNonNull),
|
||||
MultimapBuilder.linkedHashKeys().linkedHashSetValues()::<K, V>build),
|
||||
ImmutableSetMultimap::copyOf);
|
||||
}
|
||||
|
||||
private static <
|
||||
T extends Object,
|
||||
K extends Object,
|
||||
V extends Object,
|
||||
M extends Multimap<K, V>>
|
||||
Collector<T, ?, M> flatteningToMultimap(
|
||||
Function<? super T, ? extends K> keyFunction,
|
||||
Function<? super T, ? extends Stream<? extends V>> valueFunction,
|
||||
Supplier<M> multimapSupplier) {
|
||||
Objects.requireNonNull(keyFunction);
|
||||
Objects.requireNonNull(valueFunction);
|
||||
Objects.requireNonNull(multimapSupplier);
|
||||
return Collector.of(
|
||||
multimapSupplier,
|
||||
(multimap, input) -> {
|
||||
K key = keyFunction.apply(input);
|
||||
Collection<V> valuesForKey = multimap.get(key);
|
||||
valueFunction.apply(input).forEachOrdered(valuesForKey::add);
|
||||
},
|
||||
(multimap1, multimap2) -> {
|
||||
multimap1.putAll(multimap2);
|
||||
return multimap1;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the empty multimap.
|
||||
*
|
||||
* <p><b>Performance note:</b> the instance returned is a singleton.
|
||||
*/
|
||||
// Casting is safe because the multimap will never hold any elements.
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <K, V> ImmutableSetMultimap<K, V> of() {
|
||||
return (ImmutableSetMultimap<K, V>) EmptyImmutableSetMultimap.INSTANCE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing a single entry.
|
||||
*/
|
||||
public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1) {
|
||||
Builder<K, V> builder = ImmutableSetMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
|
||||
* an entry (according to {@link Object#equals}) after the first are ignored.
|
||||
*/
|
||||
public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1, K k2, V v2) {
|
||||
Builder<K, V> builder = ImmutableSetMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
|
||||
* an entry (according to {@link Object#equals}) after the first are ignored.
|
||||
*/
|
||||
public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
|
||||
Builder<K, V> builder = ImmutableSetMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
builder.put(k3, v3);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
|
||||
* an entry (according to {@link Object#equals}) after the first are ignored.
|
||||
*/
|
||||
public static <K, V> ImmutableSetMultimap<K, V> of(
|
||||
K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
|
||||
Builder<K, V> builder = ImmutableSetMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
builder.put(k3, v3);
|
||||
builder.put(k4, v4);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the given entries, in order. Repeated occurrences of
|
||||
* an entry (according to {@link Object#equals}) after the first are ignored.
|
||||
*/
|
||||
public static <K, V> ImmutableSetMultimap<K, V> of(
|
||||
K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
|
||||
Builder<K, V> builder = ImmutableSetMultimap.builder();
|
||||
builder.put(k1, v1);
|
||||
builder.put(k2, v2);
|
||||
builder.put(k3, v3);
|
||||
builder.put(k4, v4);
|
||||
builder.put(k5, v5);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
// looking for of() with > 5 entries? Use the builder instead.
|
||||
|
||||
/**
|
||||
* Returns a new {@link Builder}.
|
||||
*/
|
||||
public static <K, V> Builder<K, V> builder() {
|
||||
return new Builder<>();
|
||||
}
|
||||
|
||||
/**
 * A builder for creating immutable {@code SetMultimap} instances, especially {@code public static
 * final} multimaps ("constant multimaps"). Example:
 *
 * <pre>{@code
 * static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
 *     new ImmutableSetMultimap.Builder<String, Integer>()
 *         .put("one", 1)
 *         .putAll("several", 1, 2, 3)
 *         .putAll("many", 1, 2, 3, 4, 5)
 *         .build();
 * }</pre>
 *
 * <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
 * multiple multimaps in series. Each multimap contains the key-value mappings in the previously
 * created multimaps.
 *
 * @since 2.0
 */
public static final class Builder<K, V> extends ImmutableMultimap.Builder<K, V> {
|
||||
/**
|
||||
* Creates a new builder. The returned builder is equivalent to the builder generated by {@link
|
||||
* ImmutableSetMultimap#builder}.
|
||||
*/
|
||||
public Builder() {
|
||||
super();
|
||||
}
|
||||
|
||||
@Override
|
||||
Collection<V> newMutableValueCollection() {
|
||||
return Platform.preservesInsertionOrderOnAddsSet();
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a key-value mapping to the built multimap if it is not already present.
|
||||
*/
|
||||
@Override
|
||||
public Builder<K, V> put(K key, V value) {
|
||||
super.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an entry to the built multimap if it is not already present.
|
||||
*/
|
||||
@Override
|
||||
public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
|
||||
super.put(entry);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
|
||||
super.putAll(entries);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
|
||||
super.putAll(key, values);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(K key, V... values) {
|
||||
return putAll(key, Arrays.asList(values));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
|
||||
for (Entry<? extends K, ? extends Collection<? extends V>> entry :
|
||||
multimap.asMap().entrySet()) {
|
||||
putAll(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
Builder<K, V> combine(ImmutableMultimap.Builder<K, V> other) {
|
||||
super.combine(other);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
|
||||
super.orderKeysBy(keyComparator);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
 * Specifies the ordering of the generated multimap's values for each key.
 *
 * <p>If this method is called, the sets returned by the {@code get()} method of the generated
 * multimap and its {@link Multimap#asMap()} view are {@link ImmutableSortedSet} instances.
 * However, serialization does not preserve that property, though it does maintain the key and
 * value ordering.
 */
@Override
public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
    super.orderValuesBy(valueComparator);
    return this;
}
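// Editor's illustrative sketch, not part of the original source: with orderValuesBy set, each
// value view of the built multimap is sorted by the given comparator, as documented above.
// The method name "sortedValuesExample" is hypothetical.
private static ImmutableSetMultimap<String, Integer> sortedValuesExample() {
    return ImmutableSetMultimap.<String, Integer>builder()
            .orderValuesBy(Comparator.<Integer>naturalOrder())
            .putAll("primes", 5, 3, 2, 7)
            .build(); // get("primes") iterates 2, 3, 5, 7
}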
|
||||
|
||||
/**
|
||||
* Returns a newly-created immutable set multimap.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableSetMultimap<K, V> build() {
|
||||
Collection<Entry<K, Collection<V>>> mapEntries = builderMap.entrySet();
|
||||
if (keyComparator != null) {
|
||||
mapEntries = Ordering.from(keyComparator).<K>onKeys().immutableSortedCopy(mapEntries);
|
||||
}
|
||||
return fromMapEntries(mapEntries, valueComparator);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable set multimap containing the same mappings as {@code multimap}. The
|
||||
* generated multimap's key and value orderings correspond to the iteration ordering of the {@code
|
||||
* multimap.asMap()} view. Repeated occurrences of an entry in the multimap after the first are
|
||||
* ignored.
|
||||
*
|
||||
* <p>Despite the method name, this method attempts to avoid actually copying the data when it is
|
||||
* safe to do so. The exact circumstances under which a copy will or will not be performed are
|
||||
* undocumented and subject to change.
|
||||
*
|
||||
* @throws NullPointerException if any key or value in {@code multimap} is null
|
||||
*/
|
||||
public static <K, V> ImmutableSetMultimap<K, V> copyOf(
|
||||
Multimap<? extends K, ? extends V> multimap) {
|
||||
return copyOf(multimap, null);
|
||||
}
|
||||
|
||||
private static <K, V> ImmutableSetMultimap<K, V> copyOf(
|
||||
Multimap<? extends K, ? extends V> multimap,
|
||||
Comparator<? super V> valueComparator) {
|
||||
checkNotNull(multimap); // eager for GWT
|
||||
if (multimap.isEmpty() && valueComparator == null) {
|
||||
return of();
|
||||
}
|
||||
|
||||
if (multimap instanceof ImmutableSetMultimap) {
|
||||
@SuppressWarnings("unchecked") // safe since multimap is not writable
|
||||
ImmutableSetMultimap<K, V> kvMultimap = (ImmutableSetMultimap<K, V>) multimap;
|
||||
if (!kvMultimap.isPartialView()) {
|
||||
return kvMultimap;
|
||||
}
|
||||
}
|
||||
|
||||
return fromMapEntries(multimap.asMap().entrySet(), valueComparator);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an immutable multimap containing the specified entries. The returned multimap iterates
|
||||
* over keys in the order they were first encountered in the input, and the values for each key
|
||||
* are iterated in the order they were encountered. If two values for the same key are {@linkplain
|
||||
* Object#equals equal}, the first value encountered is used.
|
||||
*
|
||||
* @throws NullPointerException if any key, value, or entry is null
|
||||
*/
|
||||
public static <K, V> ImmutableSetMultimap<K, V> copyOf(
|
||||
Iterable<? extends Entry<? extends K, ? extends V>> entries) {
|
||||
return new Builder<K, V>().putAll(entries).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an ImmutableSetMultimap from an asMap.entrySet.
|
||||
*/
|
||||
static <K, V> ImmutableSetMultimap<K, V> fromMapEntries(
|
||||
Collection<? extends Entry<? extends K, ? extends Collection<? extends V>>> mapEntries,
|
||||
Comparator<? super V> valueComparator) {
|
||||
if (mapEntries.isEmpty()) {
|
||||
return of();
|
||||
}
|
||||
ImmutableMap.Builder<K, ImmutableSet<V>> builder =
|
||||
new ImmutableMap.Builder<>(mapEntries.size());
|
||||
int size = 0;
|
||||
|
||||
for (Entry<? extends K, ? extends Collection<? extends V>> entry : mapEntries) {
|
||||
K key = entry.getKey();
|
||||
Collection<? extends V> values = entry.getValue();
|
||||
ImmutableSet<V> set = valueSet(valueComparator, values);
|
||||
if (!set.isEmpty()) {
|
||||
builder.put(key, set);
|
||||
size += set.size();
|
||||
}
|
||||
}
|
||||
|
||||
return new ImmutableSetMultimap<>(builder.buildOrThrow(), size, valueComparator);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returned by get() when a missing key is provided. Also holds the comparator, if any, used for
|
||||
* values.
|
||||
*/
|
||||
private final transient ImmutableSet<V> emptySet;
|
||||
|
||||
ImmutableSetMultimap(
|
||||
ImmutableMap<K, ImmutableSet<V>> map,
|
||||
int size,
|
||||
Comparator<? super V> valueComparator) {
|
||||
super(map, size);
|
||||
this.emptySet = emptySet(valueComparator);
|
||||
}
|
||||
|
||||
// views
|
||||
|
||||
/**
|
||||
* Returns an immutable set of the values for the given key. If no mappings in the multimap have
|
||||
* the provided key, an empty immutable set is returned. The values are in the same order as the
|
||||
* parameters used to build this multimap.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableSet<V> get(K key) {
|
||||
// This cast is safe as its type is known in constructor.
|
||||
ImmutableSet<V> set = (ImmutableSet<V>) map.get(key);
|
||||
return MoreObjects.firstNonNull(set, emptySet);
|
||||
}
|
||||
|
||||
private transient ImmutableSetMultimap<V, K> inverse;
|
||||
|
||||
/**
 * {@inheritDoc}
 *
 * <p>Because an inverse of a set multimap cannot contain multiple pairs with the same key and
 * value, this method returns an {@code ImmutableSetMultimap} rather than the {@code
 * ImmutableMultimap} specified in the {@code ImmutableMultimap} class.
 */
@Override
public ImmutableSetMultimap<V, K> inverse() {
    ImmutableSetMultimap<V, K> result = inverse;
    return (result == null) ? (inverse = invert()) : result;
}

private ImmutableSetMultimap<V, K> invert() {
    Builder<V, K> builder = builder();
    for (Entry<K, V> entry : entries()) {
        builder.put(entry.getValue(), entry.getKey());
    }
    ImmutableSetMultimap<V, K> invertedMultimap = builder.build();
    invertedMultimap.inverse = this;
    return invertedMultimap;
}
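// Editor's illustrative sketch, not part of the original source: inverse() swaps keys and
// values; because the result is itself a set multimap, repeated (value, key) pairs collapse.
// The method name "inverseExample" is hypothetical.
private static ImmutableSetMultimap<Integer, String> inverseExample() {
    ImmutableSetMultimap<String, Integer> byParity = of("odd", 1, "even", 2, "odd", 3);
    // byParity maps odd -> [1, 3] and even -> [2]; its inverse maps 1 -> [odd], 3 -> [odd], 2 -> [even].
    return byParity.inverse();
}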
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final ImmutableSet<V> removeAll(Object key) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/**
|
||||
* Guaranteed to throw an exception and leave the multimap unmodified.
|
||||
*
|
||||
* @throws UnsupportedOperationException always
|
||||
*/
|
||||
@Override
|
||||
public final ImmutableSet<V> replaceValues(K key, Iterable<? extends V> values) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
private transient ImmutableSet<Entry<K, V>> entries;
|
||||
|
||||
/**
|
||||
* Returns an immutable collection of all key-value pairs in the multimap. Its iterator traverses
|
||||
* the values for the first key, the values for the second key, and so on.
|
||||
*/
|
||||
@Override
|
||||
public ImmutableSet<Entry<K, V>> entries() {
|
||||
ImmutableSet<Entry<K, V>> result = entries;
|
||||
return result == null ? (entries = new EntrySet<>(this)) : result;
|
||||
}
|
||||
|
||||
private static final class EntrySet<K, V> extends ImmutableSet<Entry<K, V>> {
|
||||
private final transient ImmutableSetMultimap<K, V> multimap;
|
||||
|
||||
EntrySet(ImmutableSetMultimap<K, V> multimap) {
|
||||
this.multimap = multimap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object object) {
|
||||
if (object instanceof Entry<?, ?> entry) {
|
||||
return multimap.containsEntry(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return multimap.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnmodifiableIterator<Entry<K, V>> iterator() {
|
||||
return multimap.entryIterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isPartialView() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static <V> ImmutableSet<V> valueSet(Comparator<? super V> valueComparator, Collection<? extends V> values) {
|
||||
return (valueComparator == null)
|
||||
? ImmutableSet.copyOf(values)
|
||||
: ImmutableSortedSet.copyOf(valueComparator, values);
|
||||
}
|
||||
|
||||
private static <V> ImmutableSet<V> emptySet(Comparator<? super V> valueComparator) {
|
||||
return (valueComparator == null)
|
||||
? ImmutableSet.of()
|
||||
: ImmutableSortedSet.<V>emptySet(valueComparator);
|
||||
}
|
||||
|
||||
private static <V> ImmutableSet.Builder<V> valuesBuilder(Comparator<? super V> valueComparator) {
|
||||
return (valueComparator == null)
|
||||
? new ImmutableSet.Builder<V>()
|
||||
: new ImmutableSortedSet.Builder<V>(valueComparator);
|
||||
}
|
||||
|
||||
private void writeObject(ObjectOutputStream stream) throws IOException {
|
||||
stream.defaultWriteObject();
|
||||
stream.writeObject(valueComparator());
|
||||
Serialization.writeMultimap(this, stream);
|
||||
}
|
||||
|
||||
Comparator<? super V> valueComparator() {
|
||||
return emptySet instanceof ImmutableSortedSet
|
||||
? ((ImmutableSortedSet<V>) emptySet).comparator()
|
||||
: null;
|
||||
}
|
||||
|
||||
private static final class SetFieldSettersHolder {
|
||||
static final Serialization.FieldSetter<ImmutableSetMultimap> EMPTY_SET_FIELD_SETTER =
|
||||
Serialization.getFieldSetter(ImmutableSetMultimap.class, "emptySet");
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
|
||||
stream.defaultReadObject();
|
||||
Comparator<Object> valueComparator = (Comparator<Object>) stream.readObject();
|
||||
int keyCount = stream.readInt();
|
||||
if (keyCount < 0) {
|
||||
throw new InvalidObjectException("Invalid key count " + keyCount);
|
||||
}
|
||||
ImmutableMap.Builder<Object, ImmutableSet<Object>> builder = ImmutableMap.builder();
|
||||
int tmpSize = 0;
|
||||
|
||||
for (int i = 0; i < keyCount; i++) {
|
||||
Object key = stream.readObject();
|
||||
int valueCount = stream.readInt();
|
||||
if (valueCount <= 0) {
|
||||
throw new InvalidObjectException("Invalid value count " + valueCount);
|
||||
}
|
||||
|
||||
ImmutableSet.Builder<Object> valuesBuilder = valuesBuilder(valueComparator);
|
||||
for (int j = 0; j < valueCount; j++) {
|
||||
valuesBuilder.add(stream.readObject());
|
||||
}
|
||||
ImmutableSet<Object> valueSet = valuesBuilder.build();
|
||||
if (valueSet.size() != valueCount) {
|
||||
throw new InvalidObjectException("Duplicate key-value pairs exist for key " + key);
|
||||
}
|
||||
builder.put(key, valueSet);
|
||||
tmpSize += valueCount;
|
||||
}
|
||||
|
||||
ImmutableMap<Object, ImmutableSet<Object>> tmpMap;
|
||||
try {
|
||||
tmpMap = builder.buildOrThrow();
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw (InvalidObjectException) new InvalidObjectException(e.getMessage(), e);
|
||||
}
|
||||
|
||||
FieldSettersHolder.MAP_FIELD_SETTER.set(this, tmpMap);
|
||||
FieldSettersHolder.SIZE_FIELD_SETTER.set(this, tmpSize);
|
||||
SetFieldSettersHolder.EMPTY_SET_FIELD_SETTER.set(this, emptySet(valueComparator));
|
||||
}
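// Editor's descriptive note, not part of the original source: the serial form written by
// writeObject above appears to be (value comparator, key count, then for each key: the key, its
// value count, and the values). readObject re-validates those counts so that a corrupted or
// hostile stream cannot smuggle in duplicate key-value pairs.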
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
}
@@ -0,0 +1,63 @@
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.util.AbstractSet;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import org.xbib.datastructures.api.Multiset;
|
||||
|
||||
/**
 * {@link AbstractSet} substitute without the potentially-quadratic {@code removeAll}
 * implementation.
 */
abstract class ImprovedAbstractSet<E extends Object> extends AbstractSet<E> {

@Override
public boolean removeAll(Collection<?> c) {
    return removeAllImpl(this, c);
}

@Override
public boolean retainAll(Collection<?> c) {
    return super.retainAll(Objects.requireNonNull(c));
}

private static boolean removeAllImpl(Set<?> set, Collection<?> collection) {
    Objects.requireNonNull(collection); // for GWT
    if (collection instanceof Multiset) {
        collection = ((Multiset<?>) collection).elementSet();
    }
    /*
     * AbstractSet.removeAll(List) has quadratic behavior if the list size
     * is just more than the set's size. We augment the test by
     * assuming that sets have fast contains() performance, and other
     * collections don't.
     */
    if (collection instanceof Set && collection.size() > set.size()) {
        return removeAll(set.iterator(), collection);
    } else {
        return removeAllImpl(set, collection.iterator());
    }
}
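// Editor's illustrative note, not part of the original source: the branch above picks the
// cheaper side to iterate. For example, given a set of 3 elements and a Set argument of 5
// elements, it walks this set and probes the argument's O(1) contains(); given a List argument
// it walks the list instead, avoiding the quadratic AbstractSet.removeAll pattern described in
// the comment.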
|
||||
|
||||
/** Remove each element in an iterable from a set. */
|
||||
private static boolean removeAllImpl(Set<?> set, Iterator<?> iterator) {
|
||||
boolean changed = false;
|
||||
while (iterator.hasNext()) {
|
||||
changed |= set.remove(iterator.next());
|
||||
}
|
||||
return changed;
|
||||
}
|
||||
|
||||
private static boolean removeAll(Iterator<?> removeFrom, Collection<?> elementsToRemove) {
|
||||
Objects.requireNonNull(elementsToRemove);
|
||||
boolean result = false;
|
||||
while (removeFrom.hasNext()) {
|
||||
if (elementsToRemove.contains(removeFrom.next())) {
|
||||
removeFrom.remove();
|
||||
result = true;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
@@ -0,0 +1,80 @@
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
 * An implementation of ImmutableMultiset backed by a JDK Map and a list of entries. Used to protect
 * against hash flooding attacks.
 */
final class JdkBackedImmutableMultiset<E> extends ImmutableMultiset<E> {

private final Map<E, Integer> delegateMap;
private final ImmutableList<Entry<E>> entries;
private final long size;
|
||||
|
||||
static <E> ImmutableMultiset<E> create(Collection<? extends Entry<? extends E>> entries) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Entry<E>[] entriesArray = entries.toArray(new Entry[0]);
|
||||
Map<E, Integer> delegateMap = new HashMap<>(entriesArray.length);
|
||||
long size = 0;
|
||||
for (int i = 0; i < entriesArray.length; i++) {
|
||||
Entry<E> entry = entriesArray[i];
|
||||
int count = entry.getCount();
|
||||
size += count;
|
||||
E element = Objects.requireNonNull(entry.getElement());
|
||||
delegateMap.put(element, count);
|
||||
if (!(entry instanceof MultisetsImmutableEntry)) {
|
||||
entriesArray[i] = new MultisetsImmutableEntry<>(element, count);
|
||||
}
|
||||
}
|
||||
return new JdkBackedImmutableMultiset<>(
|
||||
delegateMap, ImmutableList.asImmutableList(entriesArray), size);
|
||||
}
|
||||
|
||||
private JdkBackedImmutableMultiset(
|
||||
Map<E, Integer> delegateMap, ImmutableList<Entry<E>> entries, long size) {
|
||||
this.delegateMap = delegateMap;
|
||||
this.entries = entries;
|
||||
this.size = size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int count(Object element) {
|
||||
return delegateMap.getOrDefault(element, 0);
|
||||
}
|
||||
|
||||
private transient ImmutableSet<E> elementSet;
|
||||
|
||||
@Override
|
||||
public ImmutableSet<E> elementSet() {
|
||||
ImmutableSet<E> result = elementSet;
|
||||
return (result == null) ? elementSet = new ElementSet<>(entries, this) : result;
|
||||
}
|
||||
|
||||
@Override
|
||||
Entry<E> getEntry(int index) {
|
||||
return entries.get(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isPartialView() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return saturatedCast(size);
|
||||
}
|
||||
|
||||
private static int saturatedCast(long value) {
|
||||
if (value > Integer.MAX_VALUE) {
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
if (value < Integer.MIN_VALUE) {
|
||||
return Integer.MIN_VALUE;
|
||||
}
|
||||
return (int) value;
|
||||
}
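// Editor's illustrative note, not part of the original source: size() reports a saturated value
// rather than overflowing, so for a multiset whose element counts sum past the int range,
// saturatedCast(3_000_000_000L) yields Integer.MAX_VALUE while saturatedCast(42L) yields 42.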
|
||||
}
@@ -0,0 +1,567 @@
package org.xbib.datastructures.multi;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.ConcurrentModificationException;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
import java.util.Spliterator;
|
||||
import java.util.Spliterators;
|
||||
import java.util.function.Consumer;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
import org.xbib.datastructures.immutable.ImmutableEntry;
|
||||
|
||||
/**
 * Implementation of {@code Multimap} that does not allow duplicate key-value entries and that
 * returns collections whose iterators follow the ordering in which the data was added to the
 * multimap.
 *
 * <p>The collections returned by {@code keySet}, {@code keys}, and {@code asMap} iterate through
 * the keys in the order they were first added to the multimap. Similarly, {@code get}, {@code
 * removeAll}, and {@code replaceValues} return collections that iterate through the values in the
 * order they were added. The collections generated by {@code entries} and {@code values} iterate
 * across the key-value mappings in the order they were added to the multimap.
 *
 * <p>The iteration ordering of the collections generated by {@code keySet}, {@code keys}, and
 * {@code asMap} has a few subtleties. As long as the set of keys remains unchanged, adding or
 * removing mappings does not affect the key iteration order. However, if you remove all values
 * associated with a key and then add the key back to the multimap, that key will come last in the
 * key iteration order.
 *
 * <p>The multimap does not store duplicate key-value pairs. Adding a new key-value pair equal to an
 * existing key-value pair has no effect.
 *
 * <p>Keys and values may be null. All optional multimap methods are supported, and all returned
 * views are modifiable.
 *
 * <p>This class is not threadsafe when any concurrent operations update the multimap. Concurrent
 * read operations will work correctly. To allow concurrent update operations, wrap your multimap
 * with a call to {@link Multimaps#synchronizedSetMultimap}.
 *
 * <p><b>Warning:</b> Do not modify either a key <i>or a value</i> of a {@code LinkedHashMultimap}
 * in a way that affects its {@link Object#equals} behavior. Undefined behavior and bugs will
 * result.
 */
public final class LinkedHashMultimap<K extends Object, V extends Object>
        extends AbstractSetMultimap<K, V> {
|
||||
|
||||
/** Creates a new, empty {@code LinkedHashMultimap} with the default initial capacities. */
|
||||
public static <K extends Object, V extends Object>
|
||||
LinkedHashMultimap<K, V> create() {
|
||||
return new LinkedHashMultimap<>(DEFAULT_KEY_CAPACITY, DEFAULT_VALUE_SET_CAPACITY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs an empty {@code LinkedHashMultimap} with enough capacity to hold the specified
|
||||
* numbers of keys and values without rehashing.
|
||||
*
|
||||
* @param expectedKeys the expected number of distinct keys
|
||||
* @param expectedValuesPerKey the expected average number of values per key
|
||||
* @throws IllegalArgumentException if {@code expectedKeys} or {@code expectedValuesPerKey} is
|
||||
* negative
|
||||
*/
|
||||
public static <K extends Object, V extends Object>
|
||||
LinkedHashMultimap<K, V> create(int expectedKeys, int expectedValuesPerKey) {
|
||||
return new LinkedHashMultimap<>(
|
||||
Maps.capacity(expectedKeys), Maps.capacity(expectedValuesPerKey));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a {@code LinkedHashMultimap} with the same mappings as the specified multimap. If a
|
||||
* key-value mapping appears multiple times in the input multimap, it only appears once in the
|
||||
* constructed multimap. The new multimap has the same {@link Multimap#entries()} iteration order
|
||||
* as the input multimap, except for excluding duplicate mappings.
|
||||
*
|
||||
* @param multimap the multimap whose contents are copied to this multimap
|
||||
*/
|
||||
public static <K extends Object, V extends Object>
|
||||
LinkedHashMultimap<K, V> create(Multimap<? extends K, ? extends V> multimap) {
|
||||
LinkedHashMultimap<K, V> result = create(multimap.keySet().size(), DEFAULT_VALUE_SET_CAPACITY);
|
||||
result.putAll(multimap);
|
||||
return result;
|
||||
}
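// Editor's illustrative sketch, not part of the original source: demonstrates the key iteration
// rule documented above. The method name "keyOrderExample" is hypothetical.
private static LinkedHashMultimap<String, Integer> keyOrderExample() {
    LinkedHashMultimap<String, Integer> multimap = create();
    multimap.put("a", 1);
    multimap.put("b", 2);
    multimap.put("a", 1);      // duplicate key-value pair, silently ignored
    multimap.removeAll("a");   // "a" gives up its place in the key order
    multimap.put("a", 3);      // re-adding it puts "a" after "b" in keySet()
    return multimap;           // entries() now iterates b=2, a=3
}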
|
||||
|
||||
private interface ValueSetLink<K extends Object, V extends Object> {
|
||||
ValueSetLink<K, V> getPredecessorInValueSet();
|
||||
|
||||
ValueSetLink<K, V> getSuccessorInValueSet();
|
||||
|
||||
void setPredecessorInValueSet(ValueSetLink<K, V> entry);
|
||||
|
||||
void setSuccessorInValueSet(ValueSetLink<K, V> entry);
|
||||
}
|
||||
|
||||
private static <K extends Object, V extends Object> void succeedsInValueSet(
|
||||
ValueSetLink<K, V> pred, ValueSetLink<K, V> succ) {
|
||||
pred.setSuccessorInValueSet(succ);
|
||||
succ.setPredecessorInValueSet(pred);
|
||||
}
|
||||
|
||||
private static <K extends Object, V extends Object> void succeedsInMultimap(
|
||||
ValueEntry<K, V> pred, ValueEntry<K, V> succ) {
|
||||
pred.setSuccessorInMultimap(succ);
|
||||
succ.setPredecessorInMultimap(pred);
|
||||
}
|
||||
|
||||
private static <K extends Object, V extends Object> void deleteFromValueSet(
|
||||
ValueSetLink<K, V> entry) {
|
||||
succeedsInValueSet(entry.getPredecessorInValueSet(), entry.getSuccessorInValueSet());
|
||||
}
|
||||
|
||||
private static <K extends Object, V extends Object> void deleteFromMultimap(
|
||||
ValueEntry<K, V> entry) {
|
||||
succeedsInMultimap(entry.getPredecessorInMultimap(), entry.getSuccessorInMultimap());
|
||||
}
|
||||
|
||||
/**
|
||||
* LinkedHashMultimap entries are in no less than three coexisting linked lists: a bucket in the
|
||||
* hash table for a {@code Set<V>} associated with a key, the linked list of insertion-ordered
|
||||
* entries in that {@code Set<V>}, and the linked list of entries in the LinkedHashMultimap as a
|
||||
* whole.
|
||||
*/
|
||||
static final class ValueEntry<K extends Object, V extends Object>
|
||||
extends ImmutableEntry<K, V> implements ValueSetLink<K, V> {
|
||||
final int smearedValueHash;
|
||||
|
||||
ValueEntry<K, V> nextInValueBucket;
|
||||
/*
|
||||
* The *InValueSet and *InMultimap fields below are null after construction, but we almost
|
||||
* always call succeedsIn*() to initialize them immediately thereafter.
|
||||
*
|
||||
* The exception is the *InValueSet fields of multimapHeaderEntry, which are never set. (That
|
||||
* works out fine as long as we continue to be careful not to try to delete them or iterate
|
||||
* past them.)
|
||||
*
|
||||
* We could consider "lying" and omitting @CheckNotNull from all these fields. Normally, I'm not
|
||||
* a fan of that: What if we someday implement (presumably to be enabled during tests only)
|
||||
* bytecode rewriting that checks for any null value that passes through an API with a
|
||||
* known-non-null type? But that particular problem might not arise here, since we're not
|
||||
* actually reading from the fields in any case in which they might be null (as proven by the
|
||||
* requireNonNull checks below). Plus, we're *already* lying here, since newHeader passes a null
|
||||
* key and value, which we pass to the superconstructor, even though the key and value type for
|
||||
* a given entry might not include null. The right fix for the header problems is probably to
|
||||
* define a separate MultimapLink interface with a separate "header" implementation, which
|
||||
* hopefully could avoid implementing Entry or ValueSetLink at all. (But note that that approach
|
||||
* requires us to define extra classes -- unfortunate under Android.) *Then* we could consider
|
||||
* lying about the fields below on the grounds that we always initialize them just after the
|
||||
* constructor -- an example of the kind of lying that our hypothetical bytecode rewriter would
|
||||
* already have to deal with, thanks to DI frameworks that perform field and method injection,
|
||||
* frameworks like Android that define post-construct hooks like Activity.onCreate, etc.
|
||||
*/
|
||||
|
||||
ValueSetLink<K, V> predecessorInValueSet;
|
||||
ValueSetLink<K, V> successorInValueSet;
|
||||
|
||||
ValueEntry<K, V> predecessorInMultimap;
|
||||
ValueEntry<K, V> successorInMultimap;
|
||||
|
||||
ValueEntry(K key, V value,
|
||||
int smearedValueHash,
|
||||
ValueEntry<K, V> nextInValueBucket) {
|
||||
super(key, value);
|
||||
this.smearedValueHash = smearedValueHash;
|
||||
this.nextInValueBucket = nextInValueBucket;
|
||||
}
|
||||
|
||||
@SuppressWarnings("nullness") // see the comment on the class fields, especially about newHeader
|
||||
static <K extends Object, V extends Object> ValueEntry<K, V> newHeader() {
|
||||
return new ValueEntry<>(null, null, 0, null);
|
||||
}
|
||||
|
||||
boolean matchesValue(Object v, int smearedVHash) {
|
||||
return smearedValueHash == smearedVHash && Objects.equal(getValue(), v);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueSetLink<K, V> getPredecessorInValueSet() {
|
||||
return requireNonNull(predecessorInValueSet); // see the comment on the class fields
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueSetLink<K, V> getSuccessorInValueSet() {
|
||||
return requireNonNull(successorInValueSet); // see the comment on the class fields
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setPredecessorInValueSet(ValueSetLink<K, V> entry) {
|
||||
predecessorInValueSet = entry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSuccessorInValueSet(ValueSetLink<K, V> entry) {
|
||||
successorInValueSet = entry;
|
||||
}
|
||||
|
||||
public ValueEntry<K, V> getPredecessorInMultimap() {
|
||||
return requireNonNull(predecessorInMultimap); // see the comment on the class fields
|
||||
}
|
||||
|
||||
public ValueEntry<K, V> getSuccessorInMultimap() {
|
||||
return requireNonNull(successorInMultimap); // see the comment on the class fields
|
||||
}
|
||||
|
||||
public void setSuccessorInMultimap(ValueEntry<K, V> multimapSuccessor) {
|
||||
this.successorInMultimap = multimapSuccessor;
|
||||
}
|
||||
|
||||
public void setPredecessorInMultimap(ValueEntry<K, V> multimapPredecessor) {
|
||||
this.predecessorInMultimap = multimapPredecessor;
|
||||
}
|
||||
}
|
||||
|
||||
private static final int DEFAULT_KEY_CAPACITY = 16;
|
||||
private static final int DEFAULT_VALUE_SET_CAPACITY = 2;
|
||||
static final double VALUE_SET_LOAD_FACTOR = 1.0;
|
||||
|
||||
transient int valueSetCapacity = DEFAULT_VALUE_SET_CAPACITY;
|
||||
private transient ValueEntry<K, V> multimapHeaderEntry;
|
||||
|
||||
private LinkedHashMultimap(int keyCapacity, int valueSetCapacity) {
|
||||
super(Platform.<K, Collection<V>>newLinkedHashMapWithExpectedSize(keyCapacity));
|
||||
checkNonnegative(valueSetCapacity, "expectedValuesPerKey");
|
||||
|
||||
this.valueSetCapacity = valueSetCapacity;
|
||||
this.multimapHeaderEntry = ValueEntry.newHeader();
|
||||
succeedsInMultimap(multimapHeaderEntry, multimapHeaderEntry);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>Creates an empty {@code LinkedHashSet} for a collection of values for one key.
|
||||
*
|
||||
* @return a new {@code LinkedHashSet} containing a collection of values for one key
|
||||
*/
|
||||
@Override
|
||||
Set<V> createCollection() {
|
||||
return Platform.newLinkedHashSetWithExpectedSize(valueSetCapacity);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>Creates a decorated insertion-ordered set that also keeps track of the order in which
|
||||
* key-value pairs are added to the multimap.
|
||||
*
|
||||
* @param key key to associate with values in the collection
|
||||
* @return a new decorated set containing a collection of values for one key
|
||||
*/
|
||||
@Override
|
||||
Collection<V> createCollection(K key) {
|
||||
return new ValueSet(key, valueSetCapacity);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>If {@code values} is not empty and the multimap already contains a mapping for {@code key},
|
||||
* the {@code keySet()} ordering is unchanged. However, the provided values always come last in
|
||||
* the {@link #entries()} and {@link #values()} iteration orderings.
|
||||
*/
|
||||
@Override
|
||||
public Set<V> replaceValues(K key, Iterable<? extends V> values) {
|
||||
return super.replaceValues(key, values);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a set of all key-value pairs. Changes to the returned set will update the underlying
|
||||
* multimap, and vice versa. The entries set does not support the {@code add} or {@code addAll}
|
||||
* operations.
|
||||
*
|
||||
* <p>The iterator generated by the returned set traverses the entries in the order they were
|
||||
* added to the multimap.
|
||||
*
|
||||
* <p>Each entry is an immutable snapshot of a key-value mapping in the multimap, taken at the
|
||||
* time the entry is returned by a method call to the collection or its iterator.
|
||||
*/
|
||||
@Override
|
||||
public Set<Entry<K, V>> entries() {
|
||||
return super.entries();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a view collection of all <i>distinct</i> keys contained in this multimap. Note that the
|
||||
* key set contains a key if and only if this multimap maps that key to at least one value.
|
||||
*
|
||||
* <p>The iterator generated by the returned set traverses the keys in the order they were first
|
||||
* added to the multimap.
|
||||
*
|
||||
* <p>Changes to the returned set will update the underlying multimap, and vice versa. However,
|
||||
* <i>adding</i> to the returned set is not possible.
|
||||
*/
|
||||
@Override
|
||||
public Set<K> keySet() {
|
||||
return super.keySet();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a collection of all values in the multimap. Changes to the returned collection will
|
||||
* update the underlying multimap, and vice versa.
|
||||
*
|
||||
* <p>The iterator generated by the returned collection traverses the values in the order they
|
||||
* were added to the multimap.
|
||||
*/
|
||||
@Override
|
||||
public Collection<V> values() {
|
||||
return super.values();
|
||||
}
|
||||
|
||||
final class ValueSet extends ImprovedAbstractSet<V> implements ValueSetLink<K, V> {
|
||||
/*
|
||||
* We currently use a fixed load factor of 1.0, a bit higher than normal to reduce memory
|
||||
* consumption.
|
||||
*/
|
||||
|
||||
private final K key;
|
||||
ValueEntry<K, V>[] hashTable;
|
||||
private int size = 0;
|
||||
private int modCount = 0;
|
||||
|
||||
// We use the set object itself as the end of the linked list, avoiding an unnecessary
|
||||
// entry object per key.
|
||||
private ValueSetLink<K, V> firstEntry;
|
||||
private ValueSetLink<K, V> lastEntry;
|
||||
|
||||
ValueSet(K key, int expectedValues) {
|
||||
this.key = key;
|
||||
this.firstEntry = this;
|
||||
this.lastEntry = this;
|
||||
// Round expected values up to a power of 2 to get the table size.
|
||||
int tableSize = Hashing.closedTableSize(expectedValues, VALUE_SET_LOAD_FACTOR);
|
||||
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
ValueEntry<K, V>[] hashTable = new ValueEntry[tableSize];
|
||||
this.hashTable = hashTable;
|
||||
}
|
||||
|
||||
private int mask() {
|
||||
return hashTable.length - 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueSetLink<K, V> getPredecessorInValueSet() {
|
||||
return lastEntry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueSetLink<K, V> getSuccessorInValueSet() {
|
||||
return firstEntry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setPredecessorInValueSet(ValueSetLink<K, V> entry) {
|
||||
lastEntry = entry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSuccessorInValueSet(ValueSetLink<K, V> entry) {
|
||||
firstEntry = entry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<V> iterator() {
|
||||
return new Iterator<V>() {
|
||||
ValueSetLink<K, V> nextEntry = firstEntry;
|
||||
ValueEntry<K, V> toRemove;
|
||||
int expectedModCount = modCount;
|
||||
|
||||
private void checkForComodification() {
|
||||
if (modCount != expectedModCount) {
|
||||
throw new ConcurrentModificationException();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
checkForComodification();
|
||||
return nextEntry != ValueSet.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public V next() {
|
||||
if (!hasNext()) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
ValueEntry<K, V> entry = (ValueEntry<K, V>) nextEntry;
|
||||
V result = entry.getValue();
|
||||
toRemove = entry;
|
||||
nextEntry = entry.getSuccessorInValueSet();
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkForComodification();
|
||||
checkState(toRemove != null, "no calls to next() since the last call to remove()");
|
||||
ValueSet.this.remove(toRemove.getValue());
|
||||
expectedModCount = modCount;
|
||||
toRemove = null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forEach(Consumer<? super V> action) {
|
||||
checkNotNull(action);
|
||||
for (ValueSetLink<K, V> entry = firstEntry;
|
||||
entry != ValueSet.this;
|
||||
entry = entry.getSuccessorInValueSet()) {
|
||||
action.accept(((ValueEntry<K, V>) entry).getValue());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
int smearedHash = Hashing.smearedHash(o);
|
||||
for (ValueEntry<K, V> entry = hashTable[smearedHash & mask()];
|
||||
entry != null;
|
||||
entry = entry.nextInValueBucket) {
|
||||
if (entry.matchesValue(o, smearedHash)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean add(V value) {
|
||||
int smearedHash = Hashing.smearedHash(value);
|
||||
int bucket = smearedHash & mask();
|
||||
ValueEntry<K, V> rowHead = hashTable[bucket];
|
||||
for (ValueEntry<K, V> entry = rowHead; entry != null; entry = entry.nextInValueBucket) {
|
||||
if (entry.matchesValue(value, smearedHash)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
ValueEntry<K, V> newEntry = new ValueEntry<>(key, value, smearedHash, rowHead);
|
||||
succeedsInValueSet(lastEntry, newEntry);
|
||||
succeedsInValueSet(newEntry, this);
|
||||
succeedsInMultimap(multimapHeaderEntry.getPredecessorInMultimap(), newEntry);
|
||||
succeedsInMultimap(newEntry, multimapHeaderEntry);
|
||||
hashTable[bucket] = newEntry;
|
||||
size++;
|
||||
modCount++;
|
||||
rehashIfNecessary();
|
||||
return true;
|
||||
}
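// Editor's descriptive note, not part of the original source: add() above links the new entry
// into three structures at once: its hash bucket (nextInValueBucket), this key's
// insertion-ordered value list, and the multimap-wide entry list anchored at
// multimapHeaderEntry, before growing the table if the load factor is exceeded.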
|
||||
|
||||
private void rehashIfNecessary() {
|
||||
if (Hashing.needsResizing(size, hashTable.length, VALUE_SET_LOAD_FACTOR)) {
|
||||
@SuppressWarnings("unchecked")
|
||||
ValueEntry<K, V>[] hashTable = new ValueEntry[this.hashTable.length * 2];
|
||||
this.hashTable = hashTable;
|
||||
int mask = hashTable.length - 1;
|
||||
for (ValueSetLink<K, V> entry = firstEntry;
|
||||
entry != this;
|
||||
entry = entry.getSuccessorInValueSet()) {
|
||||
ValueEntry<K, V> valueEntry = (ValueEntry<K, V>) entry;
|
||||
int bucket = valueEntry.smearedValueHash & mask;
|
||||
valueEntry.nextInValueBucket = hashTable[bucket];
|
||||
hashTable[bucket] = valueEntry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(Object o) {
|
||||
int smearedHash = Hashing.smearedHash(o);
|
||||
int bucket = smearedHash & mask();
|
||||
ValueEntry<K, V> prev = null;
|
||||
for (ValueEntry<K, V> entry = hashTable[bucket];
|
||||
entry != null;
|
||||
prev = entry, entry = entry.nextInValueBucket) {
|
||||
if (entry.matchesValue(o, smearedHash)) {
|
||||
if (prev == null) {
|
||||
// first entry in the bucket
|
||||
hashTable[bucket] = entry.nextInValueBucket;
|
||||
} else {
|
||||
prev.nextInValueBucket = entry.nextInValueBucket;
|
||||
}
|
||||
deleteFromValueSet(entry);
|
||||
deleteFromMultimap(entry);
|
||||
size--;
|
||||
modCount++;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
Arrays.fill(hashTable, null);
|
||||
size = 0;
|
||||
for (ValueSetLink<K, V> entry = firstEntry;
|
||||
entry != this;
|
||||
entry = entry.getSuccessorInValueSet()) {
|
||||
ValueEntry<K, V> valueEntry = (ValueEntry<K, V>) entry;
|
||||
deleteFromMultimap(valueEntry);
|
||||
}
|
||||
succeedsInValueSet(this, this);
|
||||
modCount++;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
Iterator<Entry<K, V>> entryIterator() {
|
||||
return new Iterator<Entry<K, V>>() {
|
||||
ValueEntry<K, V> nextEntry = multimapHeaderEntry.getSuccessorInMultimap();
|
||||
ValueEntry<K, V> toRemove;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return nextEntry != multimapHeaderEntry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Entry<K, V> next() {
|
||||
if (!hasNext()) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
ValueEntry<K, V> result = nextEntry;
|
||||
toRemove = result;
|
||||
nextEntry = nextEntry.getSuccessorInMultimap();
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkState(toRemove != null, "no calls to next() since the last call to remove()");
|
||||
LinkedHashMultimap.this.remove(toRemove.getKey(), toRemove.getValue());
|
||||
toRemove = null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
Spliterator<Entry<K, V>> entrySpliterator() {
|
||||
return Spliterators.spliterator(entries(), Spliterator.DISTINCT | Spliterator.ORDERED);
|
||||
}
|
||||
|
||||
@Override
|
||||
Iterator<V> valueIterator() {
|
||||
return Maps.valueIterator(entryIterator());
|
||||
}
|
||||
|
||||
@Override
|
||||
Spliterator<V> valueSpliterator() {
|
||||
return CollectSpliterators.map(entrySpliterator(), Entry::getValue);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
super.clear();
|
||||
succeedsInMultimap(multimapHeaderEntry, multimapHeaderEntry);
|
||||
}
|
||||
}
@@ -0,0 +1,53 @@
package org.xbib.datastructures.multi;

import java.util.LinkedHashMap;

import org.xbib.datastructures.api.Multiset;

/**
 * A {@code Multiset} implementation with predictable iteration order. Its iterator orders elements
 * according to when the first occurrence of the element was added. When the multiset contains
 * multiple instances of an element, those instances are consecutive in the iteration order. If all
 * occurrences of an element are removed, after which that element is added to the multiset, the
 * element will appear at the end of the iteration.
 */
public final class LinkedHashMultiset<E extends Object>
        extends AbstractMapBasedMultiset<E> {
|
||||
|
||||
/** Creates a new, empty {@code LinkedHashMultiset} using the default initial capacity. */
|
||||
public static <E extends Object> LinkedHashMultiset<E> create() {
|
||||
return new LinkedHashMultiset<E>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new, empty {@code LinkedHashMultiset} with the specified expected number of distinct
|
||||
* elements.
|
||||
*
|
||||
* @param distinctElements the expected number of distinct elements
|
||||
* @throws IllegalArgumentException if {@code distinctElements} is negative
|
||||
*/
|
||||
public static <E extends Object> LinkedHashMultiset<E> create(int distinctElements) {
|
||||
return new LinkedHashMultiset<E>(distinctElements);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@code LinkedHashMultiset} containing the specified elements.
|
||||
*
|
||||
* <p>This implementation is highly efficient when {@code elements} is itself a {@link Multiset}.
|
||||
*
|
||||
* @param elements the elements that the multiset should contain
|
||||
*/
|
||||
public static <E extends Object> LinkedHashMultiset<E> create(
|
||||
Iterable<? extends E> elements) {
|
||||
LinkedHashMultiset<E> multiset = create(Multisets.inferDistinctElements(elements));
|
||||
Iterables.addAll(multiset, elements);
|
||||
return multiset;
|
||||
}
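// Editor's illustrative note, not part of the original source: for
// LinkedHashMultiset.create(List.of("b", "a", "b")) the iteration order is b, b, a, because
// occurrences of an element are grouped at the position of its first occurrence, and
// count("b") is 2.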
|
||||
|
||||
private LinkedHashMultiset() {
|
||||
super(new LinkedHashMap<E, Count>());
|
||||
}
|
||||
|
||||
private LinkedHashMultiset(int distinctElements) {
|
||||
super(Maps.<E, Count>newLinkedHashMapWithExpectedSize(distinctElements));
|
||||
}
|
||||
}
@@ -0,0 +1,866 @@
package org.xbib.datastructures.multi;
|
||||
|
||||
import static java.util.Collections.unmodifiableList;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.AbstractSequentialList;
|
||||
import java.util.Collection;
|
||||
import java.util.ConcurrentModificationException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/**
 * An implementation of {@code ListMultimap} that supports deterministic iteration order for both
 * keys and values. The iteration order is preserved across non-distinct key values. For example,
 * for the following multimap definition:
 *
 * <pre>{@code
 * Multimap<K, V> multimap = LinkedListMultimap.create();
 * multimap.put(key1, foo);
 * multimap.put(key2, bar);
 * multimap.put(key1, baz);
 * }</pre>
 *
 * ... the iteration order for {@link #keys()} is {@code [key1, key2, key1]}, and similarly for
 * {@link #entries()}. Unlike {@link LinkedHashMultimap}, the iteration order is kept consistent
 * between keys, entries and values. For example, calling:
 *
 * <pre>{@code
 * multimap.remove(key1, foo);
 * }</pre>
 *
 * <p>changes the entries iteration order to {@code [key2=bar, key1=baz]} and the key iteration
 * order to {@code [key2, key1]}. The {@link #entries()} iterator returns mutable map entries, and
 * {@link #replaceValues} attempts to preserve iteration order as much as possible.
 *
 * <p>The collections returned by {@link #keySet()} and {@link #asMap} iterate through the keys in
 * the order they were first added to the multimap. Similarly, {@link #get}, {@link #removeAll}, and
 * {@link #replaceValues} return collections that iterate through the values in the order they were
 * added. The collections generated by {@link #entries()}, {@link #keys()}, and {@link #values}
 * iterate across the key-value mappings in the order they were added to the multimap.
 *
 * <p>The {@link #values()} and {@link #entries()} methods both return a {@code List}, instead of
 * the {@code Collection} specified by the {@link ListMultimap} interface.
 *
 * <p>The methods {@link #get}, {@link #keySet()}, {@link #keys()}, {@link #values}, {@link
 * #entries()}, and {@link #asMap} return collections that are views of the multimap. If the
 * multimap is modified while an iteration over any of those collections is in progress, except
 * through the iterator's methods, the results of the iteration are undefined.
 *
 * <p>Keys and values may be null. All optional multimap methods are supported, and all returned
 * views are modifiable.
 *
 * <p>This class is not threadsafe when any concurrent operations update the multimap. Concurrent
 * read operations will work correctly. To allow concurrent update operations, wrap your multimap
 * with a call to {@link Multimaps#synchronizedListMultimap}.
 *
 * <p>See the Guava User Guide article on <a href=
 * "https://github.com/google/guava/wiki/NewCollectionTypesExplained#multimap">{@code Multimap}</a>.
 *
 * @author Mike Bostock
 * @since 2.0
 */
|
||||
@GwtCompatible(serializable = true, emulated = true)
|
||||
@ElementTypesAreNonnullByDefault
|
||||
public class LinkedListMultimap<K extends @Nullable Object, V extends @Nullable Object>
|
||||
extends AbstractMultimap<K, V> implements ListMultimap<K, V>, Serializable {
|
||||
/*
|
||||
* Order is maintained using a linked list containing all key-value pairs. In
|
||||
* addition, a series of disjoint linked lists of "siblings", each containing
|
||||
* the values for a specific key, is used to implement {@link
|
||||
* ValueForKeyIterator} in constant time.
|
||||
*/
|
||||
|
||||
private static final class Node<K extends @Nullable Object, V extends @Nullable Object>
|
||||
extends AbstractMapEntry<K, V> {
|
||||
@ParametricNullness final K key;
|
||||
@ParametricNullness V value;
|
||||
@CheckForNull Node<K, V> next; // the next node (with any key)
|
||||
@CheckForNull Node<K, V> previous; // the previous node (with any key)
|
||||
@CheckForNull Node<K, V> nextSibling; // the next node with the same key
|
||||
@CheckForNull Node<K, V> previousSibling; // the previous node with the same key
|
||||
|
||||
Node(@ParametricNullness K key, @ParametricNullness V value) {
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public K getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public V getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public V setValue(@ParametricNullness V newValue) {
|
||||
V result = value;
|
||||
this.value = newValue;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
private static class KeyList<K extends @Nullable Object, V extends @Nullable Object> {
|
||||
Node<K, V> head;
|
||||
Node<K, V> tail;
|
||||
int count;
|
||||
|
||||
KeyList(Node<K, V> firstNode) {
|
||||
this.head = firstNode;
|
||||
this.tail = firstNode;
|
||||
firstNode.previousSibling = null;
|
||||
firstNode.nextSibling = null;
|
||||
this.count = 1;
|
||||
}
|
||||
}
|
||||
|
||||
@CheckForNull private transient Node<K, V> head; // the head for all keys
|
||||
@CheckForNull private transient Node<K, V> tail; // the tail for all keys
|
||||
private transient Map<K, KeyList<K, V>> keyToKeyList;
|
||||
private transient int size;
|
||||
|
||||
/*
|
||||
* Tracks modifications to keyToKeyList so that addition or removal of keys invalidates
|
||||
* preexisting iterators. This does *not* track simple additions and removals of values
|
||||
* that are not the first to be added or last to be removed for their key.
|
||||
*/
|
||||
private transient int modCount;
|
||||
|
||||
/** Creates a new, empty {@code LinkedListMultimap} with the default initial capacity. */
|
||||
public static <K extends @Nullable Object, V extends @Nullable Object>
|
||||
LinkedListMultimap<K, V> create() {
|
||||
return new LinkedListMultimap<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs an empty {@code LinkedListMultimap} with enough capacity to hold the specified
|
||||
* number of keys without rehashing.
|
||||
*
|
||||
* @param expectedKeys the expected number of distinct keys
|
||||
* @throws IllegalArgumentException if {@code expectedKeys} is negative
|
||||
*/
|
||||
public static <K extends @Nullable Object, V extends @Nullable Object>
|
||||
LinkedListMultimap<K, V> create(int expectedKeys) {
|
||||
return new LinkedListMultimap<>(expectedKeys);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a {@code LinkedListMultimap} with the same mappings as the specified {@code
|
||||
* Multimap}. The new multimap has the same {@link Multimap#entries()} iteration order as the
|
||||
* input multimap.
|
||||
*
|
||||
* @param multimap the multimap whose contents are copied to this multimap
|
||||
*/
|
||||
public static <K extends @Nullable Object, V extends @Nullable Object>
|
||||
LinkedListMultimap<K, V> create(Multimap<? extends K, ? extends V> multimap) {
|
||||
return new LinkedListMultimap<>(multimap);
|
||||
}
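// Editor's illustrative sketch, not part of the original source: reproduces the iteration order
// described in the class documentation above. The method name "entryOrderExample" is hypothetical.
private static LinkedListMultimap<String, String> entryOrderExample() {
    LinkedListMultimap<String, String> multimap = create();
    multimap.put("key1", "foo");
    multimap.put("key2", "bar");
    multimap.put("key1", "baz");
    // keys() iterates [key1, key2, key1]; removing the entry key1=foo would leave the
    // entries order as [key2=bar, key1=baz].
    return multimap;
}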
|
||||
|
||||
LinkedListMultimap() {
|
||||
this(12);
|
||||
}
|
||||
|
||||
private LinkedListMultimap(int expectedKeys) {
|
||||
keyToKeyList = Platform.newHashMapWithExpectedSize(expectedKeys);
|
||||
}
|
||||
|
||||
private LinkedListMultimap(Multimap<? extends K, ? extends V> multimap) {
|
||||
this(multimap.keySet().size());
|
||||
putAll(multimap);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a new node for the specified key-value pair before the specified {@code nextSibling}
|
||||
* element, or at the end of the list if {@code nextSibling} is null. Note: if {@code nextSibling}
|
||||
* is specified, it MUST be for a node for the same {@code key}!
|
||||
*/
|
||||
@CanIgnoreReturnValue
|
||||
private Node<K, V> addNode(
|
||||
@ParametricNullness K key,
|
||||
@ParametricNullness V value,
|
||||
@CheckForNull Node<K, V> nextSibling) {
|
||||
Node<K, V> node = new Node<>(key, value);
|
||||
if (head == null) { // empty list
|
||||
head = tail = node;
|
||||
keyToKeyList.put(key, new KeyList<K, V>(node));
|
||||
modCount++;
|
||||
} else if (nextSibling == null) { // non-empty list, add to tail
|
||||
// requireNonNull is safe because the list is non-empty.
|
||||
requireNonNull(tail).next = node;
|
||||
node.previous = tail;
|
||||
tail = node;
|
||||
KeyList<K, V> keyList = keyToKeyList.get(key);
|
||||
if (keyList == null) {
|
||||
keyToKeyList.put(key, keyList = new KeyList<>(node));
|
||||
modCount++;
|
||||
} else {
|
||||
keyList.count++;
|
||||
Node<K, V> keyTail = keyList.tail;
|
||||
keyTail.nextSibling = node;
|
||||
node.previousSibling = keyTail;
|
||||
keyList.tail = node;
|
||||
}
|
||||
} else { // non-empty list, insert before nextSibling
|
||||
/*
|
||||
* requireNonNull is safe as long as callers pass a nextSibling that (a) has the same key and
|
||||
* (b) is present in the multimap. (And they do, except maybe in case of concurrent
|
||||
* modification, in which case all bets are off.)
|
||||
*/
|
||||
KeyList<K, V> keyList = requireNonNull(keyToKeyList.get(key));
|
||||
keyList.count++;
|
||||
node.previous = nextSibling.previous;
|
||||
node.previousSibling = nextSibling.previousSibling;
|
||||
node.next = nextSibling;
|
||||
node.nextSibling = nextSibling;
|
||||
if (nextSibling.previousSibling == null) { // nextSibling was key head
|
||||
keyList.head = node;
|
||||
} else {
|
||||
nextSibling.previousSibling.nextSibling = node;
|
||||
}
|
||||
if (nextSibling.previous == null) { // nextSibling was head
|
||||
head = node;
|
||||
} else {
|
||||
nextSibling.previous.next = node;
|
||||
}
|
||||
nextSibling.previous = node;
|
||||
nextSibling.previousSibling = node;
|
||||
}
|
||||
size++;
|
||||
return node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the specified node from the linked list. This method is only intended to be used from
|
||||
* the {@code Iterator} classes. See also {@link LinkedListMultimap#removeAllNodes(Object)}.
|
||||
*/
|
||||
private void removeNode(Node<K, V> node) {
|
||||
if (node.previous != null) {
|
||||
node.previous.next = node.next;
|
||||
} else { // node was head
|
||||
head = node.next;
|
||||
}
|
||||
if (node.next != null) {
|
||||
node.next.previous = node.previous;
|
||||
} else { // node was tail
|
||||
tail = node.previous;
|
||||
}
|
||||
if (node.previousSibling == null && node.nextSibling == null) {
|
||||
/*
|
||||
* requireNonNull is safe as long as we call removeNode only for nodes that are still in the
|
||||
* Multimap. This should be the case (except in case of concurrent modification, when all bets
|
||||
* are off).
|
||||
*/
|
||||
KeyList<K, V> keyList = requireNonNull(keyToKeyList.remove(node.key));
|
||||
keyList.count = 0;
|
||||
modCount++;
|
||||
} else {
|
||||
// requireNonNull is safe (under the conditions listed in the comment in the branch above).
|
||||
KeyList<K, V> keyList = requireNonNull(keyToKeyList.get(node.key));
|
||||
keyList.count--;
|
||||
|
||||
if (node.previousSibling == null) {
|
||||
// requireNonNull is safe because we checked that not *both* siblings were null.
|
||||
keyList.head = requireNonNull(node.nextSibling);
|
||||
} else {
|
||||
node.previousSibling.nextSibling = node.nextSibling;
|
||||
}
|
||||
|
||||
if (node.nextSibling == null) {
|
||||
// requireNonNull is safe because we checked that not *both* siblings were null.
|
||||
keyList.tail = requireNonNull(node.previousSibling);
|
||||
} else {
|
||||
node.nextSibling.previousSibling = node.previousSibling;
|
||||
}
|
||||
}
|
||||
size--;
|
||||
}
|
||||
|
||||
/** Removes all nodes for the specified key. */
|
||||
private void removeAllNodes(@ParametricNullness K key) {
|
||||
Iterators.clear(new ValueForKeyIterator(key));
|
||||
}
|
||||
|
||||
/** An {@code Iterator} over all nodes. */
|
||||
private class NodeIterator implements ListIterator<Entry<K, V>> {
|
||||
int nextIndex;
|
||||
@CheckForNull Node<K, V> next;
|
||||
@CheckForNull Node<K, V> current;
|
||||
@CheckForNull Node<K, V> previous;
|
||||
int expectedModCount = modCount;
|
||||
|
||||
NodeIterator(int index) {
|
||||
int size = size();
|
||||
checkPositionIndex(index, size);
|
||||
if (index >= (size / 2)) {
|
||||
previous = tail;
|
||||
nextIndex = size;
|
||||
while (index++ < size) {
|
||||
previous();
|
||||
}
|
||||
} else {
|
||||
next = head;
|
||||
while (index-- > 0) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
current = null;
|
||||
}
|
||||
|
||||
private void checkForConcurrentModification() {
|
||||
if (modCount != expectedModCount) {
|
||||
throw new ConcurrentModificationException();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
checkForConcurrentModification();
|
||||
return next != null;
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public Node<K, V> next() {
|
||||
checkForConcurrentModification();
|
||||
if (next == null) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
previous = current = next;
|
||||
next = next.next;
|
||||
nextIndex++;
|
||||
return current;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkForConcurrentModification();
|
||||
checkState(current != null, "no calls to next() since the last call to remove()");
|
||||
if (current != next) { // after call to next()
|
||||
previous = current.previous;
|
||||
nextIndex--;
|
||||
} else { // after call to previous()
|
||||
next = current.next;
|
||||
}
|
||||
removeNode(current);
|
||||
current = null;
|
||||
expectedModCount = modCount;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasPrevious() {
|
||||
checkForConcurrentModification();
|
||||
return previous != null;
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public Node<K, V> previous() {
|
||||
checkForConcurrentModification();
|
||||
if (previous == null) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
next = current = previous;
|
||||
previous = previous.previous;
|
||||
nextIndex--;
|
||||
return current;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int nextIndex() {
|
||||
return nextIndex;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int previousIndex() {
|
||||
return nextIndex - 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(Entry<K, V> e) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(Entry<K, V> e) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
void setValue(@ParametricNullness V value) {
|
||||
checkState(current != null);
|
||||
current.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
/** An {@code Iterator} over distinct keys in key head order. */
|
||||
private class DistinctKeyIterator implements Iterator<K> {
|
||||
final Set<K> seenKeys = Sets.<K>newHashSetWithExpectedSize(keySet().size());
|
||||
@CheckForNull Node<K, V> next = head;
|
||||
@CheckForNull Node<K, V> current;
|
||||
int expectedModCount = modCount;
|
||||
|
||||
private void checkForConcurrentModification() {
|
||||
if (modCount != expectedModCount) {
|
||||
throw new ConcurrentModificationException();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
checkForConcurrentModification();
|
||||
return next != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public K next() {
|
||||
checkForConcurrentModification();
|
||||
if (next == null) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
current = next;
|
||||
seenKeys.add(current.key);
|
||||
do { // skip ahead to next unseen key
|
||||
next = next.next;
|
||||
} while ((next != null) && !seenKeys.add(next.key));
|
||||
return current.key;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkForConcurrentModification();
|
||||
checkState(current != null, "no calls to next() since the last call to remove()");
|
||||
removeAllNodes(current.key);
|
||||
current = null;
|
||||
expectedModCount = modCount;
|
||||
}
|
||||
}
|
||||
|
||||
/** A {@code ListIterator} over values for a specified key. */
|
||||
private class ValueForKeyIterator implements ListIterator<V> {
|
||||
@ParametricNullness final K key;
|
||||
int nextIndex;
|
||||
@CheckForNull Node<K, V> next;
|
||||
@CheckForNull Node<K, V> current;
|
||||
@CheckForNull Node<K, V> previous;
|
||||
|
||||
/** Constructs a new iterator over all values for the specified key. */
|
||||
ValueForKeyIterator(@ParametricNullness K key) {
|
||||
this.key = key;
|
||||
KeyList<K, V> keyList = keyToKeyList.get(key);
|
||||
next = (keyList == null) ? null : keyList.head;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new iterator over all values for the specified key starting at the specified
|
||||
* index. This constructor is optimized so that it starts at either the head or the tail,
|
||||
* depending on which is closer to the specified index. This allows adds to the tail to be done
|
||||
* in constant time.
|
||||
*
|
||||
* @throws IndexOutOfBoundsException if index is invalid
|
||||
*/
|
||||
public ValueForKeyIterator(@ParametricNullness K key, int index) {
|
||||
KeyList<K, V> keyList = keyToKeyList.get(key);
|
||||
int size = (keyList == null) ? 0 : keyList.count;
|
||||
checkPositionIndex(index, size);
|
||||
if (index >= (size / 2)) {
|
||||
previous = (keyList == null) ? null : keyList.tail;
|
||||
nextIndex = size;
|
||||
while (index++ < size) {
|
||||
previous();
|
||||
}
|
||||
} else {
|
||||
next = (keyList == null) ? null : keyList.head;
|
||||
while (index-- > 0) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
this.key = key;
|
||||
current = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return next != null;
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public V next() {
|
||||
if (next == null) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
previous = current = next;
|
||||
next = next.nextSibling;
|
||||
nextIndex++;
|
||||
return current.value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasPrevious() {
|
||||
return previous != null;
|
||||
}
|
||||
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
@ParametricNullness
|
||||
public V previous() {
|
||||
if (previous == null) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
next = current = previous;
|
||||
previous = previous.previousSibling;
|
||||
nextIndex--;
|
||||
return current.value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int nextIndex() {
|
||||
return nextIndex;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int previousIndex() {
|
||||
return nextIndex - 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
checkState(current != null, "no calls to next() since the last call to remove()");
|
||||
if (current != next) { // after call to next()
|
||||
previous = current.previousSibling;
|
||||
nextIndex--;
|
||||
} else { // after call to previous()
|
||||
next = current.nextSibling;
|
||||
}
|
||||
removeNode(current);
|
||||
current = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(@ParametricNullness V value) {
|
||||
checkState(current != null);
|
||||
current.value = value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(@ParametricNullness V value) {
|
||||
previous = addNode(key, value, next);
|
||||
nextIndex++;
|
||||
current = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Query Operations
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return head == null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsKey(@CheckForNull Object key) {
|
||||
return keyToKeyList.containsKey(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsValue(@CheckForNull Object value) {
|
||||
return values().contains(value);
|
||||
}
|
||||
|
||||
// Modification Operations
|
||||
|
||||
/**
|
||||
* Stores a key-value pair in the multimap.
|
||||
*
|
||||
* @param key key to store in the multimap
|
||||
* @param value value to store in the multimap
|
||||
* @return {@code true} always
|
||||
*/
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public boolean put(@ParametricNullness K key, @ParametricNullness V value) {
|
||||
addNode(key, value, null);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Bulk Operations
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>If any entries for the specified {@code key} already exist in the multimap, their values are
|
||||
* changed in-place without affecting the iteration order.
|
||||
*
|
||||
* <p>The returned list is immutable and implements {@link java.util.RandomAccess}.
|
||||
*/
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public List<V> replaceValues(@ParametricNullness K key, Iterable<? extends V> values) {
|
||||
List<V> oldValues = getCopy(key);
|
||||
ListIterator<V> keyValues = new ValueForKeyIterator(key);
|
||||
Iterator<? extends V> newValues = values.iterator();
|
||||
|
||||
// Replace existing values, if any.
|
||||
while (keyValues.hasNext() && newValues.hasNext()) {
|
||||
keyValues.next();
|
||||
keyValues.set(newValues.next());
|
||||
}
|
||||
|
||||
// Remove remaining old values, if any.
|
||||
while (keyValues.hasNext()) {
|
||||
keyValues.next();
|
||||
keyValues.remove();
|
||||
}
|
||||
|
||||
// Add remaining new values, if any.
|
||||
while (newValues.hasNext()) {
|
||||
keyValues.add(newValues.next());
|
||||
}
|
||||
|
||||
return oldValues;
|
||||
}
|
||||
|
||||
private List<V> getCopy(@ParametricNullness K key) {
|
||||
return unmodifiableList(Lists.newArrayList(new ValueForKeyIterator(key)));
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The returned list is immutable and implements {@link java.util.RandomAccess}.
|
||||
*/
|
||||
@CanIgnoreReturnValue
|
||||
@Override
|
||||
public List<V> removeAll(@Nullable Object key) {
|
||||
/*
|
||||
* Safe because all we do is remove values for the key, not add them. (If we wanted to make sure
|
||||
* to call getCopy and removeAllNodes only with a true K, then we could check containsKey first.
|
||||
* But that check wouldn't eliminate the warnings.)
|
||||
*/
|
||||
@SuppressWarnings({"unchecked", "nullness"})
|
||||
K castKey = (K) key;
|
||||
List<V> oldValues = getCopy(castKey);
|
||||
removeAllNodes(castKey);
|
||||
return oldValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
head = null;
|
||||
tail = null;
|
||||
keyToKeyList.clear();
|
||||
size = 0;
|
||||
modCount++;
|
||||
}
|
||||
|
||||
// Views
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>If the multimap is modified while an iteration over the list is in progress (except through
|
||||
* the iterator's own {@code add}, {@code set} or {@code remove} operations) the results of the
|
||||
* iteration are undefined.
|
||||
*
|
||||
* <p>The returned list is not serializable and does not have random access.
|
||||
*/
|
||||
@Override
|
||||
public List<V> get(@ParametricNullness final K key) {
|
||||
return new AbstractSequentialList<V>() {
|
||||
@Override
|
||||
public int size() {
|
||||
KeyList<K, V> keyList = keyToKeyList.get(key);
|
||||
return (keyList == null) ? 0 : keyList.count;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListIterator<V> listIterator(int index) {
|
||||
return new ValueForKeyIterator(key, index);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
Set<K> createKeySet() {
|
||||
@WeakOuter
|
||||
class KeySetImpl extends Sets.ImprovedAbstractSet<K> {
|
||||
@Override
|
||||
public int size() {
|
||||
return keyToKeyList.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<K> iterator() {
|
||||
return new DistinctKeyIterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(@CheckForNull Object key) { // for performance
|
||||
return containsKey(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(@CheckForNull Object o) { // for performance
|
||||
return !LinkedListMultimap.this.removeAll(o).isEmpty();
|
||||
}
|
||||
}
|
||||
return new KeySetImpl();
|
||||
}
|
||||
|
||||
@Override
|
||||
Multiset<K> createKeys() {
|
||||
return new Multimaps.Keys<K, V>(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The iterator generated by the returned collection traverses the values in the order they
|
||||
* were added to the multimap. Because the values may have duplicates and follow the insertion
|
||||
* ordering, this method returns a {@link List}, instead of the {@link Collection} specified in
|
||||
* the {@link ListMultimap} interface.
|
||||
*/
|
||||
@Override
|
||||
public List<V> values() {
|
||||
return (List<V>) super.values();
|
||||
}
|
||||
|
||||
@Override
|
||||
List<V> createValues() {
|
||||
@WeakOuter
|
||||
class ValuesImpl extends AbstractSequentialList<V> {
|
||||
@Override
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListIterator<V> listIterator(int index) {
|
||||
final NodeIterator nodeItr = new NodeIterator(index);
|
||||
return new TransformedListIterator<Entry<K, V>, V>(nodeItr) {
|
||||
@Override
|
||||
@ParametricNullness
|
||||
V transform(Entry<K, V> entry) {
|
||||
return entry.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(@ParametricNullness V value) {
|
||||
nodeItr.setValue(value);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
return new ValuesImpl();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The iterator generated by the returned collection traverses the entries in the order they
|
||||
* were added to the multimap. Because the entries may have duplicates and follow the insertion
|
||||
* ordering, this method returns a {@link List}, instead of the {@link Collection} specified in
|
||||
* the {@link ListMultimap} interface.
|
||||
*
|
||||
* <p>An entry's {@link Entry#getKey} method always returns the same key, regardless of what
|
||||
* happens subsequently. As long as the corresponding key-value mapping is not removed from the
|
||||
* multimap, {@link Entry#getValue} returns the value from the multimap, which may change over
|
||||
* time, and {@link Entry#setValue} modifies that value. Removing the mapping from the multimap
|
||||
* does not alter the value returned by {@code getValue()}, though a subsequent {@code setValue()}
|
||||
* call won't update the multimap but will lead to a revised value being returned by {@code
|
||||
* getValue()}.
|
||||
*/
|
||||
@Override
|
||||
public List<Entry<K, V>> entries() {
|
||||
return (List<Entry<K, V>>) super.entries();
|
||||
}
|
||||
|
||||
@Override
|
||||
List<Entry<K, V>> createEntries() {
|
||||
@WeakOuter
|
||||
class EntriesImpl extends AbstractSequentialList<Entry<K, V>> {
|
||||
@Override
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListIterator<Entry<K, V>> listIterator(int index) {
|
||||
return new NodeIterator(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forEach(Consumer<? super Entry<K, V>> action) {
|
||||
checkNotNull(action);
|
||||
for (Node<K, V> node = head; node != null; node = node.next) {
|
||||
action.accept(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new EntriesImpl();
|
||||
}
|
||||
|
||||
@Override
|
||||
Iterator<Entry<K, V>> entryIterator() {
|
||||
throw new AssertionError("should never be called");
|
||||
}
|
||||
|
||||
@Override
|
||||
Map<K, Collection<V>> createAsMap() {
|
||||
return new Multimaps.AsMap<>(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* @serialData the number of key-value pairs, followed by the key and value of each pair,
* in the {@code entries()} iteration order
|
||||
*/
|
||||
@GwtIncompatible // java.io.ObjectOutputStream
|
||||
private void writeObject(ObjectOutputStream stream) throws IOException {
|
||||
stream.defaultWriteObject();
|
||||
stream.writeInt(size());
|
||||
for (Entry<K, V> entry : entries()) {
|
||||
stream.writeObject(entry.getKey());
|
||||
stream.writeObject(entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
@GwtIncompatible // java.io.ObjectInputStream
|
||||
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
|
||||
stream.defaultReadObject();
|
||||
keyToKeyList = Maps.newLinkedHashMap();
|
||||
int size = stream.readInt();
|
||||
for (int i = 0; i < size; i++) {
|
||||
@SuppressWarnings("unchecked") // reading data stored by writeObject
|
||||
K key = (K) stream.readObject();
|
||||
@SuppressWarnings("unchecked") // reading data stored by writeObject
|
||||
V value = (V) stream.readObject();
|
||||
put(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
@GwtIncompatible // java serialization not supported
|
||||
private static final long serialVersionUID = 0;
|
||||
}
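The Javadoc above describes get() as a live sequential view and replaceValues() as rewriting values in place without disturbing the global iteration order. The following is a minimal sketch of both behaviors; the static create() factory is an assumption (Guava's LinkedListMultimap provides one, but it is not visible in this hunk), and the printed entry format is only approximate.

LinkedListMultimap<String, Integer> m = LinkedListMultimap.create(); // assumed factory, not shown in this hunk
java.util.List<Integer> ks = m.get("k");   // live view, backed by ValueForKeyIterator
m.put("k", 1);
m.put("x", 9);
ks.add(2);                                 // writes through: entries() is now roughly [k=1, x=9, k=2]
m.replaceValues("k", java.util.Arrays.asList(7, 8));
System.out.println(m.entries());           // values replaced in place, order kept: roughly [k=7, x=9, k=8]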
|
|
@ -0,0 +1,69 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
public class MapsKeySet<K extends Object, V extends Object> extends ImprovedAbstractSet<K> {
|
||||
final Map<K, V> map;
|
||||
|
||||
MapsKeySet(Map<K, V> map) {
|
||||
this.map = Objects.requireNonNull(map);
|
||||
}
|
||||
|
||||
Map<K, V> map() {
|
||||
return map;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<K> iterator() {
|
||||
return keyIterator(map().entrySet().iterator());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forEach(Consumer<? super K> action) {
|
||||
Objects.requireNonNull(action);
|
||||
// avoids entry allocation for those maps that allocate entries on iteration
|
||||
map.forEach((k, v) -> action.accept(k));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return map().size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return map().isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
return map().containsKey(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(Object o) {
|
||||
if (contains(o)) {
|
||||
map().remove(o);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
map().clear();
|
||||
}
|
||||
|
||||
private static <K extends Object, V extends Object> Iterator<K> keyIterator(
|
||||
Iterator<Map.Entry<K, V>> entryIterator) {
|
||||
return new TransformedIterator<Map.Entry<K, V>, K>(entryIterator) {
|
||||
@Override
|
||||
K transform(Map.Entry<K, V> entry) {
|
||||
return entry.getKey();
|
||||
}
|
||||
};
|
||||
}
|
||||
}
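MapsKeySet is a live key view over a backing map: iteration and forEach delegate to the map, and remove/clear write through. A same-package sketch (the constructor is package-private), using a plain LinkedHashMap as the assumed backing map:

java.util.Map<String, Integer> backing = new java.util.LinkedHashMap<>();
backing.put("x", 1);
backing.put("y", 2);
MapsKeySet<String, Integer> keys = new MapsKeySet<>(backing);
keys.forEach(System.out::println); // x then y, without allocating Map.Entry objects
keys.remove("x");                  // writes through to the backing map
System.out.println(backing);       // {y=2}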
|
|
@ -0,0 +1,461 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Comparator;
|
||||
import java.util.EnumMap;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.Supplier;
|
||||
import org.xbib.datastructures.api.ListMultimap;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
import org.xbib.datastructures.api.SetMultimap;
|
||||
import org.xbib.datastructures.api.SortedSetMultimap;
|
||||
import org.xbib.datastructures.immutable.order.Ordering;
|
||||
|
||||
/**
|
||||
* A builder for a multimap implementation that allows customization of the backing map and value
|
||||
* collection implementations used in a particular multimap.
|
||||
*
|
||||
* <p>This can be used to easily configure multimap data structure implementations not provided
|
||||
* explicitly in this package, for example:
|
||||
*
|
||||
* <pre>{@code
|
||||
* ListMultimap<String, Integer> treeListMultimap =
|
||||
* MultimapBuilder.treeKeys().arrayListValues().build();
|
||||
* SetMultimap<Integer, MyEnum> hashEnumMultimap =
|
||||
* MultimapBuilder.hashKeys().enumSetValues(MyEnum.class).build();
|
||||
* }</pre>
|
||||
*
|
||||
* <p>{@code MultimapBuilder} instances are immutable. Invoking a configuration method has no effect
|
||||
* on the receiving instance; you must store and use the new builder instance it returns instead.
|
||||
*
|
||||
* <p>The generated multimaps are serializable if the key and value types are serializable, unless
|
||||
* stated otherwise in one of the configuration methods.
|
||||
*
|
||||
* @author Louis Wasserman
|
||||
* @param <K0> An upper bound on the key type of the generated multimap.
|
||||
* @param <V0> An upper bound on the value type of the generated multimap.
|
||||
*/
|
||||
public abstract class MultimapBuilder<K0 extends Object, V0 extends Object> {
|
||||
/*
|
||||
* Leaving K and V as upper bounds rather than the actual key and value types allows type
|
||||
* parameters to be left implicit more often. CacheBuilder uses the same technique.
|
||||
*/
|
||||
|
||||
private MultimapBuilder() {}
|
||||
|
||||
private static final int DEFAULT_EXPECTED_KEYS = 8;
|
||||
|
||||
/** Uses a hash table to map keys to value collections. */
|
||||
public static MultimapBuilderWithKeys<Object> hashKeys() {
|
||||
return hashKeys(DEFAULT_EXPECTED_KEYS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses a hash table to map keys to value collections, initialized to expect the specified number
|
||||
* of keys.
|
||||
*
|
||||
* @throws IllegalArgumentException if {@code expectedKeys < 0}
|
||||
*/
|
||||
public static MultimapBuilderWithKeys<Object> hashKeys(int expectedKeys) {
|
||||
checkNonnegative(expectedKeys, "expectedKeys");
|
||||
return new MultimapBuilderWithKeys<Object>() {
|
||||
@Override
|
||||
<K extends Object, V extends Object> Map<K, Collection<V>> createMap() {
|
||||
return new HashMap<>(expectedKeys);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses an insertion-ordered hash table to map keys to value collections.
|
||||
*
|
||||
* <p>The collections returned by {@link Multimap#keySet()}, {@link Multimap#keys()}, and {@link
|
||||
* Multimap#asMap()} will iterate through the keys in the order that they were first added to the
|
||||
* multimap, save that if all values associated with a key are removed and then the key is added
|
||||
* back into the multimap, that key will come last in the key iteration order.
|
||||
*/
|
||||
public static MultimapBuilderWithKeys<Object> linkedHashKeys() {
|
||||
return linkedHashKeys(DEFAULT_EXPECTED_KEYS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses an insertion-ordered hash table to map keys to value collections, initialized to expect the specified number
|
||||
* of keys.
|
||||
*
|
||||
* <p>The collections returned by {@link Multimap#keySet()}, {@link Multimap#keys()}, and {@link
|
||||
* Multimap#asMap()} will iterate through the keys in the order that they were first added to the
|
||||
* multimap, save that if all values associated with a key are removed and then the key is added
|
||||
* back into the multimap, that key will come last in the key iteration order.
|
||||
*/
|
||||
public static MultimapBuilderWithKeys<Object> linkedHashKeys(int expectedKeys) {
|
||||
checkNonnegative(expectedKeys, "expectedKeys");
|
||||
return new MultimapBuilderWithKeys<Object>() {
|
||||
@Override
|
||||
<K extends Object, V extends Object> Map<K, Collection<V>> createMap() {
|
||||
return new LinkedHashMap<>(expectedKeys);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses a naturally-ordered {@link TreeMap} to map keys to value collections.
|
||||
*
|
||||
* <p>The collections returned by {@link Multimap#keySet()}, {@link Multimap#keys()}, and {@link
|
||||
* Multimap#asMap()} will iterate through the keys in sorted order.
|
||||
*
|
||||
* <p>For all multimaps generated by the resulting builder, the {@link Multimap#keySet()} can be
|
||||
* safely cast to a {@link SortedSet}, and the {@link Multimap#asMap()} can safely be
|
||||
* cast to a {@link java.util.SortedMap}.
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public static MultimapBuilderWithKeys<Comparable> treeKeys() {
|
||||
return treeKeys(Ordering.natural());
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses a {@link TreeMap} sorted by the specified comparator to map keys to value collections.
|
||||
*
|
||||
* <p>The collections returned by {@link Multimap#keySet()}, {@link Multimap#keys()}, and {@link
|
||||
* Multimap#asMap()} will iterate through the keys in sorted order.
|
||||
*
|
||||
* <p>For all multimaps generated by the resulting builder, the {@link Multimap#keySet()} can be
|
||||
* safely cast to a {@link SortedSet}, and the {@link Multimap#asMap()} can safely be
|
||||
* cast to a {@link java.util.SortedMap}.
|
||||
*
|
||||
* <p>Multimaps generated by the resulting builder will not be serializable if {@code comparator}
|
||||
* is not serializable.
|
||||
*/
|
||||
public static <K0 extends Object> MultimapBuilderWithKeys<K0> treeKeys(
|
||||
Comparator<K0> comparator) {
|
||||
Objects.requireNonNull(comparator);
|
||||
return new MultimapBuilderWithKeys<K0>() {
|
||||
@Override
|
||||
<K extends K0, V extends Object> Map<K, Collection<V>> createMap() {
|
||||
return new TreeMap<>(comparator);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses an {@link EnumMap} to map keys to value collections.
|
||||
*/
|
||||
public static <K0 extends Enum<K0>> MultimapBuilderWithKeys<K0> enumKeys(Class<K0> keyClass) {
|
||||
Objects.requireNonNull(keyClass);
|
||||
return new MultimapBuilderWithKeys<K0>() {
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
<K extends K0, V extends Object> Map<K, Collection<V>> createMap() {
|
||||
// K must actually be K0, since enums are effectively final
|
||||
// (their subclasses are inaccessible)
|
||||
return (Map<K, Collection<V>>) new EnumMap<K0, Collection<V>>(keyClass);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static final class ArrayListSupplier<V extends Object>
|
||||
implements Supplier<List<V>>, Serializable {
|
||||
private final int expectedValuesPerKey;
|
||||
|
||||
ArrayListSupplier(int expectedValuesPerKey) {
|
||||
this.expectedValuesPerKey = checkNonnegative(expectedValuesPerKey, "expectedValuesPerKey");
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<V> get() {
|
||||
return new ArrayList<>(expectedValuesPerKey);
|
||||
}
|
||||
}
|
||||
|
||||
private enum LinkedListSupplier implements Supplier<List<?>> {
|
||||
INSTANCE;
|
||||
|
||||
public static <V extends Object> Supplier<List<V>> instance() {
|
||||
// Each call generates a fresh LinkedList, which can serve as a List<V> for any V.
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
Supplier<List<V>> result = (Supplier) INSTANCE;
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<?> get() {
|
||||
return new LinkedList<>();
|
||||
}
|
||||
}
|
||||
|
||||
private static final class HashSetSupplier<V extends Object>
|
||||
implements Supplier<Set<V>>, Serializable {
|
||||
private final int expectedValuesPerKey;
|
||||
|
||||
HashSetSupplier(int expectedValuesPerKey) {
|
||||
this.expectedValuesPerKey = checkNonnegative(expectedValuesPerKey, "expectedValuesPerKey");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<V> get() {
|
||||
return new HashSet<>(expectedValuesPerKey);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class LinkedHashSetSupplier<V extends Object>
|
||||
implements Supplier<Set<V>>, Serializable {
|
||||
private final int expectedValuesPerKey;
|
||||
|
||||
LinkedHashSetSupplier(int expectedValuesPerKey) {
|
||||
this.expectedValuesPerKey = checkNonnegative(expectedValuesPerKey, "expectedValuesPerKey");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<V> get() {
|
||||
return new LinkedHashSet<>(expectedValuesPerKey);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class TreeSetSupplier<V extends Object>
|
||||
implements Supplier<SortedSet<V>>, Serializable {
|
||||
private final Comparator<? super V> comparator;
|
||||
|
||||
TreeSetSupplier(Comparator<? super V> comparator) {
|
||||
this.comparator = Objects.requireNonNull(comparator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortedSet<V> get() {
|
||||
return new TreeSet<>(comparator);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class EnumSetSupplier<V extends Enum<V>>
|
||||
implements Supplier<Set<V>>, Serializable {
|
||||
private final Class<V> clazz;
|
||||
|
||||
EnumSetSupplier(Class<V> clazz) {
|
||||
this.clazz = Objects.requireNonNull(clazz);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<V> get() {
|
||||
return EnumSet.noneOf(clazz);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An intermediate stage in a {@link MultimapBuilder} in which the key-value collection map
|
||||
* implementation has been specified, but the value collection implementation has not.
|
||||
*
|
||||
* @param <K0> The upper bound on the key type of the generated multimap.
|
||||
* @since 16.0
|
||||
*/
|
||||
public abstract static class MultimapBuilderWithKeys<K0 extends Object> {
|
||||
|
||||
private static final int DEFAULT_EXPECTED_VALUES_PER_KEY = 2;
|
||||
|
||||
MultimapBuilderWithKeys() {}
|
||||
|
||||
abstract <K extends K0, V extends Object> Map<K, Collection<V>> createMap();
|
||||
|
||||
/** Uses an {@link ArrayList} to store value collections. */
|
||||
public ListMultimapBuilder<K0, Object> arrayListValues() {
|
||||
return arrayListValues(DEFAULT_EXPECTED_VALUES_PER_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses an {@link ArrayList} to store value collections, initialized to expect the specified
|
||||
* number of values per key.
|
||||
*
|
||||
* @throws IllegalArgumentException if {@code expectedValuesPerKey < 0}
|
||||
*/
|
||||
public ListMultimapBuilder<K0, Object> arrayListValues(int expectedValuesPerKey) {
|
||||
checkNonnegative(expectedValuesPerKey, "expectedValuesPerKey");
|
||||
return new ListMultimapBuilder<K0, Object>() {
|
||||
@Override
|
||||
public <K extends K0, V extends Object> ListMultimap<K, V> build() {
|
||||
return Multimaps.newListMultimap(
|
||||
MultimapBuilderWithKeys.this.<K, V>createMap(),
|
||||
new ArrayListSupplier<V>(expectedValuesPerKey));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/** Uses a {@link LinkedList} to store value collections. */
|
||||
public ListMultimapBuilder<K0, Object> linkedListValues() {
|
||||
return new ListMultimapBuilder<K0, Object>() {
|
||||
@Override
|
||||
public <K extends K0, V extends Object> ListMultimap<K, V> build() {
|
||||
return Multimaps.newListMultimap(
|
||||
MultimapBuilderWithKeys.this.<K, V>createMap(), LinkedListSupplier.<V>instance());
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/** Uses a hash-based {@code Set} to store value collections. */
|
||||
public SetMultimapBuilder<K0, Object> hashSetValues() {
|
||||
return hashSetValues(DEFAULT_EXPECTED_VALUES_PER_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses a hash-based {@code Set} to store value collections, initialized to expect the specified
|
||||
* number of values per key.
|
||||
*
|
||||
* @throws IllegalArgumentException if {@code expectedValuesPerKey < 0}
|
||||
*/
|
||||
public SetMultimapBuilder<K0, Object> hashSetValues(int expectedValuesPerKey) {
|
||||
checkNonnegative(expectedValuesPerKey, "expectedValuesPerKey");
|
||||
return new SetMultimapBuilder<K0, Object>() {
|
||||
@Override
|
||||
public <K extends K0, V extends Object> SetMultimap<K, V> build() {
|
||||
return Multimaps.newSetMultimap(
|
||||
MultimapBuilderWithKeys.this.<K, V>createMap(),
|
||||
new HashSetSupplier<V>(expectedValuesPerKey));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/** Uses an insertion-ordered hash-based {@code Set} to store value collections. */
|
||||
public SetMultimapBuilder<K0, Object> linkedHashSetValues() {
|
||||
return linkedHashSetValues(DEFAULT_EXPECTED_VALUES_PER_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses an insertion-ordered hash-based {@code Set} to store value collections, initialized to
|
||||
* expect the specified number of values per key.
|
||||
*
|
||||
* @throws IllegalArgumentException if {@code expectedValuesPerKey < 0}
|
||||
*/
|
||||
public SetMultimapBuilder<K0, Object> linkedHashSetValues(int expectedValuesPerKey) {
|
||||
checkNonnegative(expectedValuesPerKey, "expectedValuesPerKey");
|
||||
return new SetMultimapBuilder<K0, Object>() {
|
||||
@Override
|
||||
public <K extends K0, V extends Object> SetMultimap<K, V> build() {
|
||||
return Multimaps.newSetMultimap(
|
||||
MultimapBuilderWithKeys.this.<K, V>createMap(),
|
||||
new LinkedHashSetSupplier<V>(expectedValuesPerKey));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/** Uses a naturally-ordered {@link TreeSet} to store value collections. */
|
||||
@SuppressWarnings("rawtypes")
|
||||
public SortedSetMultimapBuilder<K0, Comparable> treeSetValues() {
|
||||
return treeSetValues(Ordering.natural());
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses a {@link TreeSet} ordered by the specified comparator to store value collections.
|
||||
*
|
||||
* <p>Multimaps generated by the resulting builder will not be serializable if {@code
|
||||
* comparator} is not serializable.
|
||||
*/
|
||||
public <V0 extends Object> SortedSetMultimapBuilder<K0, V0> treeSetValues(
|
||||
Comparator<V0> comparator) {
|
||||
Objects.requireNonNull(comparator, "comparator");
|
||||
return new SortedSetMultimapBuilder<K0, V0>() {
|
||||
@Override
|
||||
public <K extends K0, V extends V0> SortedSetMultimap<K, V> build() {
|
||||
return Multimaps.newSortedSetMultimap(
|
||||
MultimapBuilderWithKeys.this.<K, V>createMap(), new TreeSetSupplier<V>(comparator));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/** Uses an {@link EnumSet} to store value collections. */
|
||||
public <V0 extends Enum<V0>> SetMultimapBuilder<K0, V0> enumSetValues(Class<V0> valueClass) {
|
||||
Objects.requireNonNull(valueClass, "valueClass");
|
||||
return new SetMultimapBuilder<K0, V0>() {
|
||||
@Override
|
||||
public <K extends K0, V extends V0> SetMultimap<K, V> build() {
|
||||
// V must actually be V0, since enums are effectively final
|
||||
// (their subclasses are inaccessible)
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
Supplier<Set<V>> factory = (Supplier) new EnumSetSupplier<V0>(valueClass);
|
||||
return Multimaps.newSetMultimap(MultimapBuilderWithKeys.this.<K, V>createMap(), factory);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/** Returns a new, empty {@code Multimap} with the specified implementation. */
|
||||
public abstract <K extends K0, V extends V0> Multimap<K, V> build();
|
||||
|
||||
/**
|
||||
* Returns a {@code Multimap} with the specified implementation, initialized with the entries of
|
||||
* {@code multimap}.
|
||||
*/
|
||||
public <K extends K0, V extends V0> Multimap<K, V> build(
|
||||
Multimap<? extends K, ? extends V> multimap) {
|
||||
Multimap<K, V> result = build();
|
||||
result.putAll(multimap);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* A specialization of {@link MultimapBuilder} that generates {@link ListMultimap} instances.
|
||||
*/
|
||||
public abstract static class ListMultimapBuilder<
|
||||
K0 extends Object, V0 extends Object>
|
||||
extends MultimapBuilder<K0, V0> {
|
||||
ListMultimapBuilder() {}
|
||||
|
||||
@Override
|
||||
public abstract <K extends K0, V extends V0> ListMultimap<K, V> build();
|
||||
|
||||
@Override
|
||||
public <K extends K0, V extends V0> ListMultimap<K, V> build(
|
||||
Multimap<? extends K, ? extends V> multimap) {
|
||||
return (ListMultimap<K, V>) super.build(multimap);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A specialization of {@link MultimapBuilder} that generates {@link SetMultimap} instances.
|
||||
*
|
||||
* @since 16.0
|
||||
*/
|
||||
public abstract static class SetMultimapBuilder<
|
||||
K0 extends Object, V0 extends Object>
|
||||
extends MultimapBuilder<K0, V0> {
|
||||
SetMultimapBuilder() {}
|
||||
|
||||
@Override
|
||||
public abstract <K extends K0, V extends V0> SetMultimap<K, V> build();
|
||||
|
||||
@Override
|
||||
public <K extends K0, V extends V0> SetMultimap<K, V> build(
|
||||
Multimap<? extends K, ? extends V> multimap) {
|
||||
return (SetMultimap<K, V>) super.build(multimap);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A specialization of {@link MultimapBuilder} that generates {@link SortedSetMultimap} instances.
|
||||
*
|
||||
* @since 16.0
|
||||
*/
|
||||
public abstract static class SortedSetMultimapBuilder<
|
||||
K0 extends Object, V0 extends Object>
|
||||
extends SetMultimapBuilder<K0, V0> {
|
||||
SortedSetMultimapBuilder() {}
|
||||
|
||||
@Override
|
||||
public abstract <K extends K0, V extends V0> SortedSetMultimap<K, V> build();
|
||||
|
||||
@Override
|
||||
public <K extends K0, V extends V0> SortedSetMultimap<K, V> build(
|
||||
Multimap<? extends K, ? extends V> multimap) {
|
||||
return (SortedSetMultimap<K, V>) super.build(multimap);
|
||||
}
|
||||
}
|
||||
}
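A short usage sketch of the builder above, using the ListMultimap and SortedSetMultimap types from org.xbib.datastructures.api that this file already imports; the value types are inferred at build() time, and build(multimap) copies an existing multimap into the chosen implementation:

ListMultimap<String, Integer> byParity =
    MultimapBuilder.hashKeys(16).arrayListValues(4).build();
byParity.put("even", 2);
byParity.put("even", 4);
byParity.put("odd", 1);

// Copy into an implementation with sorted keys and reverse-sorted value sets.
SortedSetMultimap<String, Integer> sorted =
    MultimapBuilder.treeKeys(java.util.Comparator.<String>naturalOrder())
        .treeSetValues(java.util.Comparator.<Integer>reverseOrder())
        .build(byParity);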
|
File diff suppressed because it is too large
|
@ -0,0 +1,39 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.util.AbstractCollection;
|
||||
import java.util.Map;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
|
||||
/** A skeleton implementation of {@link Multimap#entries()}. */
|
||||
abstract class MultimapsEntries<K extends Object, V extends Object>
|
||||
extends AbstractCollection<Map.Entry<K, V>> {
|
||||
abstract Multimap<K, V> multimap();
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return multimap().size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
if (o instanceof Map.Entry) {
|
||||
Map.Entry<?, ?> entry = (Map.Entry<?, ?>) o;
|
||||
return multimap().containsEntry(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(Object o) {
|
||||
if (o instanceof Map.Entry) {
|
||||
Map.Entry<?, ?> entry = (Map.Entry<?, ?>) o;
|
||||
return multimap().remove(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
multimap().clear();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,47 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.util.Objects;
|
||||
import org.xbib.datastructures.api.Multiset;
|
||||
|
||||
/**
|
||||
* Implementation of the {@code equals}, {@code hashCode}, and {@code toString} methods of {@link
|
||||
* Multiset.Entry}.
|
||||
*/
|
||||
public abstract class MultisetsAbstractEntry<E extends Object> implements Multiset.Entry<E> {
|
||||
/**
|
||||
* Indicates whether an object equals this entry, following the behavior specified in {@link
|
||||
* Multiset.Entry#equals}.
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object object) {
|
||||
if (object instanceof Multiset.Entry) {
|
||||
Multiset.Entry<?> that = (Multiset.Entry<?>) object;
|
||||
return this.getCount() == that.getCount()
|
||||
&& Objects.equals(this.getElement(), that.getElement());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns this entry's hash code, following the behavior specified in {@link
|
||||
* Multiset.Entry#hashCode}.
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
E e = getElement();
|
||||
return ((e == null) ? 0 : e.hashCode()) ^ getCount();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a string representation of this multiset entry. The string representation consists of
|
||||
* the associated element if the associated count is one, and otherwise the associated element
|
||||
* followed by the characters " x " (space, x and space) followed by the count. Elements and
|
||||
* counts are converted to strings as by {@code String.valueOf}.
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
String text = String.valueOf(getElement());
|
||||
int n = getCount();
|
||||
return (n == 1) ? text : (text + " x " + n);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,33 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
public class MultisetsImmutableEntry<E extends Object> extends MultisetsAbstractEntry<E>
|
||||
implements Serializable {
|
||||
private final E element;
|
||||
private final int count;
|
||||
|
||||
MultisetsImmutableEntry(E element, int count) {
|
||||
this.element = element;
|
||||
this.count = count;
|
||||
if (count < 0) {
|
||||
throw new IllegalArgumentException("count cannot be negative: " + count);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public final E getElement() {
|
||||
return element;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final int getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public MultisetsImmutableEntry<E> nextInBucket() {
|
||||
return null;
|
||||
}
|
||||
|
||||
private static final long serialVersionUID = 0;
|
||||
}
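The toString() contract above ("element x count", with the count omitted when it is 1) and the negative-count check in the constructor can be seen directly with this immutable entry. A same-package sketch (the constructor is package-private):

System.out.println(new MultisetsImmutableEntry<>("a", 1)); // prints: a
System.out.println(new MultisetsImmutableEntry<>("a", 3)); // prints: a x 3
// new MultisetsImmutableEntry<>("a", -1) would throw IllegalArgumentException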
|
|
@ -0,0 +1,204 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import org.xbib.datastructures.api.Multimap;
|
||||
import org.xbib.datastructures.api.Multiset;
|
||||
|
||||
/**
|
||||
* Provides static methods for serializing collection classes.
|
||||
*
|
||||
* <p>This class assists the implementation of collection classes. Do not use this class to
|
||||
* serialize collections that are defined elsewhere.
|
||||
*/
|
||||
final class Serialization {
|
||||
private Serialization() {}
|
||||
|
||||
/**
|
||||
* Reads a count corresponding to a serialized map, multiset, or multimap. It returns the size of
|
||||
* a map serialized by {@link #writeMap(Map, ObjectOutputStream)}, the number of distinct elements
|
||||
* in a multiset serialized by {@link #writeMultiset(Multiset, ObjectOutputStream)}, or the number
|
||||
* of distinct keys in a multimap serialized by {@link #writeMultimap(Multimap,
|
||||
* ObjectOutputStream)}.
|
||||
*/
|
||||
static int readCount(ObjectInputStream stream) throws IOException {
|
||||
return stream.readInt();
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores the contents of a map in an output stream, as part of serialization. It does not support
|
||||
* concurrent maps whose content may change while the method is running.
|
||||
*
|
||||
* <p>The serialized output consists of the number of entries, first key, first value, second key,
|
||||
* second value, and so on.
|
||||
*/
|
||||
static <K extends Object, V extends Object> void writeMap(
|
||||
Map<K, V> map, ObjectOutputStream stream) throws IOException {
|
||||
stream.writeInt(map.size());
|
||||
for (Map.Entry<K, V> entry : map.entrySet()) {
|
||||
stream.writeObject(entry.getKey());
|
||||
stream.writeObject(entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates a map by reading an input stream, as part of deserialization. See {@link #writeMap}
|
||||
* for the data format.
|
||||
*/
|
||||
static <K extends Object, V extends Object> void populateMap(
|
||||
Map<K, V> map, ObjectInputStream stream) throws IOException, ClassNotFoundException {
|
||||
int size = stream.readInt();
|
||||
populateMap(map, stream, size);
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates a map by reading an input stream, as part of deserialization. See {@link #writeMap}
|
||||
* for the data format. The size is determined by a prior call to {@link #readCount}.
|
||||
*/
|
||||
static <K extends Object, V extends Object> void populateMap(
|
||||
Map<K, V> map, ObjectInputStream stream, int size)
|
||||
throws IOException, ClassNotFoundException {
|
||||
for (int i = 0; i < size; i++) {
|
||||
@SuppressWarnings("unchecked") // reading data stored by writeMap
|
||||
K key = (K) stream.readObject();
|
||||
@SuppressWarnings("unchecked") // reading data stored by writeMap
|
||||
V value = (V) stream.readObject();
|
||||
map.put(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores the contents of a multiset in an output stream, as part of serialization. It does not
|
||||
* support concurrent multisets whose content may change while the method is running.
|
||||
*
|
||||
* <p>The serialized output consists of the number of distinct elements, the first element, its
|
||||
* count, the second element, its count, and so on.
|
||||
*/
|
||||
static <E extends Object> void writeMultiset(
|
||||
Multiset<E> multiset, ObjectOutputStream stream) throws IOException {
|
||||
int entryCount = multiset.entrySet().size();
|
||||
stream.writeInt(entryCount);
|
||||
for (Multiset.Entry<E> entry : multiset.entrySet()) {
|
||||
stream.writeObject(entry.getElement());
|
||||
stream.writeInt(entry.getCount());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates a multiset by reading an input stream, as part of deserialization. See {@link
|
||||
* #writeMultiset} for the data format.
|
||||
*/
|
||||
static <E extends Object> void populateMultiset(
|
||||
Multiset<E> multiset, ObjectInputStream stream) throws IOException, ClassNotFoundException {
|
||||
int distinctElements = stream.readInt();
|
||||
populateMultiset(multiset, stream, distinctElements);
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates a multiset by reading an input stream, as part of deserialization. See {@link
|
||||
* #writeMultiset} for the data format. The number of distinct elements is determined by a prior
|
||||
* call to {@link #readCount}.
|
||||
*/
|
||||
static <E extends Object> void populateMultiset(
|
||||
Multiset<E> multiset, ObjectInputStream stream, int distinctElements)
|
||||
throws IOException, ClassNotFoundException {
|
||||
for (int i = 0; i < distinctElements; i++) {
|
||||
@SuppressWarnings("unchecked") // reading data stored by writeMultiset
|
||||
E element = (E) stream.readObject();
|
||||
int count = stream.readInt();
|
||||
multiset.add(element, count);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores the contents of a multimap in an output stream, as part of serialization. It does not
|
||||
* support concurrent multimaps whose content may change while the method is running. The {@link
|
||||
* Multimap#asMap} view determines the ordering in which data is written to the stream.
|
||||
*
|
||||
* <p>The serialized output consists of the number of distinct keys, and then for each distinct
|
||||
* key: the key, the number of values for that key, and the key's values.
|
||||
*/
|
||||
static <K extends Object, V extends Object> void writeMultimap(
|
||||
Multimap<K, V> multimap, ObjectOutputStream stream) throws IOException {
|
||||
stream.writeInt(multimap.asMap().size());
|
||||
for (Map.Entry<K, Collection<V>> entry : multimap.asMap().entrySet()) {
|
||||
stream.writeObject(entry.getKey());
|
||||
stream.writeInt(entry.getValue().size());
|
||||
for (V value : entry.getValue()) {
|
||||
stream.writeObject(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates a multimap by reading an input stream, as part of deserialization. See {@link
|
||||
* #writeMultimap} for the data format.
|
||||
*/
|
||||
static <K extends Object, V extends Object> void populateMultimap(
|
||||
Multimap<K, V> multimap, ObjectInputStream stream)
|
||||
throws IOException, ClassNotFoundException {
|
||||
int distinctKeys = stream.readInt();
|
||||
populateMultimap(multimap, stream, distinctKeys);
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates a multimap by reading an input stream, as part of deserialization. See {@link
|
||||
* #writeMultimap} for the data format. The number of distinct keys is determined by a prior call
|
||||
* to {@link #readCount}.
|
||||
*/
|
||||
static <K extends Object, V extends Object> void populateMultimap(
|
||||
Multimap<K, V> multimap, ObjectInputStream stream, int distinctKeys)
|
||||
throws IOException, ClassNotFoundException {
|
||||
for (int i = 0; i < distinctKeys; i++) {
|
||||
@SuppressWarnings("unchecked") // reading data stored by writeMultimap
|
||||
K key = (K) stream.readObject();
|
||||
Collection<V> values = multimap.get(key);
|
||||
int valueCount = stream.readInt();
|
||||
for (int j = 0; j < valueCount; j++) {
|
||||
@SuppressWarnings("unchecked") // reading data stored by writeMultimap
|
||||
V value = (V) stream.readObject();
|
||||
values.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Secret sauce for setting final fields; don't make it public.
|
||||
static <T> FieldSetter<T> getFieldSetter(Class<T> clazz, String fieldName) {
|
||||
try {
|
||||
Field field = clazz.getDeclaredField(fieldName);
|
||||
return new FieldSetter<>(field);
|
||||
} catch (NoSuchFieldException e) {
|
||||
throw new AssertionError(e); // programmer error
|
||||
}
|
||||
}
|
||||
|
||||
// Secret sauce for setting final fields; don't make it public.
|
||||
static final class FieldSetter<T> {
|
||||
private final Field field;
|
||||
|
||||
private FieldSetter(Field field) {
|
||||
this.field = field;
|
||||
field.setAccessible(true);
|
||||
}
|
||||
|
||||
void set(T instance, Object value) {
|
||||
try {
|
||||
field.set(instance, value);
|
||||
} catch (IllegalAccessException impossible) {
|
||||
throw new AssertionError(impossible);
|
||||
}
|
||||
}
|
||||
|
||||
void set(T instance, int value) {
|
||||
try {
|
||||
field.set(instance, value);
|
||||
} catch (IllegalAccessException impossible) {
|
||||
throw new AssertionError(impossible);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
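A sketch of how a serializable multimap in this package would typically combine these helpers in its writeObject/readObject methods. The class name MyMultimap, its final field "map", and the LinkedHashMap chosen to back it are hypothetical, used only for illustration:

// Inside a hypothetical class MyMultimap<K, V> implements Multimap<K, V>, Serializable:
private void writeObject(java.io.ObjectOutputStream stream) throws java.io.IOException {
  stream.defaultWriteObject();
  Serialization.writeMultimap(this, stream);
}

private void readObject(java.io.ObjectInputStream stream)
    throws java.io.IOException, ClassNotFoundException {
  stream.defaultReadObject();
  // Re-create the final backing map before repopulating it.
  Serialization.getFieldSetter(MyMultimap.class, "map").set(this, new java.util.LinkedHashMap<>());
  Serialization.populateMultimap(this, stream);
}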
|
|
@ -0,0 +1,33 @@
|
|||
package org.xbib.datastructures.multi;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* An iterator that transforms a backing iterator; for internal use.
|
||||
*/
|
||||
abstract class TransformedIterator<F extends Object, T extends Object>
|
||||
implements Iterator<T> {
|
||||
final Iterator<? extends F> backingIterator;
|
||||
|
||||
TransformedIterator(Iterator<? extends F> backingIterator) {
|
||||
this.backingIterator = Objects.requireNonNull(backingIterator);
|
||||
}
|
||||
|
||||
abstract T transform(F from);
|
||||
|
||||
@Override
|
||||
public final boolean hasNext() {
|
||||
return backingIterator.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public final T next() {
|
||||
return transform(backingIterator.next());
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void remove() {
|
||||
backingIterator.remove();
|
||||
}
|
||||
}
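A same-package sketch of how this transformer is meant to be subclassed (the abstract transform method is package-private, exactly as keyIterator in MapsKeySet above uses it):

java.util.Iterator<String> words = java.util.Arrays.asList("alpha", "be").iterator();
java.util.Iterator<Integer> lengths = new TransformedIterator<String, Integer>(words) {
  @Override
  Integer transform(String from) {
    return from.length(); // applied lazily as the caller advances
  }
};
lengths.forEachRemaining(System.out::println); // prints 5 then 2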
|
|
@ -2,9 +2,3 @@ dependencies {
|
|||
testImplementation testLibs.assertj
|
||||
testImplementation testLibs.compile.testing
|
||||
}
|
||||
|
||||
test {
|
||||
jvmArgs '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
|
||||
'--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
|
||||
'--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED'
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
package org.xbib.datastructures.xslx;
|
||||
package com.incesoft.tools.excel.support;
|
||||
|
||||
public class CellFormat {
|
||||
public CellFormat() {
|
|
@ -1,13 +1,16 @@
|
|||
package org.xbib.datastructures.xslx;
|
||||
package com.incesoft.tools.excel.support;
|
||||
|
||||
import java.io.File;
|
||||
import java.text.SimpleDateFormat;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.Cell;
|
||||
import org.xbib.datastructures.xslx.jxl.CellType;
|
||||
import org.xbib.datastructures.xslx.jxl.DateCell;
|
||||
import org.xbib.datastructures.xslx.jxl.Sheet;
|
||||
import org.xbib.datastructures.xslx.jxl.Workbook;
|
||||
import org.xbib.datastructures.xslx.ExcelRowIterator;
|
||||
import org.xbib.datastructures.xslx.ReaderSupport;
|
||||
|
||||
import jxl.Cell;
|
||||
import jxl.CellType;
|
||||
import jxl.DateCell;
|
||||
import jxl.Sheet;
|
||||
import jxl.Workbook;
|
||||
|
||||
public class XLSReaderSupport extends ReaderSupport {
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package org.xbib.datastructures.xslx;
|
||||
package com.incesoft.tools.excel.support;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -7,16 +7,18 @@ import java.util.HashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.Workbook;
|
||||
import org.xbib.datastructures.xslx.jxl.format.Colour;
|
||||
import org.xbib.datastructures.xslx.jxl.format.RGB;
|
||||
import org.xbib.datastructures.xslx.jxl.write.Label;
|
||||
import org.xbib.datastructures.xslx.jxl.write.WritableCell;
|
||||
import org.xbib.datastructures.xslx.jxl.write.WritableCellFormat;
|
||||
import org.xbib.datastructures.xslx.jxl.write.WritableFont;
|
||||
import org.xbib.datastructures.xslx.jxl.write.WritableSheet;
|
||||
import org.xbib.datastructures.xslx.jxl.write.WritableWorkbook;
|
||||
import org.xbib.datastructures.xslx.jxl.write.WriteException;
|
||||
import jxl.Workbook;
|
||||
import jxl.format.Colour;
|
||||
import jxl.format.RGB;
|
||||
import jxl.write.Label;
|
||||
import jxl.write.WritableCell;
|
||||
import jxl.write.WritableCellFormat;
|
||||
import jxl.write.WritableFont;
|
||||
import jxl.write.WritableSheet;
|
||||
import jxl.write.WritableWorkbook;
|
||||
import jxl.write.WriteException;
|
||||
|
||||
import org.xbib.datastructures.xslx.WriterSupport;
|
||||
|
||||
public class XLSWriterSupport extends WriterSupport {
|
||||
WritableSheet sheet;
|
|
@ -1,11 +1,13 @@
|
|||
package org.xbib.datastructures.xslx;
|
||||
package com.incesoft.tools.excel.support;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
||||
import org.xbib.datastructures.xslx.Sheet.SheetRowReader;
|
||||
|
||||
import javax.xml.stream.XMLStreamException;
|
||||
import org.xbib.datastructures.xslx.ExcelRowIterator;
|
||||
import org.xbib.datastructures.xslx.ReaderSupport;
|
||||
import com.incesoft.tools.excel.xlsx.Cell;
|
||||
import com.incesoft.tools.excel.xlsx.Sheet;
|
||||
import com.incesoft.tools.excel.xlsx.SimpleXLSXWorkbook;
|
||||
import com.incesoft.tools.excel.xlsx.Sheet.SheetRowReader;
|
||||
|
||||
public class XLSXReaderSupport extends ReaderSupport {
|
||||
|
||||
|
@ -31,7 +33,7 @@ public class XLSXReaderSupport extends ReaderSupport {
|
|||
|| curRow[col] == null)
|
||||
return null;
|
||||
String v = curRow[col].getValue();
|
||||
return v == null || v.trim().isEmpty() ? null : v.trim();
|
||||
return v == null || v.trim().length() == 0 ? null : v.trim();
|
||||
}
|
||||
|
||||
public int getRowPos() {
|
||||
|
@ -44,7 +46,7 @@ public class XLSXReaderSupport extends ReaderSupport {
|
|||
|
||||
SheetRowReader reader;
|
||||
|
||||
public void init() throws IOException {
|
||||
public void init() {
|
||||
reader = sheet.newReader();
|
||||
}
|
||||
|
||||
|
@ -83,14 +85,14 @@ public class XLSXReaderSupport extends ReaderSupport {
|
|||
|
||||
int rowPos = -1;
|
||||
|
||||
public void init() throws XMLStreamException, IOException {
|
||||
public void init() {
|
||||
currentSheetRowCount = sheet.getRowCount();
|
||||
}
|
||||
|
||||
public boolean nextRow() {
|
||||
rowPos++;
|
||||
if (rowPos == currentSheetRowCount) {
|
||||
return false;
|
||||
if (rowPos == currentSheetRowCount) {// the current row is the last row of the current sheet
|
||||
return false;// this was the last row of all records
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@@ -99,7 +101,7 @@ public class XLSXReaderSupport extends ReaderSupport {
if (col < 0)
return null;
String v = sheet.getCellValue(rowPos, col);
return v == null || v.trim().isEmpty() ? null : v.trim();
return v == null || v.trim().length() == 0 ? null : v.trim();
}

public byte getSheetIndex() {

@@ -136,7 +138,7 @@ public class XLSXReaderSupport extends ReaderSupport {
}
}

public ExcelRowIterator rowIterator() throws XMLStreamException, IOException {
public ExcelRowIterator rowIterator() {
ExcelRowIterator iterator = lazy ? new LazyXLSXObjectIterator()
: new XLSXObjectIterator();
iterator.init();
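For orientation, here is a minimal read sketch built only from members visible in the hunks above: the SimpleXLSXWorkbook(File) constructor, getSheet(int, boolean), getRowCount(), and getCellValue(row, col). The file name, the column count, and the choice of the com.incesoft package names are assumptions for illustration, not taken from this diff.

```java
import java.io.File;

import com.incesoft.tools.excel.xlsx.Sheet;
import com.incesoft.tools.excel.xlsx.SimpleXLSXWorkbook;

public class XlsxReadSketch {
    public static void main(String[] args) {
        // "data.xlsx" and the column count of 3 are placeholders.
        SimpleXLSXWorkbook workbook = new SimpleXLSXWorkbook(new File("data.xlsx"));
        Sheet sheet = workbook.getSheet(0, true); // parse all rows up front
        for (int row = 0; row < sheet.getRowCount(); row++) {
            for (int col = 0; col < 3; col++) {
                String value = sheet.getCellValue(row, col); // may be null for empty cells
                System.out.print(value + "\t");
            }
            System.out.println();
        }
    }
}
```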
@@ -1,15 +1,25 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.support;

import javax.xml.stream.XMLStreamException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.xbib.datastructures.xslx.WriterSupport;
import com.incesoft.tools.excel.xlsx.CellStyle;
import com.incesoft.tools.excel.xlsx.Fill;
import com.incesoft.tools.excel.xlsx.Font;
import com.incesoft.tools.excel.xlsx.Sheet;
import com.incesoft.tools.excel.xlsx.SimpleXLSXWorkbook;

public class XLSXWriterSupport extends WriterSupport {
SimpleXLSXWorkbook workbook;

public void open() {
workbook = new SimpleXLSXWorkbook();
if (getClass().getResource("/empty.xlsx") == null) {
throw new IllegalStateException("no empty.xlsx found in classpath");
}
workbook = new SimpleXLSXWorkbook(new File(getClass().getResource("/empty.xlsx").getFile()));
}

Sheet sheet;

@@ -18,17 +28,17 @@ public class XLSXWriterSupport extends WriterSupport {
return Integer.MAX_VALUE / 2;
}

public void writeRow(String[] rowData) throws XMLStreamException, IOException {
public void writeRow(String[] rowData) {
writeRow(rowData, null);
}

public void writeRow(String[] rowData, CellFormat[] formats) throws XMLStreamException, IOException {
public void writeRow(String[] rowData, CellFormat[] formats) {
for (int col = 0; col < rowData.length; col++) {
String string = rowData[col];
if (string == null)
continue;
CellFormat format = null;
if (formats != null) {
if (formats != null && formats.length > 0) {
for (CellFormat cellFormat : formats) {
if (cellFormat != null && cellFormat.getCellIndex() == col) {
format = cellFormat;

@@ -82,7 +92,7 @@ public class XLSXWriterSupport extends WriterSupport {
}
}

public void createNewSheet() throws IOException {
public void createNewSheet() {
if (sheetIndex > 0) {
throw new IllegalStateException("only one sheet allowed");
}
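The per-column lookup inside writeRow above can be read as the standalone helper below. This is an editorial restatement of the loop shown in the hunk, nothing more; CellFormat here stands for the row-format type used by WriterSupport (its package is not shown in this hunk), and getCellIndex() is the accessor visible in the diff.

```java
// Sketch of the column/format matching done in writeRow(String[], CellFormat[]):
// the first non-null format whose cell index equals the column wins.
final class ColumnFormats {
    static CellFormat formatForColumn(CellFormat[] formats, int col) {
        if (formats == null || formats.length == 0) {
            return null;
        }
        for (CellFormat cellFormat : formats) {
            if (cellFormat != null && cellFormat.getCellIndex() == col) {
                return cellFormat; // first matching column index wins
            }
        }
        return null; // no explicit format for this column
    }
}
```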
@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

public class Cell {
Cell(String r, String s, String t, String v, String text) {

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import javax.xml.stream.XMLStreamException;

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import javax.xml.stream.XMLStreamException;

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

public class FontRegion {

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

public abstract class IndexedObject {
int index;

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import javax.xml.stream.XMLStreamReader;

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import java.util.ArrayList;

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import javax.xml.stream.XMLStreamException;

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
@@ -1,7 +1,6 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

@@ -10,8 +9,8 @@ import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;

import org.xbib.datastructures.xslx.SimpleXLSXWorkbook.ModifyEntry;
import org.xbib.datastructures.xslx.SimpleXLSXWorkbook.XMLStreamCreator;
import com.incesoft.tools.excel.xlsx.SimpleXLSXWorkbook.ModifyEntry;
import com.incesoft.tools.excel.xlsx.SimpleXLSXWorkbook.XMLStreamCreator;

/**
* One Sheet in a workbook.It provides read and write functions of the
@@ -43,7 +42,7 @@ public class Sheet {
*/
boolean alreadyParsed = false;

void parseAllRows() throws IOException {
void parseAllRows() {
if (!alreadyParsed) {
alreadyParsed = true;
new SheetRowReader(this, workbook.getSheetReader(sheetIndex + 1),

@@ -217,7 +216,7 @@ public class Sheet {

}

public SheetRowReader newReader() throws IOException {
public SheetRowReader newReader() {
return new SheetRowReader(this,
workbook.getSheetReader(sheetIndex + 1), false);
}
@@ -241,7 +240,7 @@ public class Sheet {
private int rowCount = -2;

// count of the lazy or non-lazy rows
public int getRowCount() throws IOException, XMLStreamException {
public int getRowCount() {
if (alreadyParsed && addToMemory) {
return parsedRows.size();
}

@@ -251,10 +250,10 @@ public class Sheet {
try {
// <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
// <worksheet ...><dimension ref="A1:C3"/>...<sheetData>
loopR:
while (reader.hasNext()) {
loopR: while (reader.hasNext()) {
int type = reader.next();
if (type == XMLStreamReader.START_ELEMENT) {
switch (type) {
case XMLStreamReader.START_ELEMENT:
if ("dimension".equals(reader.getLocalName())) {
String v = reader.getAttributeValue(null, "ref");
if (v != null) {

@@ -276,10 +275,16 @@ public class Sheet {
if (r > rowCount)
rowCount = r;
}
break;
}
}
} catch (XMLStreamException e) {
throw new RuntimeException(e);
} finally {
try {
reader.close();
} catch (XMLStreamException e) {
}
}
}
return rowCount;
@@ -632,7 +637,16 @@ public class Sheet {

private boolean merged = false;

void mergeSheet() throws XMLStreamException, IOException {
/**
* <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <worksheet
* xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main"
* xmlns:r=
* "http://schemas.openxmlformats.org/officeDocument/2006/relationships" ><sheetData>
*
* @param writer
* @throws XMLStreamException
*/
void mergeSheet() throws XMLStreamException {
if (merged) {
writeSheet();
return;
@@ -1,11 +1,11 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import java.util.StringTokenizer;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

import org.xbib.datastructures.xslx.SimpleXLSXWorkbook.XMLStreamCreator;
import com.incesoft.tools.excel.xlsx.SimpleXLSXWorkbook.XMLStreamCreator;

public class SheetCommentWriter {

@@ -1,4 +1,4 @@
package org.xbib.datastructures.xslx;
package com.incesoft.tools.excel.xlsx;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -32,12 +32,31 @@ import javax.xml.stream.XMLStreamWriter;
/**
* A simple implementation of OOXML(Excel part) to read and modify Excel 2007+
* documents
*
*/
public class SimpleXLSXWorkbook {
static {
// this the fastest stax implementation by test,especially when doing
// output
if ("false".equals(System.getProperty("ince.tools.excel.disableXMLOptimize"))) {
System.setProperty("javax.xml.stream.XMLInputFactory", "com.ctc.wstx.stax.WstxInputFactory");
System.setProperty("javax.xml.stream.XMLOutputFactory", "com.ctc.wstx.stax.WstxOutputFactory");
}
}

private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance();
ZipFile zipfile;

private ZipFile zipfile;
private InputStream findData(String name) {
try {
ZipEntry entry = zipfile.getEntry(name);
if (entry != null) {
return zipfile.getInputStream(entry);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return null;
}

private static final String PATH_XL_RELATION = "xl/_rels/workbook.xml.rels";
@@ -47,9 +66,9 @@ public class SimpleXLSXWorkbook {

private static final String PATH_CONTENT_TYPES = "[Content_Types].xml";

private static final List<Pattern> blackListPatterns = new ArrayList<Pattern>();
static private List<Pattern> blackListPatterns = new ArrayList<Pattern>();

private static final List<String> blackList = Arrays.asList(".*comments\\d+\\.xml", ".*calcChain\\.xml",
static private List<String> blackList = Arrays.asList(".*comments\\d+\\.xml", ".*calcChain\\.xml",
".*drawings/vmlDrawing\\d+\\.vml");
static {
for (String pstr : blackList) {
@@ -57,22 +76,16 @@ public class SimpleXLSXWorkbook {
}
}

public SimpleXLSXWorkbook() {
sheets.add(new Sheet(0, this));
}

public SimpleXLSXWorkbook(File file) throws IOException, XMLStreamException {
public SimpleXLSXWorkbook(File file) {
try {
this.zipfile = new ZipFile(file);
InputStream stream = findData(PATH_SHAREDSTRINGS);
if (stream != null) {
parseSharedStrings(stream);
}
for (int i = 0; true; i++) {
ZipEntry entry = zipfile.getEntry(getSheetPath(i + 1));
if (entry == null) {
break;
}
sheets.add(new Sheet(i, this));
initSheets();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@@ -157,6 +170,17 @@ public class SimpleXLSXWorkbook {
return null;
}

// int getSharedStringIndex(String string) {
// Integer i = (Integer) sharedStrings.inverseBidiMap().get(string);
// if (i != null) {
// return i;
// } else {
// return -1;
// }
// }

XMLInputFactory inputFactory = XMLInputFactory.newInstance();

private String getSheetPath(int i) {
return String.format(PATH_SHEET, i);
}
@@ -169,8 +193,7 @@ public class SimpleXLSXWorkbook {
return String.format(PATH_SHEET_COMMENT_VMLDRAWING, i);
}

private void parseSharedStrings(InputStream inputStream)
throws XMLStreamException {
private void parseSharedStrings(InputStream inputStream) throws Exception {
XMLStreamReader reader = inputFactory.createXMLStreamReader(inputStream);
int type;
boolean si = false;

@@ -218,23 +241,10 @@ public class SimpleXLSXWorkbook {
private static final String PATH_STYLES = "xl/styles.xml";

private static final String STR_XML_HEAD = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>";

private static final byte[] DATA_XL_WORKSHEETS__RELS_SHEET = (STR_XML_HEAD + "<Relationships xmlns=\"http://schemas.openxmlformats.org/package/2006/relationships\"></Relationships>")
.getBytes();

XMLStreamReader getSheetReader(Integer sheetId) throws IOException {
if (sheetId == null) {
sheetId = 1;
}
return getReader(getSheetPath(sheetId));
}

XMLStreamReader getStylesReader() throws IOException {
return getReader(PATH_STYLES);
}

private XMLStreamReader getReader(String resourceId) throws IOException {
XMLStreamReader getReader(String resourceId) {
InputStream stream = findData(resourceId);
if (stream == null) {
if (resourceId.startsWith("xl/worksheets/_rels/sheet")) {
@@ -252,9 +262,30 @@ public class SimpleXLSXWorkbook {
}
}

XMLStreamReader getSheetReader(Integer sheetId) {
if (sheetId == null) {
sheetId = 1;
}
return getReader(getSheetPath(sheetId));
}

XMLStreamReader getStylesReader() {
return getReader(PATH_STYLES);
}

// SHEET>>>
List<Sheet> sheets = new ArrayList<Sheet>();

private void initSheets() {
for (int i = 0; true; i++) {
ZipEntry entry = zipfile.getEntry(getSheetPath(i + 1));
if (entry == null) {
break;
}
sheets.add(new Sheet(i, this));
}
}

/**
* create new sheet added to exists sheet list
*/
@@ -268,11 +299,11 @@ public class SimpleXLSXWorkbook {
return sheets.size();
}

public Sheet getSheet(int i) throws IOException {
public Sheet getSheet(int i) {
return getSheet(i, true);
}

public Sheet getSheet(int i, boolean parseAllRow) throws IOException {
public Sheet getSheet(int i, boolean parseAllRow) {
if (i >= sheets.size())
throw new IllegalArgumentException("sheet " + i + " not exists!SheetCount=" + sheets.size());
Sheet sheet = sheets.get(i);
@@ -388,7 +419,7 @@ public class SimpleXLSXWorkbook {
writer.writeEndElement();// end sst
}

private void mergeStyles(XMLStreamWriter writer) throws XMLStreamException, IOException {
private void mergeStyles(XMLStreamWriter writer) throws XMLStreamException {
prepareStylesCount();

XMLStreamReader reader = getStylesReader();

@@ -575,11 +606,12 @@ public class SimpleXLSXWorkbook {

boolean stylesCountLoaded = false;

private void prepareStylesCount() throws XMLStreamException, IOException {
private void prepareStylesCount() {
if (stylesCountLoaded)
return;
stylesCountLoaded = true;

try {
XMLStreamReader reader = getStylesReader();
loop1: while (reader.hasNext()) {
int event = reader.next();
@@ -602,6 +634,9 @@ public class SimpleXLSXWorkbook {
break;
}
}
} catch (XMLStreamException e) {
throw new RuntimeException(e);
}
}

/**

@@ -617,7 +652,7 @@ public class SimpleXLSXWorkbook {
return new Fill();
}

public CellStyle createStyle(Font font, Fill fill) throws XMLStreamException, IOException {
public CellStyle createStyle(Font font, Fill fill) {
if (font == null && fill == null) {
throw new IllegalArgumentException("either font or fill is required");
}
@@ -796,10 +831,14 @@ public class SimpleXLSXWorkbook {
/**
* merge the sheet's modifications
*/
public void commitSheetModifications() throws XMLStreamException, IOException {
public void commitSheetModifications() {
try {
if (lastCommitSheet == null)
throw new IllegalStateException("plz call beginCommitSheet(Sheet) first");
lastCommitSheet.mergeSheet();
} catch (XMLStreamException e) {
throw new RuntimeException(e);
}
}

/**
@@ -1026,6 +1065,21 @@ public class SimpleXLSXWorkbook {
commiter.endCommit();
}

// MODIFY <<<

// TEST
public static void testMergeStyles(SimpleXLSXWorkbook excel, XMLStreamWriter writer) throws Exception {
// CellStyle style = excel.createStyle();
// style.setFont(new Font());
// style.getFont().setColor("FFFF0000");
// style = excel.createStyle();
// style.setFont(new Font());
// style.setFill(new Fill());
// style.getFont().setColor("FF0000FF");
// style.getFill().setFgColor("FF00FF00");

}

@SuppressWarnings("unchecked")
static private class BidirectionMap implements Map {
private Map values = new LinkedHashMap();
|
|||
}
|
||||
}
|
||||
|
||||
private InputStream findData(String name) throws IOException {
|
||||
ZipEntry entry = zipfile.getEntry(name);
|
||||
if (entry != null) {
|
||||
return zipfile.getInputStream(entry);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
|
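Putting the SimpleXLSXWorkbook pieces visible above together, here is a hedged usage sketch of createStyle(Font, Fill). The setColor/setFgColor calls and ARGB color strings mirror the commented-out testMergeStyles code in the diff, and the template file name is a placeholder; applying the style to cells and committing the sheet (beginCommitSheet is only mentioned in an exception message above) is intentionally left out.

```java
import java.io.File;

import com.incesoft.tools.excel.xlsx.CellStyle;
import com.incesoft.tools.excel.xlsx.Fill;
import com.incesoft.tools.excel.xlsx.Font;
import com.incesoft.tools.excel.xlsx.SimpleXLSXWorkbook;

public class XlsxStyleSketch {
    public static void main(String[] args) {
        // createStyle(Font, Fill) rejects a call where both arguments are null,
        // as the IllegalArgumentException in the diff shows.
        SimpleXLSXWorkbook workbook = new SimpleXLSXWorkbook(new File("empty.xlsx"));
        Font font = new Font();
        font.setColor("FFFF0000");   // ARGB red, as in the commented-out test code
        Fill fill = new Fill();
        fill.setFgColor("FF00FF00"); // ARGB green foreground
        CellStyle style = workbook.createStyle(font, fill);
        System.out.println("created style: " + style);
    }
}
```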
@@ -17,7 +17,7 @@
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***************************************************************************/

package org.xbib.datastructures.xslx.jxl;
package jxl;

/**
* This type represents the Microsoft concept of a Boolean. Accordingly, this

@@ -17,7 +17,7 @@
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***************************************************************************/

package org.xbib.datastructures.xslx.jxl;
package jxl;

/**
* A mixin interface for numerical formulas, which combines the interfaces

@@ -17,9 +17,9 @@
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***************************************************************************/

package org.xbib.datastructures.xslx.jxl;
package jxl;

import org.xbib.datastructures.xslx.jxl.format.CellFormat;
import jxl.format.CellFormat;

/**
* Represents an individual Cell within a Sheet. May be queried for its

@@ -17,9 +17,9 @@
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***************************************************************************/

package org.xbib.datastructures.xslx.jxl;
package jxl;

import org.xbib.datastructures.xslx.jxl.biff.BaseCellFeatures;
import jxl.biff.BaseCellFeatures;

/**
* Container for any additional cell features
28
datastructures-xslx/src/main/java/jxl/CellFormat.java
Executable file
@@ -0,0 +1,28 @@
/*********************************************************************
*
* Copyright (C) 2002 Andrew Khan
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***************************************************************************/

package jxl;

/**
* Interface for cell formats - used for typing information
*
* @deprecated Repackaged as jxl.format.CellFormat
*/
public interface CellFormat extends jxl.format.CellFormat {
}
@ -17,10 +17,9 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.write.WritableWorkbook;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.ExternalSheet;
|
||||
import jxl.write.WritableWorkbook;
|
||||
|
||||
/**
|
||||
* Exposes some cell reference helper methods to the public interface.
|
||||
|
@ -42,7 +41,7 @@ public final class CellReferenceHelper {
|
|||
* @param buf the string buffer to append
|
||||
*/
|
||||
public static void getCellReference(int column, int row, StringBuffer buf) {
|
||||
org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference(column, row, buf);
|
||||
jxl.biff.CellReferenceHelper.getCellReference(column, row, buf);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -59,7 +58,7 @@ public final class CellReferenceHelper {
|
|||
int row,
|
||||
boolean rowabs,
|
||||
StringBuffer buf) {
|
||||
org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference(column, colabs,
|
||||
jxl.biff.CellReferenceHelper.getCellReference(column, colabs,
|
||||
row, rowabs,
|
||||
buf);
|
||||
}
|
||||
|
@ -73,7 +72,7 @@ public final class CellReferenceHelper {
|
|||
* @return the cell reference
|
||||
*/
|
||||
public static String getCellReference(int column, int row) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference(column, row);
|
||||
return jxl.biff.CellReferenceHelper.getCellReference(column, row);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -83,7 +82,7 @@ public final class CellReferenceHelper {
|
|||
* @return the column portion of the cell reference
|
||||
*/
|
||||
public static int getColumn(String s) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getColumn(s);
|
||||
return jxl.biff.CellReferenceHelper.getColumn(s);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -93,7 +92,7 @@ public final class CellReferenceHelper {
|
|||
* @return the letter for that column number
|
||||
*/
|
||||
public static String getColumnReference(int c) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getColumnReference(c);
|
||||
return jxl.biff.CellReferenceHelper.getColumnReference(c);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -103,7 +102,7 @@ public final class CellReferenceHelper {
|
|||
* @return the row number
|
||||
*/
|
||||
public static int getRow(String s) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getRow(s);
|
||||
return jxl.biff.CellReferenceHelper.getRow(s);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -113,7 +112,7 @@ public final class CellReferenceHelper {
|
|||
* @return TRUE if the column is relative, FALSE otherwise
|
||||
*/
|
||||
public static boolean isColumnRelative(String s) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.isColumnRelative(s);
|
||||
return jxl.biff.CellReferenceHelper.isColumnRelative(s);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -123,7 +122,7 @@ public final class CellReferenceHelper {
|
|||
* @return TRUE if the row is relative, FALSE otherwise
|
||||
*/
|
||||
public static boolean isRowRelative(String s) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.isRowRelative(s);
|
||||
return jxl.biff.CellReferenceHelper.isRowRelative(s);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -139,8 +138,8 @@ public final class CellReferenceHelper {
|
|||
public static void getCellReference
|
||||
(int sheet, int column, int row,
|
||||
Workbook workbook, StringBuffer buf) {
|
||||
org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (ExternalSheet) workbook, buf);
|
||||
jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (jxl.biff.formula.ExternalSheet) workbook, buf);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -158,8 +157,8 @@ public final class CellReferenceHelper {
|
|||
int row,
|
||||
WritableWorkbook workbook,
|
||||
StringBuffer buf) {
|
||||
org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (ExternalSheet) workbook, buf);
|
||||
jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (jxl.biff.formula.ExternalSheet) workbook, buf);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -181,9 +180,9 @@ public final class CellReferenceHelper {
|
|||
boolean rowabs,
|
||||
Workbook workbook,
|
||||
StringBuffer buf) {
|
||||
org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference
|
||||
jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, colabs, row, rowabs,
|
||||
(ExternalSheet) workbook, buf);
|
||||
(jxl.biff.formula.ExternalSheet) workbook, buf);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -200,8 +199,8 @@ public final class CellReferenceHelper {
|
|||
int column,
|
||||
int row,
|
||||
Workbook workbook) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (ExternalSheet) workbook);
|
||||
return jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (jxl.biff.formula.ExternalSheet) workbook);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -218,8 +217,8 @@ public final class CellReferenceHelper {
|
|||
int column,
|
||||
int row,
|
||||
WritableWorkbook workbook) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (ExternalSheet) workbook);
|
||||
return jxl.biff.CellReferenceHelper.getCellReference
|
||||
(sheet, column, row, (jxl.biff.formula.ExternalSheet) workbook);
|
||||
}
|
||||
|
||||
|
||||
|
@ -230,7 +229,7 @@ public final class CellReferenceHelper {
|
|||
* @return the sheet name
|
||||
*/
|
||||
public static String getSheet(String ref) {
|
||||
return org.xbib.datastructures.xslx.jxl.biff.CellReferenceHelper.getSheet(ref);
|
||||
return jxl.biff.CellReferenceHelper.getSheet(ref);
|
||||
}
|
||||
|
||||
/**
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* An enumeration type listing the available content types for a cell
|
|
@ -17,9 +17,9 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.format.CellFormat;
|
||||
import jxl.format.CellFormat;
|
||||
|
||||
/**
|
||||
* This is a bean which client applications may use to get/set various
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import java.text.DateFormat;
|
||||
import java.util.Date;
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* A mixin interface for date formulas, which combines the interfaces
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* This type represents a cell which contains an error. This error will
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* A mixin interface for numerical formulas, which combines the interfaces
|
|
@ -17,9 +17,9 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.FormulaException;
|
||||
import jxl.biff.formula.FormulaException;
|
||||
|
||||
/**
|
||||
* Interface for formulas which allow clients to read the Excel formula
|
|
@ -17,12 +17,12 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* Class which represents an Excel header or footer.
|
||||
*/
|
||||
public final class HeaderFooter extends org.xbib.datastructures.xslx.jxl.biff.HeaderFooter {
|
||||
public final class HeaderFooter extends jxl.biff.HeaderFooter {
|
||||
/**
|
||||
* Default constructor.
|
||||
*/
|
||||
|
@ -98,7 +98,7 @@ public final class HeaderFooter extends org.xbib.datastructures.xslx.jxl.biff.He
|
|||
*
|
||||
* @return the created contents
|
||||
*/
|
||||
protected org.xbib.datastructures.xslx.jxl.biff.HeaderFooter.Contents createContents() {
|
||||
protected jxl.biff.HeaderFooter.Contents createContents() {
|
||||
return new Contents();
|
||||
}
|
||||
|
||||
|
@ -108,7 +108,7 @@ public final class HeaderFooter extends org.xbib.datastructures.xslx.jxl.biff.He
|
|||
* @param s the string to create the contents
|
||||
* @return the created contents
|
||||
*/
|
||||
protected org.xbib.datastructures.xslx.jxl.biff.HeaderFooter.Contents createContents(String s) {
|
||||
protected jxl.biff.HeaderFooter.Contents createContents(String s) {
|
||||
return new Contents(s);
|
||||
}
|
||||
|
||||
|
@ -118,15 +118,15 @@ public final class HeaderFooter extends org.xbib.datastructures.xslx.jxl.biff.He
|
|||
* @param c the contents to copy
|
||||
* @return the new contents
|
||||
*/
|
||||
protected org.xbib.datastructures.xslx.jxl.biff.HeaderFooter.Contents
|
||||
createContents(org.xbib.datastructures.xslx.jxl.biff.HeaderFooter.Contents c) {
|
||||
protected jxl.biff.HeaderFooter.Contents
|
||||
createContents(jxl.biff.HeaderFooter.Contents c) {
|
||||
return new Contents((Contents) c);
|
||||
}
|
||||
|
||||
/**
|
||||
* The contents - a simple wrapper around a string buffer
|
||||
*/
|
||||
public static class Contents extends org.xbib.datastructures.xslx.jxl.biff.HeaderFooter.Contents {
|
||||
public static class Contents extends jxl.biff.HeaderFooter.Contents {
|
||||
/**
|
||||
* The constructor
|
||||
*/
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import java.io.File;
|
||||
import org.xbib.datastructures.xslx.jxl.common.LengthUnit;
|
||||
import jxl.common.LengthUnit;
|
||||
|
||||
/**
|
||||
* Accessor functions for an image
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* Base exception class for JExcelAPI exceptions
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* A label cell
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import java.text.NumberFormat;
|
||||
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* A mixin interface for numerical formulas, which combines the interfaces
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* Represents a 3-D range of cells in a workbook. This object is
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import java.util.regex.Pattern;
|
||||
import org.xbib.datastructures.xslx.jxl.format.CellFormat;
|
||||
import jxl.format.CellFormat;
|
||||
|
||||
/**
|
||||
* Represents a sheet within a workbook. Provides a handle to the individual
|
|
@ -17,13 +17,13 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.biff.SheetRangeImpl;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Assert;
|
||||
import org.xbib.datastructures.xslx.jxl.format.PageOrder;
|
||||
import org.xbib.datastructures.xslx.jxl.format.PageOrientation;
|
||||
import org.xbib.datastructures.xslx.jxl.format.PaperSize;
|
||||
import jxl.biff.SheetRangeImpl;
|
||||
import jxl.common.Assert;
|
||||
import jxl.format.PageOrder;
|
||||
import jxl.format.PageOrientation;
|
||||
import jxl.format.PaperSize;
|
||||
|
||||
/**
|
||||
* This is a bean which client applications may use to get/set various
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
/**
|
||||
* A mixin interface for numerical formulas, which combines the interfaces
|
|
@ -17,19 +17,19 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.BiffException;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.File;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.PasswordException;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.WorkbookParser;
|
||||
import org.xbib.datastructures.xslx.jxl.write.WritableWorkbook;
|
||||
import org.xbib.datastructures.xslx.jxl.write.biff.WritableWorkbookImpl;
|
||||
import jxl.read.biff.BiffException;
|
||||
import jxl.read.biff.File;
|
||||
import jxl.read.biff.PasswordException;
|
||||
import jxl.read.biff.WorkbookParser;
|
||||
import jxl.write.WritableWorkbook;
|
||||
import jxl.write.biff.WritableWorkbookImpl;
|
||||
|
||||
/**
|
||||
* Represents a Workbook. Contains the various factory methods and provides
|
|
@ -17,14 +17,14 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl;
|
||||
package jxl;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.CountryCode;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.FunctionNames;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import jxl.biff.CountryCode;
|
||||
import jxl.biff.formula.FunctionNames;
|
||||
import jxl.common.Logger;
|
||||
|
||||
/**
|
||||
* This is a bean which client applications may use to set various advanced
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import java.io.IOException;
|
||||
import org.xbib.datastructures.xslx.jxl.write.biff.File;
|
||||
import jxl.write.biff.File;
|
||||
|
||||
/**
|
||||
* Information for autofiltering
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.Record;
|
||||
import jxl.common.Logger;
|
||||
import jxl.read.biff.Record;
|
||||
|
||||
/**
|
||||
* Range information for conditional formatting
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.Record;
|
||||
import jxl.common.Logger;
|
||||
import jxl.read.biff.Record;
|
||||
|
||||
/**
|
||||
* Range information for conditional formatting
|
|
@ -17,16 +17,16 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import java.util.Collection;
|
||||
import org.xbib.datastructures.xslx.jxl.CellReferenceHelper;
|
||||
import org.xbib.datastructures.xslx.jxl.Range;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.drawing.ComboBox;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.drawing.Comment;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Assert;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import org.xbib.datastructures.xslx.jxl.write.biff.CellValue;
|
||||
import jxl.CellReferenceHelper;
|
||||
import jxl.Range;
|
||||
import jxl.biff.drawing.ComboBox;
|
||||
import jxl.biff.drawing.Comment;
|
||||
import jxl.common.Assert;
|
||||
import jxl.common.Logger;
|
||||
import jxl.write.biff.CellValue;
|
||||
|
||||
/**
|
||||
* Container for any additional cell features
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.common.Assert;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import jxl.common.Assert;
|
||||
import jxl.common.Logger;
|
||||
|
||||
/**
|
||||
* Contains the common data for a compound file
|
|
@ -17,9 +17,9 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.format.Format;
|
||||
import jxl.format.Format;
|
||||
|
||||
/**
|
||||
* The excel string for the various built in formats. Used to present
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
/**
|
||||
* Enumeration of built in names
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
/**
|
||||
* Represents a built in, rather than a user defined, style.
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
/**
|
||||
* A growable array of bytes
|
|
@ -17,7 +17,7 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
/**
|
||||
* Interface which provides a method for transferring chunks of binary
|
|
@ -17,14 +17,14 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.xbib.datastructures.xslx.jxl.Cell;
|
||||
import org.xbib.datastructures.xslx.jxl.CellType;
|
||||
import org.xbib.datastructures.xslx.jxl.LabelCell;
|
||||
import org.xbib.datastructures.xslx.jxl.Sheet;
|
||||
import jxl.Cell;
|
||||
import jxl.CellType;
|
||||
import jxl.LabelCell;
|
||||
import jxl.Sheet;
|
||||
|
||||
/**
|
||||
* Refactorisation to provide more sophisticated find cell by contents
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.ExternalSheet;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import jxl.biff.formula.ExternalSheet;
|
||||
import jxl.common.Logger;
|
||||
|
||||
/**
|
||||
* A helper to transform between excel cell references and
|
|
@ -17,12 +17,12 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import org.xbib.datastructures.xslx.jxl.write.biff.File;
|
||||
import jxl.write.biff.File;
|
||||
|
||||
/**
|
||||
* Class containing the CONDFMT and CF records for conditionally formatting
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.Record;
|
||||
import jxl.common.Logger;
|
||||
import jxl.read.biff.Record;
|
||||
|
||||
/**
|
||||
* Range information for conditional formatting
|
|
@ -17,10 +17,10 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.Record;
|
||||
import jxl.common.Logger;
|
||||
import jxl.read.biff.Record;
|
||||
|
||||
/**
|
||||
* The conditional format conditions
|
|
@ -17,9 +17,9 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.read.biff.Record;
|
||||
import jxl.read.biff.Record;
|
||||
|
||||
/**
|
||||
* A continue record - only used explicitly in special circumstances, as
|
|
@ -17,9 +17,9 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import jxl.common.Logger;
|
||||
|
||||
/**
|
||||
* Enumeration type for the excel country codes
|
|
@ -17,19 +17,19 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import java.text.DecimalFormat;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import org.xbib.datastructures.xslx.jxl.WorkbookSettings;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.ExternalSheet;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.FormulaException;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.FormulaParser;
|
||||
import org.xbib.datastructures.xslx.jxl.biff.formula.ParseContext;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Assert;
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import jxl.WorkbookSettings;
|
||||
import jxl.biff.formula.ExternalSheet;
|
||||
import jxl.biff.formula.FormulaException;
|
||||
import jxl.biff.formula.FormulaParser;
|
||||
import jxl.biff.formula.ParseContext;
|
||||
import jxl.common.Assert;
|
||||
import jxl.common.Logger;
|
||||
|
||||
/**
|
||||
* Class which parses the binary data associated with Data Validity (DV)
|
|
@ -17,9 +17,9 @@
|
|||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
***************************************************************************/
|
||||
|
||||
package org.xbib.datastructures.xslx.jxl.biff;
|
||||
package jxl.biff;
|
||||
|
||||
import org.xbib.datastructures.xslx.jxl.common.Logger;
|
||||
import jxl.common.Logger;
|
||||
|
||||
/**
|
||||
* Class which parses the binary data associated with Data Validity (DVal)
|
Some files were not shown because too many files have changed in this diff.