re-enable yield() method, add TreeMultiMap

parent 7a056ec4aa
commit df3205ee0f

8 changed files with 183 additions and 27 deletions
@@ -1,7 +1,7 @@
plugins {
    id "org.sonarqube" version "2.2"
    id "org.ajoberstar.github-pages" version "1.6.0-rc.1"
    id "org.xbib.gradle.plugin.jbake" version "1.1.0"
    id "org.xbib.gradle.plugin.jbake" version "1.2.1"
}

@@ -14,7 +14,7 @@ ext {
allprojects {

    group = 'org.xbib'
    version = '1.0.3'
    version = '1.0.4'

    apply plugin: 'java'
    apply plugin: 'maven'

@@ -127,6 +127,10 @@ public class RdfXmlContentParser<R extends RdfContentParams> implements RdfConst
        return this;
    }

    public RdfContentBuilder<R> getBuilder() {
        return builder;
    }

    public XmlHandler<R> getHandler() {
        return xmlHandler;
    }

@@ -359,6 +363,32 @@ public class RdfXmlContentParser<R extends RdfContentParams> implements RdfConst
        return l;
    }

    /**
     * Allows overriding this method to control triple stream generation.
     * @param triple a triple
     * @throws IOException if the triple cannot be passed to the builder
     */
    protected void yield(Triple triple) throws IOException {
        if (builder != null) {
            builder.receive(triple);
        }
    }

    private void yield(Object s, Object p, Object o) throws IOException {
        yield(new DefaultTriple(resource.newSubject(s), resource.newPredicate(p), resource.newObject(o)));
    }

    // produce a (possibly) reified triple
    private void yield(Object s, IRI p, Object o, IRI reified) throws IOException {
        yield(s, p, o);
        if (reified != null) {
            yield(reified, RDF_TYPE, RDF_STATEMENT);
            yield(reified, RDF_SUBJECT, s);
            yield(reified, RDF_PREDICATE, p);
            yield(reified, RDF_OBJECT, o);
        }
    }

    private static class Frame {
        IRI node = null; // the subject/object
        String lang = null; // the language tag

@@ -638,27 +668,6 @@ public class RdfXmlContentParser<R extends RdfContentParams> implements RdfConst
        return b;
    }

    // produce a (possibly) reified triple
    private void yield(Object s, IRI p, Object o, IRI reified) throws IOException {
        yield(s, p, o);
        if (reified != null) {
            yield(reified, RDF_TYPE, RDF_STATEMENT);
            yield(reified, RDF_SUBJECT, s);
            yield(reified, RDF_PREDICATE, p);
            yield(reified, RDF_OBJECT, o);
        }
    }

    private void yield(Object s, Object p, Object o) throws IOException {
        yield(new DefaultTriple(resource.newSubject(s), resource.newPredicate(p), resource.newObject(o)));
    }

    private void yield(Triple triple) throws IOException {
        if (builder != null) {
            builder.receive(triple);
        }
    }

    // if we're looking at a subject, is it an item in a Collection?
    private boolean isCollectionItem(Deque<Frame> stack) throws SAXException {
        if (inPredicate(stack)) {

@@ -668,6 +677,5 @@ public class RdfXmlContentParser<R extends RdfContentParams> implements RdfConst
            return false;
        }
    }

}
}

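Note: the re-enabled yield(Triple) hook above is protected, so a parser subclass can observe or filter the triple stream before builder.receive() is called. The following self-contained sketch only illustrates that override pattern; Triple and TripleSink here are simplified stand-ins for the real model and builder types, not the org.xbib API, and only the shape of the protected hook is taken from this diff.

```java
import java.io.IOException;

// Stand-in for the real Triple model class (hypothetical, for illustration only).
record Triple(String subject, String predicate, String object) {}

// Stand-in for the receiving side of RdfContentBuilder (hypothetical).
interface TripleSink {
    void receive(Triple triple) throws IOException;
}

// Mirrors the parser's pattern: a protected yield(Triple) hook that forwards
// to the configured sink and that subclasses may override.
class TripleProducer {
    private final TripleSink sink;

    TripleProducer(TripleSink sink) {
        this.sink = sink;
    }

    protected void yield(Triple triple) throws IOException {
        if (sink != null) {
            sink.receive(triple);
        }
    }
}

// A subclass that drops rdf:type triples before they reach the sink.
class FilteringTripleProducer extends TripleProducer {

    FilteringTripleProducer(TripleSink sink) {
        super(sink);
    }

    @Override
    protected void yield(Triple triple) throws IOException {
        if (!"http://www.w3.org/1999/02/22-rdf-syntax-ns#type".equals(triple.predicate())) {
            super.yield(triple);
        }
    }
}
```
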
@@ -53,6 +53,10 @@ public abstract class AbstractXmlHandler<P extends RdfContentParams>
        return resource;
    }

    public LinkedList<QName> getParents() {
        return parents;
    }

    @Override
    public AbstractXmlHandler<P> setDefaultNamespace(String prefix, String namespaceURI) {
        this.defaultPrefix = prefix;

@@ -0,0 +1,97 @@
package org.xbib.content.rdf.util;

import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

/**
 * A {@link TreeMap} based multi map. The keys are ordered.
 * @param <K> the key type
 * @param <V> the value type
 */
public class TreeMultiMap<K, V> implements MultiMap<K, V> {

    private final Map<K, Set<V>> map = new TreeMap<>();

    @Override
    public int size() {
        return map.size();
    }

    @Override
    public void clear() {
        map.clear();
    }

    @Override
    public boolean isEmpty() {
        return map.isEmpty();
    }

    @Override
    public boolean containsKey(K key) {
        return map.containsKey(key);
    }

    @Override
    public Set<K> keySet() {
        return map.keySet();
    }

    @Override
    public boolean put(K key, V value) {
        Set<V> set = map.get(key);
        if (set == null) {
            set = new LinkedHashSet<>();
            set.add(value);
            map.put(key, set);
            return true;
        } else {
            set.add(value);
            return false;
        }
    }

    @Override
    public void putAll(K key, Collection<V> values) {
        Set<V> set = map.get(key);
        if (set == null) {
            set = new LinkedHashSet<>();
            map.put(key, set);
        }
        set.addAll(values);
    }

    @Override
    public Collection<V> get(K key) {
        return map.get(key);
    }

    @Override
    public Set<V> remove(K key) {
        return map.remove(key);
    }

    @Override
    public boolean remove(K key, V value) {
        Set<V> set = map.get(key);
        return set != null && set.remove(value);
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof TreeMultiMap && map.equals(((TreeMultiMap) obj).map);
    }

    @Override
    public int hashCode() {
        return map.hashCode();
    }

    @Override
    public String toString() {
        return map.toString();
    }
}

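The put() method above returns true only when the key is new (a fresh LinkedHashSet is created for it) and false when the value is merged into an existing set; keys come back in natural order because the backing map is a TreeMap, while values keep insertion order within each key. A small usage sketch against the class exactly as it appears in this diff (the wrapper class and method names are illustrative):

```java
import org.xbib.content.rdf.util.TreeMultiMap;

public class TreeMultiMapUsage {
    public static void main(String[] args) {
        TreeMultiMap<String, Integer> map = new TreeMultiMap<>();
        System.out.println(map.put("b", 1)); // true: first value for key "b"
        System.out.println(map.put("b", 2)); // false: merged into the existing set
        System.out.println(map.put("a", 3)); // true: first value for key "a"
        System.out.println(map.keySet());    // [a, b] -- sorted, backed by TreeMap
        System.out.println(map.get("b"));    // [1, 2] -- insertion order within a key
    }
}
```
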
@@ -0,0 +1,43 @@
package org.xbib.content.rdf.util;

import org.junit.Test;

import java.util.Arrays;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 *
 */
public class MultiMapTest {

    @Test
    public void testLinkedHashMultiMap() {
        LinkedHashMultiMap<String, String> map = new LinkedHashMultiMap<>();
        map.put("a", "b");
        map.put("b", "c");
        map.put("a", "c");
        assertTrue(map.containsKey("a"));
        assertTrue(map.containsKey("b"));
        assertEquals("[b, c]", map.get("a").toString());
        assertEquals("[c]", map.get("b").toString());
        map.putAll("a", Arrays.asList("d", "e"));
        assertEquals("[b, c, d, e]", map.get("a").toString());
    }

    @Test
    public void testTreeMultiMap() {
        TreeMultiMap<String, String> map = new TreeMultiMap<>();
        map.put("a", "b");
        map.put("b", "c");
        map.put("a", "c");
        assertTrue(map.containsKey("a"));
        assertTrue(map.containsKey("b"));
        assertEquals("[a, b]", map.keySet().toString());
        assertEquals("[b, c]", map.get("a").toString());
        assertEquals("[c]", map.get("b").toString());
        map.putAll("a", Arrays.asList("d", "e"));
        assertEquals("[b, c, d, e]", map.get("a").toString());
    }
}

@@ -0,0 +1,4 @@
/**
 * Classes for testing multi maps.
 */
package org.xbib.content.rdf.util;

@@ -1,5 +1,5 @@

task xbibUpload(type: Upload) {
task xbibUpload(type: Upload, dependsOn: build) {
    configuration = configurations.archives
    uploadDescriptor = true
    repositories {

@@ -14,7 +14,7 @@ task xbibUpload(type: Upload) {
    }
}

task sonatypeUpload(type: Upload) {
task sonatypeUpload(type: Upload, dependsOn: build) {
    configuration = configurations.archives
    uploadDescriptor = true
    repositories {