Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Modified backend and frontend for handling grouping of datasets #460

Open
wants to merge 9 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,13 @@
</repositories>

<dependencies>

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
<version>2.7.15</version>
</dependency>
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you please move this dependency to the other Spring dependencies? Just search for the <!-- SPRING --> line to find them.


<!-- NIF transfer lib -->
<dependency>
<groupId>org.aksw</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,12 @@
import org.aksw.gerbil.datatypes.ErrorTypes;
import org.aksw.gerbil.datatypes.ExperimentType;
import org.aksw.gerbil.exceptions.GerbilException;
import org.aksw.gerbil.web.config.DatasetsConfig;

/**
* Contains all information needed to load an annotator for a specific
* experiment type.
*
*
* @author Michael R&ouml;der (roeder@informatik.uni-leipzig.de)
*
*/
Expand All @@ -39,7 +40,7 @@ public class AnnotatorConfigurationImpl extends AbstractAdapterConfiguration imp
/**
 * Creates an annotator configuration that instantiates its annotator via
 * reflection. Annotators have no dataset group of their own, so the
 * default dataset group is passed to the superclass.
 *
 * @param annotatorName the name of the annotator
 * @param couldBeCached true if experiment results involving this annotator
 *            may be cached in the database
 * @param constructor the constructor used to create annotator instances
 * @param constructorArgs the arguments handed to the constructor
 * @param applicableForExperiment the experiment type this annotator supports
 */
public AnnotatorConfigurationImpl(String annotatorName, boolean couldBeCached,
        Constructor<? extends Annotator> constructor, Object constructorArgs[],
        ExperimentType applicableForExperiment) {
    super(annotatorName, DatasetsConfig.DEFAULT_DATASET_GROUP, couldBeCached, applicableForExperiment);
    this.constructor = constructor;
    this.constructorArgs = constructorArgs;
}
Expand Down Expand Up @@ -87,4 +88,4 @@ public String toString() {
builder.append(')');
return builder.toString();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,10 @@ public abstract class AbstractDatasetConfiguration extends AbstractAdapterConfig
protected EntityCheckerManager entityCheckerManager;
protected SameAsRetriever globalRetriever;

public AbstractDatasetConfiguration(String datasetName, boolean couldBeCached,
/**
 * Creates a dataset configuration carrying the dataset's name and group as
 * well as the helpers used to post-process loaded datasets.
 *
 * @param datasetName the name of the dataset
 * @param datasetGroup the group this dataset is listed under in the UI
 * @param couldBeCached true if experiment results on this dataset may be
 *            cached in the database
 * @param applicableForExperiment the experiment type this dataset supports
 * @param entityCheckerManager used to check entities of loaded documents
 * @param globalRetriever retriever used for sameAs resolution
 */
public AbstractDatasetConfiguration(String datasetName, String datasetGroup, boolean couldBeCached,
        ExperimentType applicableForExperiment, EntityCheckerManager entityCheckerManager,
        SameAsRetriever globalRetriever) {
    super(datasetName, datasetGroup, couldBeCached, applicableForExperiment);
    this.entityCheckerManager = entityCheckerManager;
    this.globalRetriever = globalRetriever;
}
Expand Down
26 changes: 26 additions & 0 deletions src/main/java/org/aksw/gerbil/dataset/AdapterConfigSerializer.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package org.aksw.gerbil.dataset;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.aksw.gerbil.datatypes.AbstractAdapterConfiguration;

import java.io.IOException;

/**
 * Jackson serializer that reduces an {@link AbstractAdapterConfiguration} to
 * the two properties the frontend grouping needs: its name and its group.
 *
 * Note: a {@code @JsonSerialize(using = ...)} annotation belongs on the
 * serialized class (or the serializer is registered via a module), NOT on the
 * serializer class itself — the original self-annotation had no effect.
 */
public class AdapterConfigSerializer extends StdSerializer<AbstractAdapterConfiguration> {

    // StdSerializer implements Serializable
    private static final long serialVersionUID = 1L;

    /**
     * No-argument constructor, required when Jackson instantiates the
     * serializer itself (e.g. through a {@code @JsonSerialize} annotation on
     * the serialized class).
     */
    public AdapterConfigSerializer() {
        this(AbstractAdapterConfiguration.class);
    }

    public AdapterConfigSerializer(Class<AbstractAdapterConfiguration> t) {
        // forward the handled type instead of ignoring the argument
        super(t);
    }

    @Override
    public void serialize(AbstractAdapterConfiguration value, JsonGenerator gen, SerializerProvider provider)
            throws IOException {
        gen.writeStartObject();
        gen.writeStringField("name", value.getName());
        gen.writeStringField("group", value.getGroup());
        gen.writeEndObject();
    }
}
2 changes: 1 addition & 1 deletion src/main/java/org/aksw/gerbil/dataset/Dataset.java
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,6 @@ public interface Dataset extends Closeable {
public void setName(String name);

public List<Document> getInstances();

public void setClosePermitionGranter(ClosePermitionGranter granter);
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,11 @@ public class DatasetConfigurationImpl extends AbstractDatasetConfiguration {
protected Constructor<? extends Dataset> constructor;
protected Object constructorArgs[];

public DatasetConfigurationImpl(String datasetName, boolean couldBeCached,
/**
 * Creates a dataset configuration that instantiates its dataset via
 * reflection.
 *
 * @param datasetName the name of the dataset
 * @param datasetGroup the group this dataset is listed under in the UI
 * @param couldBeCached true if experiment results on this dataset may be
 *            cached in the database
 * @param constructor the constructor used to create dataset instances
 * @param constructorArgs the arguments handed to the constructor
 * @param applicableForExperiment the experiment type this dataset supports
 * @param entityCheckerManager used to check entities of loaded documents
 * @param globalRetriever retriever used for sameAs resolution
 */
public DatasetConfigurationImpl(String datasetName, String datasetGroup, boolean couldBeCached,
        Constructor<? extends Dataset> constructor, Object constructorArgs[],
        ExperimentType applicableForExperiment, EntityCheckerManager entityCheckerManager,
        SameAsRetriever globalRetriever) {
    super(datasetName, datasetGroup, couldBeCached, applicableForExperiment, entityCheckerManager, globalRetriever);
    this.constructor = constructor;
    this.constructorArgs = constructorArgs;
}
Expand Down Expand Up @@ -66,4 +66,4 @@ public String toString() {
builder.append(')');
return builder.toString();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import org.aksw.gerbil.semantic.sameas.SameAsRetriever;
import org.aksw.gerbil.transfer.nif.Document;
import org.aksw.gerbil.utils.ClosePermitionGranter;
import org.aksw.gerbil.web.config.DatasetsConfig;

public class InstanceListBasedDataset extends AbstractDatasetConfiguration implements Dataset {

Expand All @@ -34,13 +35,13 @@ public InstanceListBasedDataset(List<Document> instances, ExperimentType applica
}

/**
 * Creates a named in-memory dataset from the given documents. No entity
 * checking or sameAs retrieval is performed (both helpers are null) and the
 * dataset is assigned to the default dataset group.
 *
 * @param name the name of the dataset
 * @param instances the documents this dataset comprises
 * @param applicableForExperiment the experiment type this dataset supports
 */
public InstanceListBasedDataset(String name, List<Document> instances, ExperimentType applicableForExperiment) {
    super(name, DatasetsConfig.DEFAULT_DATASET_GROUP, false, applicableForExperiment, null, null);
    this.instances = instances;
}

/**
 * Creates a named in-memory dataset from the given documents, using the
 * given helpers for entity checking and sameAs retrieval. The dataset is
 * assigned to the default dataset group.
 *
 * @param name the name of the dataset
 * @param instances the documents this dataset comprises
 * @param applicableForExperiment the experiment type this dataset supports
 * @param entityCheckerManager used to check entities of the documents
 * @param globalRetriever retriever used for sameAs resolution
 */
public InstanceListBasedDataset(String name, List<Document> instances, ExperimentType applicableForExperiment,
        EntityCheckerManager entityCheckerManager, SameAsRetriever globalRetriever) {
    super(name, DatasetsConfig.DEFAULT_DATASET_GROUP, false, applicableForExperiment, entityCheckerManager, globalRetriever);
    this.instances = instances;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@ public class SingletonDatasetConfigImpl extends DatasetConfigurationImpl impleme
protected int instanceUsages = 0;
protected Semaphore instanceMutex = new Semaphore(1);

public SingletonDatasetConfigImpl(String annotatorName, boolean couldBeCached,
/**
 * Creates a dataset configuration that shares one dataset instance between
 * all experiment tasks using it.
 *
 * Parameters were renamed from {@code annotatorName}/{@code annotatorGroup}
 * — this class configures a dataset, not an annotator.
 *
 * @param datasetName the name of the dataset
 * @param datasetGroup the group this dataset is listed under in the UI
 * @param couldBeCached true if experiment results on this dataset may be
 *            cached in the database
 * @param constructor the constructor used to create the dataset instance
 * @param constructorArgs the arguments handed to the constructor
 * @param applicableForExperiment the experiment type this dataset supports
 * @param entityCheckerManager used to check entities of loaded documents
 * @param globalRetriever retriever used for sameAs resolution
 */
public SingletonDatasetConfigImpl(String datasetName, String datasetGroup, boolean couldBeCached,
        Constructor<? extends Dataset> constructor, Object constructorArgs[],
        ExperimentType applicableForExperiment, EntityCheckerManager entityCheckerManager,
        SameAsRetriever globalRetriever) {
    super(datasetName, datasetGroup, couldBeCached, constructor, constructorArgs, applicableForExperiment, entityCheckerManager,
            globalRetriever);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import org.aksw.gerbil.dataset.impl.nif.FileBasedNIFDataset;
import org.aksw.gerbil.datatypes.ExperimentType;
import org.aksw.gerbil.semantic.sameas.SameAsRetriever;
import org.aksw.gerbil.web.config.DatasetsConfig;
import org.apache.jena.riot.Lang;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand All @@ -44,7 +45,7 @@ public class DatahubNIFConfig extends AbstractDatasetConfiguration {

/**
 * Creates a configuration for a NIF dataset downloaded from Datahub. The
 * dataset is assigned to the default dataset group and is fixed to the
 * A2KB experiment type.
 *
 * @param datasetName the name of the dataset
 * @param datasetUrl the URL the NIF file is downloaded from
 * @param couldBeCached true if experiment results on this dataset may be
 *            cached in the database
 * @param entityCheckerManager used to check entities of loaded documents
 * @param globalRetriever retriever used for sameAs resolution
 */
public DatahubNIFConfig(String datasetName, String datasetUrl, boolean couldBeCached, EntityCheckerManager entityCheckerManager,
        SameAsRetriever globalRetriever) {
    super(datasetName, DatasetsConfig.DEFAULT_DATASET_GROUP, couldBeCached, ExperimentType.A2KB, entityCheckerManager, globalRetriever);
    this.datasetUrl = datasetUrl;
    rt = new RestTemplate();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@
public abstract class AbstractDataset implements Dataset {

protected String name;

protected String group;

protected ClosePermitionGranter granter;

public AbstractDataset() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.aksw.gerbil.dataset.check.EntityCheckerManager;
import org.aksw.gerbil.datatypes.ExperimentType;
import org.aksw.gerbil.semantic.sameas.SameAsRetriever;
import org.aksw.gerbil.web.config.DatasetsConfig;
import org.apache.jena.riot.Lang;

public class NIFFileDatasetConfig extends AbstractDatasetConfiguration {
Expand All @@ -29,7 +30,7 @@ public class NIFFileDatasetConfig extends AbstractDatasetConfiguration {

/**
 * Creates a configuration for a NIF dataset that is read from a local
 * file. The dataset is assigned to the default dataset group.
 *
 * @param name the name of the dataset
 * @param file the path of the NIF file
 * @param couldBeCached true if experiment results on this dataset may be
 *            cached in the database
 * @param applicableForExperiment the experiment type this dataset supports
 * @param entityCheckerManager used to check entities of loaded documents
 * @param globalRetriever retriever used for sameAs resolution
 */
public NIFFileDatasetConfig(String name, String file, boolean couldBeCached, ExperimentType applicableForExperiment,
        EntityCheckerManager entityCheckerManager, SameAsRetriever globalRetriever) {
    super(name, DatasetsConfig.DEFAULT_DATASET_GROUP, couldBeCached, applicableForExperiment, entityCheckerManager, globalRetriever);
    this.file = file;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,25 +23,35 @@ public abstract class AbstractAdapterConfiguration implements AdapterConfigurati
private static final ExperimentTypeComparator EXP_TYPE_COMPARATOR = new ExperimentTypeComparator();

protected String name;
protected String group;
protected boolean couldBeCached;
protected ExperimentType applicableForExperiment;

public AbstractAdapterConfiguration(String name, boolean couldBeCached, ExperimentType applicableForExperiment) {
/**
 * Creates an adapter configuration with the given name, group, caching
 * flag and experiment type.
 *
 * @param name the name of the adapter
 * @param group the group this adapter is listed under in the UI
 * @param couldBeCached true if experiment results involving this adapter
 *            may be cached in the database
 * @param applicableForExperiment the experiment type this adapter supports
 */
public AbstractAdapterConfiguration(String name, String group, boolean couldBeCached, ExperimentType applicableForExperiment) {
    this.name = name;
    // Assign directly instead of calling the overridable setGroup(String):
    // invoking an overridable method from a constructor lets a subclass
    // override observe a partially initialized object.
    this.group = group;
    this.couldBeCached = couldBeCached;
    this.applicableForExperiment = applicableForExperiment;
}

// Returns the adapter's name (contract documented in AdapterConfiguration).
@Override
public String getName() {
return name;
}

/**
 * Getter of the adapter's group.
 *
 * @return The group of the adapter.
 */
@Override // declared in AdapterConfiguration; annotation was missing here
public String getGroup() {
    return group;
}

// Replaces the adapter's name (contract documented in AdapterConfiguration).
@Override
public void setName(String name) {
this.name = name;
}

// Replaces the adapter's group (contract documented in AdapterConfiguration).
@Override
public void setGroup(String group) {
this.group = group;
}
@Override
public boolean couldBeCached() {
return couldBeCached;
Expand Down
31 changes: 23 additions & 8 deletions src/main/java/org/aksw/gerbil/datatypes/AdapterConfiguration.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,22 +19,22 @@
/**
* Interface of an adapter configuration of the GERBIL system. It represents the
* adapter and is able to create an adapter instance.
*
*
* @author Michael Röder
*
*
*/
public interface AdapterConfiguration extends Comparable<AdapterConfiguration>{

/**
* Getter of the adapters name.
*
*
* @return The name of the adapter.
*/
public String getName();

/**
* Setter of the adapters name.
*
*
* @param name
* The name of the adapter.
*/
Expand All @@ -43,7 +43,7 @@ public interface AdapterConfiguration extends Comparable<AdapterConfiguration>{
/**
* Returns true if the system is allowed to cache the results of experiments
* in which this adapter has been involved.
*
*
* @return true if the results could be cached inside the database.
* Otherwise false is returned.
*/
Expand All @@ -53,21 +53,36 @@ public interface AdapterConfiguration extends Comparable<AdapterConfiguration>{
* Setter for the caching flag which should be set to true if the system is
* allowed to cache the results of experiments in which this adapter has
* been involved.
*
*
* @param couldBeCached
*/
public void setCouldBeCached(boolean couldBeCached);

/**
* Returns true if this adapter can be used for an experiment of the given
* type.
*
*
* @param type
* the experiment type that should be checked
* @return true if this adapter can be used for an experiment of the given
* type.
*/
public boolean isApplicableForExperiment(ExperimentType type);

public ExperimentType getExperimentType();

/**
* Getter of the adapter's group.
*
* @return The group of the adapter.
*/
public String getGroup();
Hardiksh16 marked this conversation as resolved.
Show resolved Hide resolved

/**
 * Setter of the adapter's group.
 *
 * @param group
 *            The group of the adapter.
 */
public void setGroup(String group);
}
26 changes: 16 additions & 10 deletions src/main/java/org/aksw/gerbil/web/MainController.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,21 +18,19 @@

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.*;

import javax.annotation.PostConstruct;
import javax.servlet.http.HttpServletRequest;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.core.type.TypeReference;
import org.aksw.gerbil.Experimenter;
import org.aksw.gerbil.config.GerbilConfiguration;
import org.aksw.gerbil.database.ExperimentDAO;
import org.aksw.gerbil.dataid.DataIDGenerator;
import org.aksw.gerbil.dataset.DatasetConfiguration;
import org.aksw.gerbil.datatypes.ExperimentTaskConfiguration;
import org.aksw.gerbil.datatypes.ExperimentTaskStatus;
import org.aksw.gerbil.datatypes.ExperimentType;
Expand Down Expand Up @@ -306,18 +304,26 @@ public ModelAndView experiment(@RequestParam(value = "id") String id, HttpServle
}

@RequestMapping("/datasets")
public @ResponseBody List<String> datasets(@RequestParam(value = "experimentType") String experimentType) {
public @ResponseBody Map<String, List<DatasetConfiguration>> datasets(@RequestParam(value = "experimentType") String experimentType) {
ExperimentType type = null;
Map<String, List<DatasetConfiguration>> response = new TreeMap<>();
try {
type = ExperimentType.valueOf(experimentType);
} catch (IllegalArgumentException e) {
LOGGER.warn("Got a request containing a wrong ExperimentType (\"{}\"). Ignoring it.", experimentType);
return null;
}
Set<String> datasets = adapterManager.getDatasetNamesForExperiment(type);
List<String> list = Lists.newArrayList(datasets);
Collections.sort(list);
return list;
try {
List<DatasetConfiguration> datasetConfigurations = adapterManager.getDatasetDetailsForExperiment(type);
for (DatasetConfiguration config : datasetConfigurations) {
response.computeIfAbsent(config.getGroup(), k -> new ArrayList<>()).add(config);
}
response.values().forEach(newList -> newList.sort(Comparator.naturalOrder()));
} catch (Exception e) {
LOGGER.error("Error fetching datasets for ExperimentType: {}", experimentType, e);
return null;
}
return response;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This part should simply look like:

    public @ResponseBody Map<String, List<DatasetConfiguration>> datasets(@RequestParam(value = "experimentType") String experimentType) {
        ExperimentType type = null;
        Map<String, List<DatasetConfiguration>> response = new TreeMap<>();
        try {
            type = ExperimentType.valueOf(experimentType);
        } catch (IllegalArgumentException e) {
            LOGGER.warn("Got a request containing a wrong ExperimentType (\"{}\"). Ignoring it.", experimentType);
            return null;
        }
        return adapterManager.getDatasetDetailsForExperiment(type);
    }

Actually, the return null; is also not really nice... instead, we should return HTTP 400 with an error message.

}

/**
Expand Down
20 changes: 20 additions & 0 deletions src/main/java/org/aksw/gerbil/web/config/AdapterList.java
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,11 @@
import java.util.Map;
import java.util.Set;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.aksw.gerbil.dataset.AdapterConfigSerializer;
import org.aksw.gerbil.datatypes.AbstractAdapterConfiguration;
import org.aksw.gerbil.datatypes.AdapterConfiguration;
import org.aksw.gerbil.datatypes.ExperimentType;

Expand Down Expand Up @@ -85,6 +90,21 @@ public Set<String> getAdapterNamesForExperiment(ExperimentType type) {
return names;
}

/**
 * Serializes the configurations of all adapters applicable for the given
 * experiment type into JSON strings containing their name and group.
 *
 * @param type the experiment type the adapters have to support
 * @return one JSON string per matching adapter configuration
 * @throws IllegalStateException if a configuration can't be serialized
 */
public List<String> getAdapterDetailsForExperiment(ExperimentType type) {
    List<T> configs = getAdaptersForExperiment(type);
    ObjectMapper mapper = new ObjectMapper();
    // Register the custom serializer explicitly. Without this module the
    // imported AdapterConfigSerializer is never applied and Jackson falls
    // back to default bean serialization.
    SimpleModule module = new SimpleModule();
    module.addSerializer(AbstractAdapterConfiguration.class,
            new AdapterConfigSerializer(AbstractAdapterConfiguration.class));
    mapper.registerModule(module);
    List<String> serializedConfigs = new ArrayList<>(configs.size());
    for (T config : configs) {
        try {
            serializedConfigs.add(mapper.writeValueAsString(config));
        } catch (JsonProcessingException e) {
            // keep the cause and say which adapter failed
            throw new IllegalStateException("Couldn't serialize the configuration " + config + ".", e);
        }
    }
    return serializedConfigs;
}
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This serialization is actually not necessary. See comment further above.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please remove it, if it is not used anymore.


public List<T> getAdaptersForName(String name) {
if (nameToAdapterMapping.containsKey(name)) {
return nameToAdapterMapping.get(name);
Expand Down
Loading
Loading