Skip to content

Commit

Permalink
Merge pull request #3632 from ingef/release
Browse files Browse the repository at this point in the history
Merge Release
  • Loading branch information
thoniTUB authored Dec 11, 2024
2 parents 9e4c021 + 330fd61 commit 19e6d2e
Show file tree
Hide file tree
Showing 105 changed files with 1,774 additions and 613 deletions.
59 changes: 53 additions & 6 deletions backend/pom.xml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

Expand All @@ -15,6 +15,8 @@
<apache-poi.version>5.2.2</apache-poi.version>
<apache-arrow.version>16.0.0</apache-arrow.version>
<apache-parquet.version>1.12.3</apache-parquet.version>
<prometheus-metrics.version>1.2.1</prometheus-metrics.version>
<openapi-generator.version>7.9.0</openapi-generator.version>
</properties>

<build>
Expand Down Expand Up @@ -75,6 +77,36 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.openapitools</groupId>
<artifactId>openapi-generator-maven-plugin</artifactId>
<version>${openapi-generator.version}</version>
<executions>
<execution>
<id>openapi-admin</id>
<goals>
<goal>generate</goal>
</goals>
<configuration>
<inputSpec>${project.basedir}/src/main/resources/openapi/admin-openapi.json</inputSpec>
<!-- Don't use jaxrs-jersey here because it does not allow dependency injection -->
<generatorName>jaxrs-spec</generatorName>
<modelsToGenerate>ApiResponse</modelsToGenerate>
<packageName>com.bakdata.conquery</packageName>
<!-- This package name is used by EndpointTestHelper to filter out openapi specs -->
<apiPackage>com.bakdata.conquery.models.api.openapi</apiPackage>
<modelPackage>com.bakdata.conquery.models.config</modelPackage>
<configOptions>
<!-- The implementation of these interfaces actually allows us to use DI (not possible with jaxrs-jersey) -->
<interfaceOnly>true</interfaceOnly>
<sourceFolder>src/main/java</sourceFolder>
<generateSupportingFiles>false</generateSupportingFiles>
<useJakartaEe>true</useJakartaEe>
</configOptions>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

Expand Down Expand Up @@ -123,7 +155,7 @@
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.11.0</version>
<version>2.18.0</version>
</dependency>
<dependency>
<groupId>com.univocity</groupId>
Expand Down Expand Up @@ -383,17 +415,32 @@
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>prometheus-metrics-simpleclient-bridge</artifactId>
<version>1.2.1</version>
<version>${prometheus-metrics.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>prometheus-metrics-exporter-servlet-jakarta</artifactId>
<version>1.2.1</version>
<version>${prometheus-metrics.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>prometheus-metrics-core</artifactId>
<version>1.2.1</version>
<version>${prometheus-metrics.version}</version>
</dependency>
<dependency>
<groupId>org.openapitools</groupId>
<artifactId>openapi-generator-maven-plugin</artifactId>
<version>${openapi-generator.version}</version>
</dependency>
<dependency>
<groupId>io.swagger.core.v3</groupId>
<artifactId>swagger-integration-jakarta</artifactId>
<version>2.2.25</version>
</dependency>
<dependency>
<groupId>io.swagger.core.v3</groupId>
<artifactId>swagger-jaxrs2-jakarta</artifactId>
<version>2.2.25</version>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ public boolean isSearchDisabled() {
public TrieSearch<FrontendValue> createTrieSearch(IndexConfig config) throws IndexCreationException {

final URI resolvedURI = FileUtil.getResolvedUri(config.getBaseUrl(), getFilePath());
log.trace("Resolved filter template reference url for search '{}': {}", this.getId(), resolvedURI);
log.trace("Resolved filter template reference url for search '{}': {}", getId(), resolvedURI);

final FrontendValueIndex search = indexService.getIndex(new FrontendValueIndexKey(
resolvedURI,
Expand All @@ -101,7 +101,7 @@ public TrieSearch<FrontendValue> createTrieSearch(IndexConfig config) throws Ind
config.getSearchSplitChars()
));

return search;
return search.getDelegate();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -107,35 +107,41 @@ public class QueryProcessor {
private Validator validator;


public Stream<ExecutionStatus> getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) {
public Stream<? extends ExecutionStatus> getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) {
final Stream<ManagedExecution> allQueries = storage.getAllExecutions();

return getQueriesFiltered(dataset.getId(), RequestAwareUriBuilder.fromRequest(req), subject, allQueries, allProviders);
}

public Stream<ExecutionStatus> getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Stream<ManagedExecution> allQueries, boolean allProviders) {
public Stream<? extends ExecutionStatus> getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Stream<ManagedExecution> allQueries, boolean allProviders) {

return allQueries
// The following only checks the dataset, under which the query was submitted, but a query can target more than
// one dataset.
.filter(q -> q.getDataset().equals(datasetId))
// to exclude subtypes from somewhere else
.filter(QueryProcessor::canFrontendRender)
.filter(Predicate.not(ManagedExecution::isSystem))
.filter(q -> {
ExecutionState state = q.getState();
return state == ExecutionState.NEW || state == ExecutionState.DONE;
}
)
.filter(q -> subject.isPermitted(q, Ability.READ))
.map(mq -> {
final OverviewExecutionStatus status = mq.buildStatusOverview(subject);

if (mq.isReadyToDownload()) {
status.setResultUrls(getResultAssets(config.getResultProviders(), mq, uriBuilder, allProviders));
}
return status;
});
// The following only checks the dataset, under which the query was submitted, but a query can target more than
// one dataset.
.filter(q -> q.getDataset().equals(datasetId))
// to exclude subtypes from somewhere else
.filter(QueryProcessor::canFrontendRender)
.filter(Predicate.not(ManagedExecution::isSystem))
.filter(q -> {
ExecutionState state = q.getState();
return state == ExecutionState.NEW || state == ExecutionState.DONE;
})
.filter(q -> subject.isPermitted(q, Ability.READ))
.map(mq -> {
try {
final OverviewExecutionStatus status = mq.buildStatusOverview(subject);

if (mq.isReadyToDownload()) {
status.setResultUrls(getResultAssets(config.getResultProviders(), mq, uriBuilder, allProviders));
}
return status;
}
catch (Exception e) {
log.error("FAILED building status for {}", mq, e);
}
return null;
})
.filter(Objects::nonNull);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ private static Map<ColumnId, Integer> calculateColumnPositions(
for (Column column : table.getConnector().resolve().getResolvedTable().getColumns()) {

// ValidityDates are handled separately in column=0
if (validityDates.stream().anyMatch(vd -> vd.containsColumn(column))) {
if (validityDates.stream().anyMatch(vd -> vd.containsColumn(column.getId()))) {
continue;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,10 +148,11 @@ public String defaultLabel(Locale locale) {
builder.append(" ");

for (ConceptElementId<?> id : elements) {
ConceptElement<?> conceptElement = id.resolve();
if (conceptElement.equals(getConcept())) {
if (id.equals(getConceptId())) {
continue;
}

ConceptElement<?> conceptElement = id.resolve();
builder.append(conceptElement.getLabel()).append("+");
}

Expand Down Expand Up @@ -274,9 +275,7 @@ public RequiredEntities collectRequiredEntities(QueryExecutionContext context) {
final Set<ConnectorId> connectors = getTables().stream().map(CQTable::getConnector).collect(Collectors.toSet());

return new RequiredEntities(context.getBucketManager()
.getEntitiesWithConcepts(getElements().stream()
.<ConceptElement<?>>map(ConceptElementId::resolve)
.toList(),
.getEntitiesWithConcepts(getElements(),
connectors, context.getDateRestriction()));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,11 @@ public void readDates(String value, DateReader dateReader, CDateSet out) {
DATE_SET {
@Override
public void readDates(String value, DateReader dateReader, CDateSet out) {
out.addAll(dateReader.parseToCDateSet(value));
CDateSet parsed = dateReader.parseToCDateSet(value);
if (parsed == null ) {
return;
}
out.addAll(parsed);
}
},
ALL {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import com.bakdata.conquery.models.identifiable.mapping.ExternalId;
import com.bakdata.conquery.util.DateReader;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

@Slf4j
public class EntityResolverUtil {
Expand All @@ -41,7 +42,7 @@ public static CDateSet[] readDates(String[][] values, List<String> format, DateR
but may also not contribute to any date aggregation.
*/
if (dateFormats.stream().allMatch(Objects::isNull)) {
// Initialize empty
// Initialize empty, so all lines appear as resolved
for (int row = 0; row < values.length; row++) {
out[row] = CDateSet.createEmpty();
}
Expand All @@ -59,10 +60,19 @@ public static CDateSet[] readDates(String[][] values, List<String> format, DateR
if (dateFormat == null) {
continue;
}
dateFormat.readDates(values[row][col], dateReader, dates);
String value = values[row][col];

if (StringUtils.isBlank(value)) {
log.trace("Found blank/null value in {}/{} (row/col)", row,col);
continue;
}

dateFormat.readDates(value, dateReader, dates);
}

if (dates.isEmpty()) {
// Don't set an empty dateset here, because this flags the line as: unresolvedDate
// TODO It might be better to set an empty dateset nonetheless, because it seems to be intentionally empty, as we had no problem while parsing a value
continue;
}

Expand All @@ -73,7 +83,9 @@ public static CDateSet[] readDates(String[][] values, List<String> format, DateR
out[row].addAll(dates);
}
catch (Exception e) {
log.warn("Failed to parse Date from {}", row, e);
// If a value is not parsable, it is included in the exceptions cause message (see DateReader)
log.trace("Failed to parse Date in row {}", row, e);
// This catch causes `out[row]` to remain `null` which later flags this line as: unresolvedDate
}
}

Expand Down Expand Up @@ -142,6 +154,7 @@ public static String tryResolveId(String[] row, List<Function<String[], External
*/
public static Map<String, String>[] readExtras(String[][] values, List<String> format) {
final String[] names = values[0];
@SuppressWarnings("unchecked")
final Map<String, String>[] extrasByRow = new Map[values.length];


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import com.bakdata.conquery.tasks.QueryCleanupTask;
import com.bakdata.conquery.tasks.ReloadMetaStorageTask;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import io.dropwizard.core.setup.Environment;
import io.dropwizard.lifecycle.Managed;
import lombok.Getter;
Expand All @@ -53,12 +52,11 @@ public class ManagerNode implements Managed {
public static final String DEFAULT_NAME = "manager";

private final String name;

private final List<ResourcesProvider> providers = new ArrayList<>();
private Validator validator;
private AdminServlet admin;
private AuthorizationController authController;
private ScheduledExecutorService maintenanceService;
private final List<ResourcesProvider> providers = new ArrayList<>();
@Delegate(excludes = Managed.class)
private Manager manager;

Expand Down Expand Up @@ -88,7 +86,8 @@ public void run(Manager manager) throws InterruptedException {
formScanner = new FormScanner(config);


config.initialize(this);
// Init all plugins
config.getPlugins().forEach(pluginConfig -> pluginConfig.initialize(this));


// Initialization of internationalization
Expand Down Expand Up @@ -127,34 +126,11 @@ public void run(Manager manager) throws InterruptedException {
}
}

try {
formScanner.execute(null, null);
}
catch (Exception e) {
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
formScanner.execute(null, null);

registerTasks(manager, environment, config);
}

private void registerTasks(Manager manager, Environment environment, ConqueryConfig config) {
environment.admin().addTask(formScanner);
environment.admin().addTask(
new QueryCleanupTask(getMetaStorage(), Duration.of(
config.getQueries().getOldQueriesTime().getQuantity(),
config.getQueries().getOldQueriesTime().getUnit().toChronoUnit()
)));

environment.admin().addTask(new PermissionCleanupTask(getMetaStorage()));
manager.getAdminTasks().forEach(environment.admin()::addTask);
environment.admin().addTask(new ReloadMetaStorageTask(getMetaStorage()));

final ShutdownTask shutdown = new ShutdownTask();
environment.admin().addTask(shutdown);
environment.lifecycle().addServerLifecycleListener(shutdown);
}

private void configureApiServlet(ConqueryConfig config, Environment environment) {
ResourceConfig jerseyConfig = environment.jersey().getResourceConfig();
RESTServer.configure(config, jerseyConfig);
Expand All @@ -171,14 +147,6 @@ protected void configure() {
jerseyConfig.register(PathParamInjector.class);
}

private void loadMetaStorage() {
log.info("Opening MetaStorage");
getMetaStorage().openStores(getInternalMapperFactory().createManagerPersistenceMapper(getDatasetRegistry(), getMetaStorage()), getEnvironment().metrics());
log.info("Loading MetaStorage");
getMetaStorage().loadData();
log.info("MetaStorage loaded {}", getMetaStorage());
}

@SneakyThrows(InterruptedException.class)
public void loadNamespaces() {

Expand All @@ -203,6 +171,31 @@ public void loadNamespaces() {
}
}

private void loadMetaStorage() {
log.info("Opening MetaStorage");
getMetaStorage().openStores(getInternalMapperFactory().createManagerPersistenceMapper(getDatasetRegistry(), getMetaStorage()), getEnvironment().metrics());
log.info("Loading MetaStorage");
getMetaStorage().loadData();
log.info("MetaStorage loaded {}", getMetaStorage());
}

private void registerTasks(Manager manager, Environment environment, ConqueryConfig config) {
environment.admin().addTask(formScanner);
environment.admin().addTask(
new QueryCleanupTask(getMetaStorage(), Duration.of(
config.getQueries().getOldQueriesTime().getQuantity(),
config.getQueries().getOldQueriesTime().getUnit().toChronoUnit()
)));

environment.admin().addTask(new PermissionCleanupTask(getMetaStorage()));
manager.getAdminTasks().forEach(environment.admin()::addTask);
environment.admin().addTask(new ReloadMetaStorageTask(getMetaStorage()));

final ShutdownTask shutdown = new ShutdownTask();
environment.admin().addTask(shutdown);
environment.lifecycle().addServerLifecycleListener(shutdown);
}

@Override
public void start() throws Exception {
manager.start();
Expand Down
Loading

0 comments on commit 19e6d2e

Please sign in to comment.