diff --git a/.github/workflows/test_backend.yml b/.github/workflows/test_backend.yml index 00a5fa85d3..e481274689 100644 --- a/.github/workflows/test_backend.yml +++ b/.github/workflows/test_backend.yml @@ -14,7 +14,7 @@ on: jobs: test: runs-on: ubuntu-latest - timeout-minutes: 10 + timeout-minutes: 15 steps: - name: Cache local Maven repository uses: actions/cache@v2 @@ -35,8 +35,11 @@ jobs: - name: Build Backend run: mvn -T 1C install -pl backend -DskipTests -am - name: Unit Test - run: mvn test -T 1C -pl backend -DexcludedGroups="INTEGRATION_PROGRAMMATIC, INTEGRATION_JSON" + run: mvn test -T 1C -pl backend -DexcludedGroups="INTEGRATION_PROGRAMMATIC, INTEGRATION_JSON, INTEGRATION_SQL_BACKEND" - name: Programmatic Integration Tests run: mvn test -T 1C -pl backend -Dgroups="INTEGRATION_PROGRAMMATIC" - name: JSON based Integration Tests run: mvn test -T 1C -pl backend -Dgroups="INTEGRATION_JSON" + - name: SQL based Integration Tests + if: ${{ startsWith(github.head_ref, 'sql/') }} + run: mvn test -T 1C -pl backend -Dgroups="INTEGRATION_SQL_BACKEND" diff --git a/backend/pom.xml b/backend/pom.xml index 033f7ed060..c2cdef06ad 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -346,5 +346,38 @@ <artifactId>auto-service</artifactId> <version>1.0.1</version> </dependency> + <dependency> + <groupId>org.jooq</groupId> + <artifactId>jooq</artifactId> + <version>3.18.3</version> + </dependency> + <dependency> + <groupId>org.jooq</groupId> + <artifactId>jooq-postgres-extensions</artifactId> + <version>3.18.3</version> + </dependency> + <dependency> + <groupId>com.zaxxer</groupId> + <artifactId>HikariCP</artifactId> + <version>5.0.1</version> + </dependency> + <dependency> + <groupId>org.testcontainers</groupId> + <artifactId>testcontainers</artifactId> + <version>1.17.6</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.testcontainers</groupId> + <artifactId>junit-jupiter</artifactId> + <version>1.17.6</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.testcontainers</groupId> + <artifactId>postgresql</artifactId> + <version>1.17.6</version> + <scope>test</scope> + </dependency> </dependencies> diff --git a/backend/src/main/java/com/bakdata/conquery/Conquery.java b/backend/src/main/java/com/bakdata/conquery/Conquery.java index 1f681a3d22..2ddccdb5f3 100644 --- a/backend/src/main/java/com/bakdata/conquery/Conquery.java +++ b/backend/src/main/java/com/bakdata/conquery/Conquery.java @@ -12,6 +12,10 @@ import com.bakdata.conquery.commands.StandaloneCommand; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.mode.Manager; +import com.bakdata.conquery.mode.ManagerProvider; +import com.bakdata.conquery.mode.cluster.ClusterManagerProvider; +import com.bakdata.conquery.mode.local.LocalManagerProvider; import com.bakdata.conquery.models.config.ConqueryConfig; import com.fasterxml.jackson.databind.ObjectMapper; import io.dropwizard.Application; @@ -34,7 +38,7 @@ public class Conquery extends Application<ConqueryConfig> { private final String name; @Setter - private ManagerNode manager; + private ManagerNode managerNode; public Conquery() { this("Conquery"); @@ -90,10 +94,16 @@ protected Level bootstrapLogLevel() { @Override public void run(ConqueryConfig configuration, Environment environment) throws Exception { - if (manager == null) { - manager = new ManagerNode(); + ManagerProvider provider = configuration.getSqlConnectorConfig().isEnabled() ? + new LocalManagerProvider() : new ClusterManagerProvider(); + run(provider.provideManager(configuration, environment)); + } + + public void run(Manager manager) throws InterruptedException { + if (managerNode == null) { + managerNode = new ManagerNode(); } - manager.run(configuration, environment); + managerNode.run(manager); } public static void main(String... 
args) throws Exception { diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java index 1c631e0ccc..43ccea532a 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java @@ -15,6 +15,7 @@ import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.resources.api.MeResource; import lombok.AllArgsConstructor; import lombok.Builder; @@ -34,7 +35,7 @@ public class MeProcessor { @Inject private MetaStorage storage; @Inject - private DatasetRegistry datasetRegistry; + private DatasetRegistry datasetRegistry; /** * Generates a summary of a user. It contains its name, the groups it belongs to and its permissions on a dataset. diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java index 4a9e91d0f9..119650f5dc 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java @@ -60,7 +60,6 @@ import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; -import com.bakdata.conquery.models.messages.namespaces.specific.CancelQuery; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.SingleTableResult; @@ -86,7 +85,7 @@ public class QueryProcessor { @Inject - private DatasetRegistry datasetRegistry; + private DatasetRegistry datasetRegistry; @Inject private MetaStorage storage; @Inject @@ -305,11 +304,8 @@ public void cancel(Subject subject, Dataset dataset, ManagedExecution query) { log.info("User[{}] cancelled Query[{}]", subject.getId(), query.getId()); - final Namespace namespace = datasetRegistry.get(dataset.getId()); - - query.reset(); - - namespace.sendToAll(new CancelQuery(query.getId())); + final ExecutionManager executionManager = datasetRegistry.get(dataset.getId()).getExecutionManager(); + executionManager.cancelQuery(dataset, query); } public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatch patch) { @@ -417,7 +413,7 @@ public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uri if (execution.getState() == ExecutionState.FAILED) { - throw ConqueryError.ContextError.fromErrorInfo(execution.getError()); + throw new ConqueryError.ExecutionProcessingError(); } @@ -465,7 +461,7 @@ public Stream> resolveEntities(Subject subject, List providers = new ArrayList<>(); private Client client; + @Delegate(excludes = Managed.class) + private Manager manager; // Resources without authentication private DropwizardResourceConfig unprotectedAuthApi; @@ -108,27 +86,20 @@ public ManagerNode(@NonNull String name) { this.name = name; } - public void run(ConqueryConfig config, Environment environment) throws InterruptedException { - this.environment = environment; - this.config = config; + public void run(Manager manager) throws InterruptedException { + Environment environment = manager.getEnvironment(); + ConqueryConfig config = 
manager.getConfig(); validator = environment.getValidator(); client = new JerseyClientBuilder(environment).using(config.getJerseyClient()) .build(getName()); - // Instantiate DatasetRegistry and MetaStorage, so they are ready for injection into the object mapper (API + Storage) - // The validator is already injected at this point see Conquery.java - datasetRegistry = new DatasetRegistry(config.getCluster().getEntityBucketSize(), config, this::createInternalObjectMapper); - storage = new MetaStorage(config.getStorage(), datasetRegistry); - datasetRegistry.setMetaStorage(storage); - + this.manager = manager; final ObjectMapper objectMapper = environment.getObjectMapper(); customizeApiObjectMapper(objectMapper); - jobManager = new JobManager("ManagerNode", config.isFailOnError()); - // FormScanner needs to be instantiated before plugins are initialized formScanner = new FormScanner(config); @@ -152,7 +123,7 @@ public void run(ConqueryConfig config, Environment environment) throws Interrupt loadMetaStorage(); - authController = new AuthorizationController(storage, config.getAuthorizationRealms()); + authController = new AuthorizationController(getStorage(), config.getAuthorizationRealms()); environment.lifecycle().manage(authController); unprotectedAuthAdmin = AuthServlet.generalSetup(environment.metrics(), config, environment.admin(), objectMapper); @@ -189,13 +160,13 @@ public void run(ConqueryConfig config, Environment environment) throws Interrupt environment.admin().addTask(formScanner); environment.admin().addTask( - new QueryCleanupTask(storage, Duration.of( + new QueryCleanupTask(getStorage(), Duration.of( config.getQueries().getOldQueriesTime().getQuantity(), config.getQueries().getOldQueriesTime().getUnit().toChronoUnit() ))); - environment.admin().addTask(new PermissionCleanupTask(storage)); - environment.admin().addTask(new ReportConsistencyTask(datasetRegistry)); - environment.admin().addTask(new ReloadMetaStorageTask(storage)); + environment.admin().addTask(new PermissionCleanupTask(getStorage())); + manager.getAdminTasks().forEach(environment.admin()::addTask); + environment.admin().addTask(new ReloadMetaStorageTask(getStorage())); final ShutdownTask shutdown = new ShutdownTask(); environment.admin().addTask(shutdown); @@ -207,8 +178,8 @@ private void configureApiServlet(ConqueryConfig config, DropwizardResourceConfig jerseyConfig.register(new AbstractBinder() { @Override protected void configure() { - bind(storage).to(MetaStorage.class); - bind(datasetRegistry).to(DatasetRegistry.class); + bind(getStorage()).to(MetaStorage.class); + bind(getDatasetRegistry()).to(DatasetRegistry.class); } }); @@ -257,42 +228,15 @@ public void customizeApiObjectMapper(ObjectMapper objectMapper) { * @see ManagerNode#customizeApiObjectMapper(ObjectMapper) */ public ObjectMapper createInternalObjectMapper(Class viewClass) { - final ObjectMapper objectMapper = getConfig().configureObjectMapper(Jackson.BINARY_MAPPER.copy()); - - - final MutableInjectableValues injectableValues = new MutableInjectableValues(); - objectMapper.setInjectableValues(injectableValues); - injectableValues.add(Validator.class, getValidator()); - getDatasetRegistry().injectInto(objectMapper); - getStorage().injectInto(objectMapper); - getConfig().injectInto(objectMapper); - - - if (viewClass != null) { - // Set serialization config - SerializationConfig serializationConfig = objectMapper.getSerializationConfig(); - - serializationConfig = serializationConfig.withView(viewClass); - - objectMapper.setConfig(serializationConfig); 
- - // Set deserialization config - DeserializationConfig deserializationConfig = objectMapper.getDeserializationConfig(); - - deserializationConfig = deserializationConfig.withView(viewClass); - - objectMapper.setConfig(deserializationConfig); - } - - return objectMapper; + return getInternalObjectMapperCreator().createInternalObjectMapper(viewClass); } private void loadMetaStorage() { log.info("Opening MetaStorage"); - storage.openStores(createInternalObjectMapper(View.Persistence.Manager.class)); + getStorage().openStores(getInternalObjectMapperCreator().createInternalObjectMapper(View.Persistence.Manager.class)); log.info("Loading MetaStorage"); - storage.loadData(); - log.info("MetaStorage loaded {}", storage); + getStorage().loadData(); + log.info("MetaStorage loaded {}", getStorage()); } @SneakyThrows(InterruptedException.class) @@ -302,94 +246,30 @@ public void loadNamespaces() { ExecutorService loaders = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); // Namespaces load their storage themselves, so they can inject Namespace relevant objects into stored objects - final Collection namespaceStorages = config.getStorage().discoverNamespaceStorages(); + final Collection namespaceStorages = getConfig().getStorage().discoverNamespaceStorages(); for (NamespaceStorage namespaceStorage : namespaceStorages) { loaders.submit(() -> { - datasetRegistry.createNamespace(namespaceStorage); + getDatasetRegistry().createNamespace(namespaceStorage); }); } loaders.shutdown(); while (!loaders.awaitTermination(1, TimeUnit.MINUTES)) { - final int coundLoaded = datasetRegistry.getDatasets().size(); + final int coundLoaded = getDatasetRegistry().getDatasets().size(); log.debug("Waiting for Worker namespaces to load. {} are already finished. 
{} pending.", coundLoaded, namespaceStorages.size() - coundLoaded); } } - @Override - public void sessionOpened(IoSession session) { - ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); - log.info("New client {} connected, waiting for identity", session.getRemoteAddress()); - } - - @Override - public void sessionClosed(IoSession session) { - ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); - log.info("Client '{}' disconnected ", session.getAttribute(MinaAttributes.IDENTIFIER)); - } - - @Override - public void exceptionCaught(IoSession session, Throwable cause) { - ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); - log.error("caught exception", cause); - } - - @Override - public void messageReceived(IoSession session, Object message) { - ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); - if (message instanceof MessageToManagerNode toManagerNode) { - - log.trace("ManagerNode received {} from {}", message.getClass().getSimpleName(), session.getRemoteAddress()); - - Job job = new ReactingJob<>(toManagerNode, new NetworkMessageContext.ManagerNodeNetworkContext( - new NetworkSession(session), - datasetRegistry, config.getCluster().getBackpressure() - )); - - if (toManagerNode instanceof SlowMessage slowMessage) { - slowMessage.setProgressReporter(job.getProgressReporter()); - jobManager.addSlowJob(job); - } - else { - jobManager.addFastJob(job); - } - } - else { - log.error("Unknown message type {} in {}", message.getClass(), message); - } - } - @Override public void start() throws Exception { - acceptor = new NioSocketAcceptor(); - - ObjectMapper om = createInternalObjectMapper(View.InternalCommunication.class); - config.configureObjectMapper(om); - BinaryJacksonCoder coder = new BinaryJacksonCoder(datasetRegistry, validator, om); - acceptor.getFilterChain().addLast("codec", new CQProtocolCodecFilter(new ChunkWriter(coder), new ChunkReader(coder, om))); - acceptor.setHandler(this); - acceptor.getSessionConfig().setAll(config.getCluster().getMina()); - acceptor.bind(new InetSocketAddress(config.getCluster().getPort())); - log.info("Started ManagerNode @ {}", acceptor.getLocalAddress()); + manager.start(); } @Override public void stop() throws Exception { - datasetRegistry.getShardNodes().forEach(((socketAddress, shardNodeInformation) -> shardNodeInformation.send(new ShutdownShard()))); - - jobManager.close(); - - datasetRegistry.close(); - - try { - acceptor.dispose(); - } - catch (Exception e) { - log.error(acceptor + " could not be closed", e); - } - + manager.stop(); for (ResourcesProvider provider : providers) { try { provider.close(); @@ -399,11 +279,12 @@ public void stop() throws Exception { } } + try { - storage.close(); + getStorage().close(); } catch (Exception e) { - log.error(storage + " could not be closed", e); + log.error("{} could not be closed", getStorage(), e); } client.close(); diff --git a/backend/src/main/java/com/bakdata/conquery/commands/StandaloneCommand.java b/backend/src/main/java/com/bakdata/conquery/commands/StandaloneCommand.java index 54683dd211..9692a2a79d 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/StandaloneCommand.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/StandaloneCommand.java @@ -11,6 +11,8 @@ import java.util.concurrent.TimeUnit; import com.bakdata.conquery.Conquery; +import com.bakdata.conquery.mode.cluster.ClusterManager; +import 
com.bakdata.conquery.mode.cluster.ClusterManagerProvider; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.XodusStoreFactory; import com.bakdata.conquery.util.io.ConqueryMDC; @@ -26,7 +28,8 @@ public class StandaloneCommand extends io.dropwizard.cli.ServerCommand { private final Conquery conquery; - private ManagerNode manager = new ManagerNode(); + private ClusterManager manager; + private ManagerNode managerNode = new ManagerNode(); private final List shardNodes = new Vector<>(); // TODO clean up the command structure, so we can use the Environment from EnvironmentCommand @@ -68,9 +71,10 @@ protected void startStandalone(Environment environment, Namespace namespace, Con managerConfig = config.withStorage(((XodusStoreFactory) config.getStorage()).withDirectory(managerDir)); } + manager = new ClusterManagerProvider().provideManager(managerConfig, environment); - conquery.setManager(manager); - conquery.run(managerConfig, environment); + conquery.setManagerNode(managerNode); + conquery.run(manager); //create thread pool to start multiple ShardNodes at the same time ExecutorService starterPool = Executors.newFixedThreadPool( diff --git a/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalFormBackendApi.java b/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalFormBackendApi.java index 42e3fe35bd..23fee29160 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalFormBackendApi.java +++ b/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalFormBackendApi.java @@ -3,6 +3,7 @@ import java.net.URI; import java.net.URL; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.function.Function; @@ -19,6 +20,7 @@ import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.config.auth.AuthenticationClientFilterProvider; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.i18n.I18n; import com.codahale.metrics.health.HealthCheck; import com.fasterxml.jackson.databind.node.ObjectNode; import io.dropwizard.health.check.http.HttpHealthCheck; @@ -27,27 +29,25 @@ @Slf4j public class ExternalFormBackendApi { + public final static String TASK_ID = "task-id"; // Custom headers for form post private static final String HTTP_HEADER_CQ_API_URL = "X-CQ-Api-Url"; private static final String HTTP_HEADER_CQ_AUTHENTICATION = "X-CQ-Authentication"; private static final String HTTP_HEADER_CQ_AUTHENTICATION_ORIGINAL = "X-CQ-Authentication-Original"; - // Custom query-params for form post private static final String QUERY_SCOPE = "scope"; private static final String QUERY_DATASET = "dataset"; - - public final static String TASK_ID = "task-id"; - private final Client client; private final WebTarget formConfigTarget; private final WebTarget postFormTarget; private final WebTarget getStatusTarget; + private final WebTarget cancelTaskTarget; private final WebTarget getHealthTarget; private final Function tokenCreator; private final WebTarget baseTarget; private final URL conqueryApiUrl; - public ExternalFormBackendApi(Client client, URI baseURI, String formConfigPath, String postFormPath, String statusTemplatePath, String healthCheckPath, Function tokenCreator, URL conqueryApiUrl, AuthenticationClientFilterProvider authFilterProvider) { + public ExternalFormBackendApi(Client client, URI baseURI, String formConfigPath, String postFormPath, String statusTemplatePath, String cancelTaskPath, String 
healthCheckPath, Function tokenCreator, URL conqueryApiUrl, AuthenticationClientFilterProvider authFilterProvider) { this.client = client; this.tokenCreator = tokenCreator; @@ -62,6 +62,7 @@ public ExternalFormBackendApi(Client client, URI baseURI, String formConfigPath, postFormTarget = baseTarget.path(postFormPath); getStatusTarget = baseTarget.path(statusTemplatePath); + cancelTaskTarget = baseTarget.path(cancelTaskPath); getHealthTarget = baseTarget.path(healthCheckPath); } @@ -69,8 +70,10 @@ public ExternalFormBackendApi(Client client, URI baseURI, String formConfigPath, public List getFormConfigs() { log.debug("Getting form configurations from: {}", formConfigTarget); - return formConfigTarget.request(MediaType.APPLICATION_JSON_TYPE).buildGet().invoke(new GenericType<>() { - }); + return formConfigTarget.request(MediaType.APPLICATION_JSON_TYPE) + .acceptLanguage(I18n.LOCALE.get()) + .buildGet().invoke(new GenericType<>() { + }); } public ExternalTaskState postForm(ExternalForm form, User originalUser, User serviceUser, Dataset dataset) { @@ -79,13 +82,11 @@ public ExternalTaskState postForm(ExternalForm form, User originalUser, User ser // Set headers WebTarget webTarget = postFormTarget.queryParam(QUERY_DATASET, dataset.getId()); - if (!originalUser.isPermitted(dataset, Ability.DOWNLOAD)) { - // If user is not allowed to download, only provide them with statistics. - webTarget = webTarget.queryParam(QUERY_SCOPE, DatasetDetail.STATISTIC); - } - else { - webTarget = webTarget.queryParam(QUERY_SCOPE, DatasetDetail.FULL); - } + + // If user is not allowed to download, only provide them with statistics. + + final DatasetDetail detail = originalUser.isPermitted(dataset, Ability.DOWNLOAD) ? DatasetDetail.FULL : DatasetDetail.STATISTIC; + webTarget = webTarget.queryParam(QUERY_SCOPE, detail); final Invocation.Builder request = webTarget.request(MediaType.APPLICATION_JSON_TYPE); @@ -95,30 +96,48 @@ public ExternalTaskState postForm(ExternalForm form, User originalUser, User ser request.header(HTTP_HEADER_CQ_API_URL, conqueryApiUrl) .header(HTTP_HEADER_CQ_AUTHENTICATION, serviceUserToken) - .header(HTTP_HEADER_CQ_AUTHENTICATION_ORIGINAL, originalUserToken); + .header(HTTP_HEADER_CQ_AUTHENTICATION_ORIGINAL, originalUserToken) + .acceptLanguage(I18n.LOCALE.get()) + ; ExternalTaskState post = request.post(Entity.entity(form.getExternalApiPayload(), MediaType.APPLICATION_JSON_TYPE), ExternalTaskState.class); return post; } public ExternalTaskState getFormState(UUID externalId) { + final WebTarget getStatusTargetResolved = getStatusTarget.resolveTemplate(TASK_ID, externalId); log.debug("Getting status from: {}", getStatusTargetResolved); - return getStatusTargetResolved.request(MediaType.APPLICATION_JSON_TYPE).get(ExternalTaskState.class); + return getStatusTargetResolved.request(MediaType.APPLICATION_JSON_TYPE) + .acceptLanguage(I18n.LOCALE.get()) + .get(ExternalTaskState.class); } public Response getResult(final URI resultURL) { log.debug("Query external form result from {}", resultURL); - return client.target(baseTarget.getUri().resolve(resultURL)).request().get(); + return client.target(baseTarget.getUri().resolve(resultURL)).request() + .acceptLanguage(I18n.LOCALE.get()) + .get(); } public HealthCheck createHealthCheck() { - return new HttpHealthCheck( - getHealthTarget.getUri().toString(), client - ); + return new HttpHealthCheck(getHealthTarget.getUri().toString(), client); } + public ExternalTaskState cancelTask(UUID taskId) { + log.debug("Cancelling task {}", taskId); + + final 
ExternalTaskState taskState = cancelTaskTarget.resolveTemplates(Map.of(TASK_ID, taskId)) + .request() + .post(null, ExternalTaskState.class); + + if (taskState.getStatus() != TaskStatus.CANCELLED) { + log.warn("Task `{}` was cancelled, but is still in state {}", taskId, taskState.getStatus()); + } + + return taskState; + } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalTaskState.java b/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalTaskState.java index 6d673d8c3a..b4c56d189a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalTaskState.java +++ b/backend/src/main/java/com/bakdata/conquery/io/external/form/ExternalTaskState.java @@ -10,7 +10,7 @@ import javax.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.execution.ResultAsset; -import com.bakdata.conquery.models.error.PlainError; +import com.bakdata.conquery.models.error.SimpleErrorInfo; import com.fasterxml.jackson.annotation.JsonAlias; import com.fasterxml.jackson.annotation.JsonIgnore; import io.dropwizard.validation.ValidationMethod; @@ -35,7 +35,11 @@ public class ExternalTaskState { @DecimalMin("0.0") @DecimalMax("1.0") private final BigDecimal progress; - + /** + * Short description of the possible Error. + * Only set when {@link ExternalTaskState#status} {@code = FAILURE}. + */ + private final SimpleErrorInfo error; /** * The result url. * Only set when {@link ExternalTaskState#status} {@code = SUCCESS}. @@ -43,16 +47,10 @@ public class ExternalTaskState { @Valid private List<@Valid ResultAsset> results; - /** - * Short description of the possible Error. - * Only set when {@link ExternalTaskState#status} {@code = FAILURE}. - */ - private final PlainError error; - @JsonIgnore @ValidationMethod(message = "Invalid 'taskId' for provided state") public boolean isValidTaskId() { - if (status.equals(TaskStatus.FAILURE)) { + if (status == TaskStatus.FAILURE) { return true; } return id != null; @@ -61,13 +59,14 @@ public boolean isValidTaskId() { @JsonIgnore @ValidationMethod(message = "Status is set to FAILURE, but no error information was set.") public boolean isErrorInfoSet() { - if (status != TaskStatus.FAILURE) { - return true; + if (status == TaskStatus.FAILURE) { + return error != null; } - return error != null; + return true; } + @JsonIgnore @ValidationMethod(message = "Result assets don't have unique ids") public boolean isResultAssetIdUnique() { diff --git a/backend/src/main/java/com/bakdata/conquery/io/external/form/TaskStatus.java b/backend/src/main/java/com/bakdata/conquery/io/external/form/TaskStatus.java index 2660b2d1e8..41db5bb9b2 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/external/form/TaskStatus.java +++ b/backend/src/main/java/com/bakdata/conquery/io/external/form/TaskStatus.java @@ -2,7 +2,5 @@ public enum TaskStatus { - RUNNING, - FAILURE, - SUCCESS + RUNNING, FAILURE, SUCCESS, CANCELLED } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jersey/RESTServer.java b/backend/src/main/java/com/bakdata/conquery/io/jersey/RESTServer.java index ad3d7f9bba..2531a57e46 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jersey/RESTServer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jersey/RESTServer.java @@ -3,7 +3,7 @@ import com.bakdata.conquery.io.jetty.CORSPreflightRequestFilter; import com.bakdata.conquery.io.jetty.CORSResponseFilter; import com.bakdata.conquery.io.jetty.CachingFilter; -import com.bakdata.conquery.io.jetty.ConqueryErrorExecptionMapper; +import 
com.bakdata.conquery.io.jetty.ConqueryErrorExceptionMapper; import com.bakdata.conquery.io.jetty.ConqueryJsonExceptionMapper; import com.bakdata.conquery.io.jetty.JsonValidationExceptionMapper; import com.bakdata.conquery.io.jetty.NoSuchElementExceptionMapper; @@ -35,7 +35,7 @@ public static void configure(ConqueryConfig config, ResourceConfig jersey) { jersey.register(ViewRenderExceptionMapper.class); jersey.register(NoSuchElementExceptionMapper.class); // default Dropwizard's exception mappers - jersey.register(new ConqueryErrorExecptionMapper()); + jersey.register(new ConqueryErrorExceptionMapper()); jersey.register(ConqueryJsonExceptionMapper.class); jersey.register(new LoggingExceptionMapper() {}); jersey.register(new EarlyEofExceptionMapper()); diff --git a/backend/src/main/java/com/bakdata/conquery/io/jetty/ConqueryErrorExecptionMapper.java b/backend/src/main/java/com/bakdata/conquery/io/jetty/ConqueryErrorExceptionMapper.java similarity index 74% rename from backend/src/main/java/com/bakdata/conquery/io/jetty/ConqueryErrorExecptionMapper.java rename to backend/src/main/java/com/bakdata/conquery/io/jetty/ConqueryErrorExceptionMapper.java index 21102e808c..22d52925f9 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jetty/ConqueryErrorExecptionMapper.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jetty/ConqueryErrorExceptionMapper.java @@ -1,18 +1,17 @@ package com.bakdata.conquery.io.jetty; -import com.bakdata.conquery.models.error.ConqueryError; -import io.dropwizard.jersey.errors.ErrorMessage; - import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.ext.ExceptionMapper; -public class ConqueryErrorExecptionMapper implements ExceptionMapper { +import com.bakdata.conquery.models.error.ConqueryError; + +public class ConqueryErrorExceptionMapper implements ExceptionMapper { @Override public Response toResponse(ConqueryError exception) { return Response.status(Response.Status.BAD_REQUEST.getStatusCode()) .type(MediaType.APPLICATION_JSON_TYPE) - .entity(exception.asPlain()) + .entity(exception) .build(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java index ee294b92d8..026a549572 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java @@ -18,6 +18,7 @@ import com.bakdata.conquery.models.identifiable.ids.specific.RoleId; import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; @@ -35,7 +36,7 @@ public class MetaStorage extends ConqueryStorage implements Injectable { private final StoreFactory storageFactory; @Getter - protected final DatasetRegistry datasetRegistry; + protected final DatasetRegistry datasetRegistry; private IdentifiableStore executions; private IdentifiableStore formConfigs; private IdentifiableStore authUser; diff --git a/backend/src/main/java/com/bakdata/conquery/mode/DelegateManager.java b/backend/src/main/java/com/bakdata/conquery/mode/DelegateManager.java new file mode 100644 index 0000000000..b4bf4ef7cd --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/DelegateManager.java @@ -0,0 +1,48 @@ +package 
com.bakdata.conquery.mode; + +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.ShardNodeInformation; +import io.dropwizard.servlets.tasks.Task; +import io.dropwizard.setup.Environment; +import lombok.Value; + +/** + * Generic manager that contains shared data. + * + * @param type of the namespace + */ +@Value +public class DelegateManager implements Manager { + ConqueryConfig config; + Environment environment; + DatasetRegistry datasetRegistry; + ImportHandler importHandler; + StorageListener storageListener; + Supplier> nodeProvider; + List adminTasks; + InternalObjectMapperCreator internalObjectMapperCreator; + JobManager jobManager; + + @Override + public void start() throws Exception { + } + + @Override + public void stop() throws Exception { + jobManager.close(); + datasetRegistry.close(); + } + + @Override + public MetaStorage getStorage() { + return datasetRegistry.getMetaStorage(); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/ImportHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/ImportHandler.java new file mode 100644 index 0000000000..b91e335596 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/ImportHandler.java @@ -0,0 +1,19 @@ +package com.bakdata.conquery.mode; + +import java.io.InputStream; + +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.worker.Namespace; + +/** + * Handler of {@link Import} requests. + */ +public interface ImportHandler { + + void updateImport(Namespace namespace, InputStream inputStream); + + void addImport(Namespace namespace, InputStream inputStream); + + void deleteImport(Import imp); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/InternalObjectMapperCreator.java b/backend/src/main/java/com/bakdata/conquery/mode/InternalObjectMapperCreator.java new file mode 100644 index 0000000000..3706f728a0 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/InternalObjectMapperCreator.java @@ -0,0 +1,68 @@ +package com.bakdata.conquery.mode; + +import javax.annotation.Nullable; +import javax.validation.Validator; + +import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.jackson.View; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; +import com.fasterxml.jackson.databind.DeserializationConfig; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationConfig; +import lombok.Getter; +import lombok.RequiredArgsConstructor; + +/** + * Creator for internal object mapper in the manager. 
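
For orientation, the intended call order of this creator — constructed early, completed once the registry exists — as a minimal sketch. This is not code from this diff; `config`, `environment` and `namespaceHandler` are assumed to be in scope, and the generic type arguments are inferred because this diff's rendering dropped most `<...>` parameters.

```java
// Sketch of the two-phase initialization; mirrors ClusterManagerProvider below.
InternalObjectMapperCreator creator =
		ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator());

// The registry needs the creator, so the creator starts out without a registry ...
DatasetRegistry<DistributedNamespace> datasetRegistry =
		ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator);

// ... and must be completed before the first mapper is requested, otherwise
// createInternalObjectMapper(...) throws IllegalStateException (see guard below).
creator.init(datasetRegistry);

ObjectMapper persistenceMapper =
		creator.createInternalObjectMapper(View.Persistence.Manager.class);
```
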
+ */ +@Getter +@RequiredArgsConstructor +public class InternalObjectMapperCreator { + private final ConqueryConfig config; + private final Validator validator; + private DatasetRegistry datasetRegistry = null; + private MetaStorage storage = null; + + public void init(DatasetRegistry datasetRegistry) { + this.datasetRegistry = datasetRegistry; + this.storage = datasetRegistry.getMetaStorage(); + } + + public ObjectMapper createInternalObjectMapper(@Nullable Class viewClass) { + if (datasetRegistry == null || storage == null) { + throw new IllegalStateException("%s must be initialized by calling its init method".formatted(this.getClass().getSimpleName())); + } + + final ObjectMapper objectMapper = getConfig().configureObjectMapper(Jackson.BINARY_MAPPER.copy()); + + final MutableInjectableValues injectableValues = new MutableInjectableValues(); + objectMapper.setInjectableValues(injectableValues); + injectableValues.add(Validator.class, getValidator()); + getDatasetRegistry().injectInto(objectMapper); + getStorage().injectInto(objectMapper); + getConfig().injectInto(objectMapper); + + + if (viewClass != null) { + // Set serialization config + SerializationConfig serializationConfig = objectMapper.getSerializationConfig(); + + serializationConfig = serializationConfig.withView(viewClass); + + objectMapper.setConfig(serializationConfig); + + // Set deserialization config + DeserializationConfig deserializationConfig = objectMapper.getDeserializationConfig(); + + deserializationConfig = deserializationConfig.withView(viewClass); + + objectMapper.setConfig(deserializationConfig); + } + + return objectMapper; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/Manager.java b/backend/src/main/java/com/bakdata/conquery/mode/Manager.java new file mode 100644 index 0000000000..d2cc6fee79 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/Manager.java @@ -0,0 +1,31 @@ +package com.bakdata.conquery.mode; + +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.ShardNodeInformation; +import io.dropwizard.lifecycle.Managed; +import io.dropwizard.servlets.tasks.Task; +import io.dropwizard.setup.Environment; + +/** + * A manager provides the implementations that differ by running mode. 
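
Concrete managers compose the shared `DelegateManager` and keep only the `Managed` lifecycle to themselves — the same `@Delegate(excludes = Managed.class)` pattern used by `ManagerNode` above and `ClusterManager` below. A hypothetical additional run mode would be wired roughly like this (sketch with an invented class name; the type argument on `DelegateManager` is inferred):

```java
import com.bakdata.conquery.mode.DelegateManager;
import com.bakdata.conquery.mode.Manager;
import com.bakdata.conquery.models.worker.Namespace;
import io.dropwizard.lifecycle.Managed;
import lombok.RequiredArgsConstructor;
import lombok.experimental.Delegate;

// Hypothetical additional run mode; composition mirrors ClusterManager below.
@RequiredArgsConstructor
public class MyModeManager implements Manager {

	// Lombok forwards every Manager accessor to the shared DelegateManager,
	// except the Managed lifecycle methods, which are implemented by hand.
	@Delegate(excludes = Managed.class)
	private final DelegateManager<Namespace> delegate;

	@Override
	public void start() throws Exception {
		delegate.start();
		// mode-specific startup would go here
	}

	@Override
	public void stop() throws Exception {
		// mode-specific shutdown would go here
		delegate.stop();
	}
}
```
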
+ */ +public interface Manager extends Managed { + ConqueryConfig getConfig(); + Environment getEnvironment(); + DatasetRegistry getDatasetRegistry(); + ImportHandler getImportHandler(); + StorageListener getStorageListener(); + Supplier> getNodeProvider(); + List getAdminTasks(); + InternalObjectMapperCreator getInternalObjectMapperCreator(); + JobManager getJobManager(); + MetaStorage getStorage(); +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/ManagerProvider.java b/backend/src/main/java/com/bakdata/conquery/mode/ManagerProvider.java new file mode 100644 index 0000000000..f0566764c5 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/ManagerProvider.java @@ -0,0 +1,42 @@ +package com.bakdata.conquery.mode; + +import javax.validation.Validator; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; +import io.dropwizard.setup.Environment; + +/** + * Provider for {@link Manager}. + */ +public interface ManagerProvider { + + String JOB_MANAGER_NAME = "ManagerNode"; + + Manager provideManager(ConqueryConfig config, Environment environment); + + static JobManager newJobManager(ConqueryConfig config) { + return new JobManager(JOB_MANAGER_NAME, config.isFailOnError()); + } + + static InternalObjectMapperCreator newInternalObjectMapperCreator(ConqueryConfig config, Validator validator) { + return new InternalObjectMapperCreator(config, validator); + } + + static DatasetRegistry createDatasetRegistry(NamespaceHandler namespaceHandler, ConqueryConfig config, + InternalObjectMapperCreator creator) { + DatasetRegistry datasetRegistry = new DatasetRegistry<>( + config.getCluster().getEntityBucketSize(), + config, + creator, + namespaceHandler + ); + MetaStorage storage = new MetaStorage(config.getStorage(), datasetRegistry); + datasetRegistry.setMetaStorage(storage); + return datasetRegistry; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java new file mode 100644 index 0000000000..b6087f809f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java @@ -0,0 +1,54 @@ +package com.bakdata.conquery.mode; + +import java.util.ArrayList; +import java.util.List; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.View; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.index.IndexService; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.query.FilterSearch; +import com.bakdata.conquery.models.worker.Namespace; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * Handler of namespaces in a ConQuery instance. + * + * @param type of the namespace. + */ +public interface NamespaceHandler { + + N createNamespace(NamespaceStorage storage, MetaStorage metaStorage); + + void removeNamespace(DatasetId id, N namespace); + + /** + * Creates the {@link NamespaceSetupData} that is shared by all {@link Namespace} types. 
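
Both run modes implement this interface; the static helper that follows centralizes the storage, mapper, job-manager and filter-search boilerplate, so an implementation only adds its mode-specific pieces. A hypothetical skeleton — `MyNamespace` and its constructor are invented, compare `ClusterNamespaceHandler` further down:

```java
// Hypothetical NamespaceHandler for some new Namespace subtype "MyNamespace".
@RequiredArgsConstructor
public class MyNamespaceHandler implements NamespaceHandler<MyNamespace> {
	private final ConqueryConfig config;
	private final InternalObjectMapperCreator mapperCreator;

	@Override
	public MyNamespace createNamespace(NamespaceStorage storage, MetaStorage metaStorage) {
		// Shared boilerplate: opens the stores and builds mappers, job manager, filter search.
		NamespaceSetupData setup = NamespaceHandler.createNamespaceSetup(storage, config, mapperCreator);
		return new MyNamespace(storage, setup);
	}

	@Override
	public void removeNamespace(DatasetId id, MyNamespace namespace) {
		// nothing mode-specific to tear down in this sketch
	}
}
```
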
+ */ + static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final ConqueryConfig config, final InternalObjectMapperCreator mapperCreator) { + List injectables = new ArrayList<>(); + final IndexService indexService = new IndexService(config.getCsv().createCsvParserSettings()); + injectables.add(indexService); + ObjectMapper persistenceMapper = mapperCreator.createInternalObjectMapper(View.Persistence.Manager.class); + ObjectMapper communicationMapper = mapperCreator.createInternalObjectMapper(View.InternalCommunication.class); + ObjectMapper preprocessMapper = mapperCreator.createInternalObjectMapper(null); + + injectables.forEach(i -> i.injectInto(persistenceMapper)); + injectables.forEach(i -> i.injectInto(communicationMapper)); + injectables.forEach(i -> i.injectInto(preprocessMapper)); + + // Open and load the stores + storage.openStores(persistenceMapper); + storage.loadData(); + + JobManager jobManager = new JobManager(storage.getDataset().getName(), config.isFailOnError()); + + FilterSearch filterSearch = new FilterSearch(storage, jobManager, config.getCsv(), config.getIndex()); + return new NamespaceSetupData(injectables, indexService, communicationMapper, preprocessMapper, jobManager, filterSearch); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/NamespaceSetupData.java b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceSetupData.java new file mode 100644 index 0000000000..779205b73d --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceSetupData.java @@ -0,0 +1,23 @@ +package com.bakdata.conquery.mode; + +import java.util.List; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.models.index.IndexService; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.query.FilterSearch; +import com.fasterxml.jackson.databind.ObjectMapper; +import lombok.Value; + +/** + * Data required for the set-up of a namespace. + */ +@Value +public class NamespaceSetupData { + List injectables; + IndexService indexService; + ObjectMapper communicationMapper; + ObjectMapper preprocessMapper; + JobManager jobManager; + FilterSearch filterSearch; +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java new file mode 100644 index 0000000000..ecb7a982c3 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java @@ -0,0 +1,27 @@ +package com.bakdata.conquery.mode; + +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; + +/** + * Listener for updates of stored entities in ConQuery. 
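
In cluster mode each callback of the interface below becomes a message to the shards (see `ClusterStorageListener` further down). A single-node SQL mode has no shards to notify, so its listener can plausibly be a no-op — a hypothetical minimal implementation, not taken from this PR, with the generics on `Concept` inferred:

```java
// Hypothetical no-op StorageListener for a mode without shard nodes;
// shown only to illustrate the contract of the interface below.
public class NoOpStorageListener implements StorageListener {
	@Override public void onAddSecondaryId(SecondaryIdDescription secondaryId) { /* no cluster to notify */ }
	@Override public void onDeleteSecondaryId(SecondaryIdDescription description) { }
	@Override public void onAddTable(Table table) { }
	@Override public void onRemoveTable(Table table) { }
	@Override public void onAddConcept(Concept<?> concept) { }
	@Override public void onDeleteConcept(Concept<?> concept) { }
	@Override public void onUpdateMatchingStats(Dataset dataset) { }
}
```
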
+ */ +public interface StorageListener { + + void onAddSecondaryId(SecondaryIdDescription secondaryId); + + void onDeleteSecondaryId(SecondaryIdDescription description); + + void onAddTable(Table table); + + void onRemoveTable(Table table); + + void onAddConcept(Concept concept); + + void onDeleteConcept(Concept concept); + + void onUpdateMatchingStats(final Dataset dataset); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionManager.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionManager.java new file mode 100644 index 0000000000..b9101d6b62 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionManager.java @@ -0,0 +1,122 @@ +package com.bakdata.conquery.mode.cluster; + +import java.io.IOException; +import java.net.InetSocketAddress; + +import javax.validation.Validator; + +import com.bakdata.conquery.io.jackson.View; +import com.bakdata.conquery.io.mina.BinaryJacksonCoder; +import com.bakdata.conquery.io.mina.CQProtocolCodecFilter; +import com.bakdata.conquery.io.mina.ChunkReader; +import com.bakdata.conquery.io.mina.ChunkWriter; +import com.bakdata.conquery.io.mina.MinaAttributes; +import com.bakdata.conquery.io.mina.NetworkSession; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.jobs.Job; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.jobs.ReactingJob; +import com.bakdata.conquery.models.messages.SlowMessage; +import com.bakdata.conquery.models.messages.namespaces.specific.ShutdownShard; +import com.bakdata.conquery.models.messages.network.MessageToManagerNode; +import com.bakdata.conquery.models.messages.network.NetworkMessageContext; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; +import com.bakdata.conquery.util.io.ConqueryMDC; +import com.fasterxml.jackson.databind.ObjectMapper; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.mina.core.service.IoAcceptor; +import org.apache.mina.core.service.IoHandlerAdapter; +import org.apache.mina.core.session.IoSession; +import org.apache.mina.transport.socket.nio.NioSocketAcceptor; + +/** + * Manager of the connection from the manager to the ConQuery shards. 
+ */ +@Slf4j +@RequiredArgsConstructor +public class ClusterConnectionManager extends IoHandlerAdapter { + + private IoAcceptor acceptor; + private final DatasetRegistry datasetRegistry; + private final JobManager jobManager; + private final Validator validator; + private final ConqueryConfig config; + private final InternalObjectMapperCreator internalObjectMapperCreator; + @Getter + private final ClusterState clusterState; + + @Override + public void sessionOpened(IoSession session) { + ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); + log.info("New client {} connected, waiting for identity", session.getRemoteAddress()); + } + + @Override + public void sessionClosed(IoSession session) { + ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); + log.info("Client '{}' disconnected ", session.getAttribute(MinaAttributes.IDENTIFIER)); + } + + @Override + public void exceptionCaught(IoSession session, Throwable cause) { + ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); + log.error("caught exception", cause); + } + + @Override + public void messageReceived(IoSession session, Object message) { + ConqueryMDC.setLocation("ManagerNode[" + session.getLocalAddress().toString() + "]"); + if (message instanceof MessageToManagerNode toManagerNode) { + + log.trace("ManagerNode received {} from {}", message.getClass().getSimpleName(), session.getRemoteAddress()); + + Job job = new ReactingJob<>(toManagerNode, + new NetworkMessageContext.ManagerNodeNetworkContext( + new NetworkSession(session), + datasetRegistry, + clusterState, + config.getCluster().getBackpressure() + )); + + if (toManagerNode instanceof SlowMessage slowMessage) { + slowMessage.setProgressReporter(job.getProgressReporter()); + jobManager.addSlowJob(job); + } + else { + jobManager.addFastJob(job); + } + } + else { + log.error("Unknown message type {} in {}", message.getClass(), message); + } + } + + public void start() throws IOException { + acceptor = new NioSocketAcceptor(); + + ObjectMapper om = internalObjectMapperCreator.createInternalObjectMapper(View.InternalCommunication.class); + config.configureObjectMapper(om); + BinaryJacksonCoder coder = new BinaryJacksonCoder(datasetRegistry, validator, om); + acceptor.getFilterChain().addLast("codec", new CQProtocolCodecFilter(new ChunkWriter(coder), new ChunkReader(coder, om))); + acceptor.setHandler(this); + acceptor.getSessionConfig().setAll(config.getCluster().getMina()); + acceptor.bind(new InetSocketAddress(config.getCluster().getPort())); + log.info("Started ManagerNode @ {}", acceptor.getLocalAddress()); + } + + public void stop() { + clusterState.getShardNodes().forEach(((socketAddress, shardNodeInformation) -> shardNodeInformation.send(new ShutdownShard()))); + + try { + acceptor.dispose(); + } + catch (RuntimeException e) { + log.error("{} could not be closed", acceptor, e); + } + + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java new file mode 100644 index 0000000000..d2e30ab3e4 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java @@ -0,0 +1,93 @@ +package com.bakdata.conquery.mode.cluster; + +import java.io.InputStream; +import java.util.Collection; + +import com.bakdata.conquery.mode.ImportHandler; +import com.bakdata.conquery.models.config.ConqueryConfig; +import 
com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.identifiable.IdMutex; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.DictionaryId; +import com.bakdata.conquery.models.jobs.ImportJob; +import com.bakdata.conquery.models.messages.namespaces.specific.RemoveImportJob; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; +import com.bakdata.conquery.models.worker.Namespace; +import lombok.AllArgsConstructor; +import lombok.SneakyThrows; + +/** + * Handler of {@link Import} requests that realizes them both on the manager and the cluster's shards. + */ +@AllArgsConstructor +public +class ClusterImportHandler implements ImportHandler { + + private final IdMutex sharedDictionaryLocks = new IdMutex<>(); + private final ConqueryConfig config; + private final DatasetRegistry datasetRegistry; + + @SneakyThrows + @Override + public void updateImport(Namespace namespace, InputStream inputStream) { + ImportJob job = ImportJob.createOrUpdate( + datasetRegistry.get(namespace.getDataset().getId()), + inputStream, + config.getCluster().getEntityBucketSize(), + sharedDictionaryLocks, + config, + true + ); + + namespace.getJobManager().addSlowJob(job); + + clearDependentConcepts(namespace.getStorage().getAllConcepts(), job.getTable()); + } + + @SneakyThrows + @Override + public void addImport(Namespace namespace, InputStream inputStream) { + ImportJob job = ImportJob.createOrUpdate( + datasetRegistry.get(namespace.getDataset().getId()), + inputStream, + config.getCluster().getEntityBucketSize(), + sharedDictionaryLocks, + config, + false + ); + namespace.getJobManager().addSlowJob(job); + + clearDependentConcepts(namespace.getStorage().getAllConcepts(), job.getTable()); + } + + @Override + public void deleteImport(Import imp) { + + DatasetId id = imp.getTable().getDataset().getId(); + final DistributedNamespace namespace = datasetRegistry.get(id); + + clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable()); + + namespace.getStorage().removeImport(imp.getId()); + namespace.getWorkerHandler().sendToAll(new RemoveImportJob(imp)); + + // Remove bucket assignments for consistency report + namespace.getWorkerHandler().removeBucketAssignmentsForImportFormWorkers(imp); + } + + private void clearDependentConcepts(Collection> allConcepts, Table table) { + for (Concept c : allConcepts) { + for (Connector con : c.getConnectors()) { + if (!con.getTable().equals(table)) { + continue; + } + + con.getConcept().clearMatchingStats(); + } + } + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManager.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManager.java new file mode 100644 index 0000000000..809b87a945 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManager.java @@ -0,0 +1,33 @@ +package com.bakdata.conquery.mode.cluster; + +import com.bakdata.conquery.mode.DelegateManager; +import com.bakdata.conquery.mode.Manager; +import com.bakdata.conquery.models.worker.DistributedNamespace; +import io.dropwizard.lifecycle.Managed; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.experimental.Delegate; + +/** + * {@link Manager} 
for running ConQuery in cluster mode. + */ +@RequiredArgsConstructor +public class ClusterManager implements Manager { + @Delegate(excludes = Managed.class) + private final DelegateManager delegate; + @Getter + private final ClusterConnectionManager connectionManager; + + @Override + public void start() throws Exception { + delegate.start(); + connectionManager.start(); + } + + @Override + public void stop() throws Exception { + delegate.stop(); + connectionManager.stop(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManagerProvider.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManagerProvider.java new file mode 100644 index 0000000000..f8dd53c541 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManagerProvider.java @@ -0,0 +1,56 @@ +package com.bakdata.conquery.mode.cluster; + +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +import com.bakdata.conquery.mode.DelegateManager; +import com.bakdata.conquery.mode.ImportHandler; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.mode.ManagerProvider; +import com.bakdata.conquery.mode.NamespaceHandler; +import com.bakdata.conquery.mode.StorageListener; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; +import com.bakdata.conquery.models.worker.ShardNodeInformation; +import com.bakdata.conquery.tasks.ReportConsistencyTask; +import io.dropwizard.servlets.tasks.Task; +import io.dropwizard.setup.Environment; + +public class ClusterManagerProvider implements ManagerProvider { + + public ClusterManager provideManager(ConqueryConfig config, Environment environment) { + JobManager jobManager = ManagerProvider.newJobManager(config); + InternalObjectMapperCreator creator = ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator()); + ClusterState clusterState = new ClusterState(); + NamespaceHandler namespaceHandler = new ClusterNamespaceHandler(clusterState, config, creator); + DatasetRegistry datasetRegistry = ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator); + creator.init(datasetRegistry); + + ClusterConnectionManager connectionManager = new ClusterConnectionManager( + datasetRegistry, jobManager, environment.getValidator(), config, creator, clusterState + ); + ImportHandler importHandler = new ClusterImportHandler(config, datasetRegistry); + StorageListener extension = new ClusterStorageListener(jobManager, datasetRegistry); + Supplier> nodeProvider = () -> clusterState.getShardNodes().values(); + List adminTasks = List.of(new ReportConsistencyTask(clusterState)); + + DelegateManager delegate = new DelegateManager<>( + config, + environment, + datasetRegistry, + importHandler, + extension, + nodeProvider, + adminTasks, + creator, + jobManager + ); + + return new ClusterManager(delegate, connectionManager); + } + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java new file mode 100644 index 0000000000..30b0237106 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java @@ -0,0 +1,56 @@ +package com.bakdata.conquery.mode.cluster; + +import 
com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.mode.NamespaceSetupData; +import com.bakdata.conquery.mode.NamespaceHandler; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.messages.network.specific.AddWorker; +import com.bakdata.conquery.models.messages.network.specific.RemoveWorker; +import com.bakdata.conquery.models.query.DistributedExecutionManager; +import com.bakdata.conquery.models.worker.DistributedNamespace; +import com.bakdata.conquery.models.worker.ShardNodeInformation; +import com.bakdata.conquery.models.worker.WorkerHandler; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class ClusterNamespaceHandler implements NamespaceHandler { + private final ClusterState clusterState; + private final ConqueryConfig config; + private final InternalObjectMapperCreator mapperCreator; + + @Override + public DistributedNamespace createNamespace(NamespaceStorage storage, final MetaStorage metaStorage) { + NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(storage, config, mapperCreator); + DistributedExecutionManager executionManager = new DistributedExecutionManager(metaStorage, clusterState); + WorkerHandler workerHandler = new WorkerHandler(namespaceData.getCommunicationMapper(), storage); + clusterState.getWorkerHandlers().put(storage.getDataset().getId(), workerHandler); + + DistributedNamespace distributedNamespace = new DistributedNamespace( + namespaceData.getPreprocessMapper(), + namespaceData.getCommunicationMapper(), + storage, + executionManager, + namespaceData.getJobManager(), + namespaceData.getFilterSearch(), + namespaceData.getIndexService(), + namespaceData.getInjectables(), + workerHandler + ); + + for (ShardNodeInformation node : clusterState.getShardNodes().values()) { + node.send(new AddWorker(storage.getDataset())); + } + return distributedNamespace; + } + + + @Override + public void removeNamespace(DatasetId id, DistributedNamespace namespace) { + clusterState.getShardNodes().values().forEach(shardNode -> shardNode.send(new RemoveWorker(namespace.getDataset()))); + clusterState.getWorkerHandlers().keySet().removeIf(worker -> worker.getDataset().getDataset().equals(id)); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterState.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterState.java new file mode 100644 index 0000000000..36356fa9fc --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterState.java @@ -0,0 +1,35 @@ +package com.bakdata.conquery.mode.cluster; + +import java.net.SocketAddress; +import java.util.NoSuchElementException; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; +import com.bakdata.conquery.models.worker.ShardNodeInformation; +import com.bakdata.conquery.models.worker.WorkerHandler; +import com.bakdata.conquery.models.worker.WorkerInformation; +import lombok.Value; + +@Value +public class ClusterState { + ConcurrentMap shardNodes = new ConcurrentHashMap<>(); + ConcurrentMap workerHandlers = new ConcurrentHashMap<>(); + + public synchronized void 
register(ShardNodeInformation node, WorkerInformation info) { + WorkerHandler workerHandler = workerHandlers.get(info.getDataset()); + if (workerHandler == null) { + throw new NoSuchElementException("Trying to register a worker for unknown dataset '%s'. I only know %s".formatted(info.getDataset(), workerHandlers.keySet())); + } + workerHandler.register(node, info); + } + + public WorkerInformation getWorker(final WorkerId workerId, final DatasetId id) { + return Optional.ofNullable(workerHandlers.get(id)) + .flatMap(ns -> ns.getWorkers().getOptional(workerId)) + .orElseThrow(() -> new NoSuchElementException("Unknown worker '%s' for dataset '%s'".formatted(workerId, id))); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java new file mode 100644 index 0000000000..f16aaaa1b8 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java @@ -0,0 +1,79 @@ +package com.bakdata.conquery.mode.cluster; + +import java.util.Collection; +import java.util.stream.Collectors; + +import com.bakdata.conquery.mode.StorageListener; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.jobs.SimpleJob; +import com.bakdata.conquery.models.messages.namespaces.specific.RemoveConcept; +import com.bakdata.conquery.models.messages.namespaces.specific.RemoveSecondaryId; +import com.bakdata.conquery.models.messages.namespaces.specific.RemoveTable; +import com.bakdata.conquery.models.messages.namespaces.specific.UpdateConcept; +import com.bakdata.conquery.models.messages.namespaces.specific.UpdateMatchingStatsMessage; +import com.bakdata.conquery.models.messages.namespaces.specific.UpdateSecondaryId; +import com.bakdata.conquery.models.messages.namespaces.specific.UpdateTable; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; +import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.WorkerHandler; +import lombok.AllArgsConstructor; + +/** + * Propagates changes of stored entities to relevant ConQuery shards in the cluster. 
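+ * <p> + * A minimal usage sketch (illustrative, not part of this patch; assumes a manager-side {@code jobManager} and {@code datasetRegistry} are available): + * <pre>{@code + * StorageListener listener = new ClusterStorageListener(jobManager, datasetRegistry); + * // fans out an UpdateTable message to all shard workers of the table's dataset + * listener.onAddTable(table); + * }</pre>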
+ */ +@AllArgsConstructor +public class ClusterStorageListener implements StorageListener { + + private final JobManager jobManager; + private final DatasetRegistry<DistributedNamespace> datasetRegistry; + + @Override + public void onAddSecondaryId(SecondaryIdDescription secondaryId) { + datasetRegistry.get(secondaryId.getDataset().getId()).getWorkerHandler().sendToAll(new UpdateSecondaryId(secondaryId)); + } + + @Override + public void onDeleteSecondaryId(SecondaryIdDescription secondaryId) { + datasetRegistry.get(secondaryId.getDataset().getId()).getWorkerHandler().sendToAll(new RemoveSecondaryId(secondaryId)); + } + + @Override + public void onAddTable(Table table) { + datasetRegistry.get(table.getDataset().getId()).getWorkerHandler().sendToAll(new UpdateTable(table)); + } + + @Override + public void onRemoveTable(Table table) { + datasetRegistry.get(table.getDataset().getId()).getWorkerHandler().sendToAll(new RemoveTable(table)); + } + + @Override + public void onAddConcept(Concept<?> concept) { + WorkerHandler handler = datasetRegistry.get(concept.getDataset().getId()).getWorkerHandler(); + SimpleJob simpleJob = new SimpleJob(String.format("sendToAll : Add %s ", concept.getId()), () -> handler.sendToAll(new UpdateConcept(concept))); + jobManager.addSlowJob(simpleJob); + } + + @Override + public void onDeleteConcept(Concept<?> concept) { + WorkerHandler handler = datasetRegistry.get(concept.getDataset().getId()).getWorkerHandler(); + SimpleJob simpleJob = new SimpleJob("sendToAll: remove " + concept.getId(), () -> handler.sendToAll(new RemoveConcept(concept))); + jobManager.addSlowJob(simpleJob); + } + + @Override + public void onUpdateMatchingStats(final Dataset dataset) { + final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Collection<Concept<?>> concepts = namespace.getStorage().getAllConcepts() + .stream() + .filter(concept -> concept.getMatchingStats() == null) + .collect(Collectors.toSet()); + datasetRegistry.get(dataset.getId()).getWorkerHandler().sendToAll(new UpdateMatchingStatsMessage(concepts)); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/FailingImportHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/local/FailingImportHandler.java new file mode 100644 index 0000000000..e6f0ff65e5 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/FailingImportHandler.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.mode.local; + +import java.io.InputStream; + +import com.bakdata.conquery.mode.ImportHandler; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.worker.Namespace; + +public class FailingImportHandler implements ImportHandler { + + @Override + public void updateImport(Namespace namespace, InputStream inputStream) { + fail(); + } + + @Override + public void addImport(Namespace namespace, InputStream inputStream) { + fail(); + } + + @Override + public void deleteImport(Import imp) { + fail(); + } + + private static void fail() { + throw new UnsupportedOperationException("Imports are not supported when running in SQL mode"); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalManagerProvider.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalManagerProvider.java new file mode 100644 index 0000000000..794df98a87 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalManagerProvider.java @@ -0,0 +1,45 @@ +package com.bakdata.conquery.mode.local; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; 
+import java.util.function.Supplier; + +import com.bakdata.conquery.mode.DelegateManager; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.mode.ManagerProvider; +import com.bakdata.conquery.mode.NamespaceHandler; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.LocalNamespace; +import com.bakdata.conquery.models.worker.ShardNodeInformation; +import com.bakdata.conquery.sql.DslContextFactory; +import com.bakdata.conquery.sql.SqlContext; +import com.bakdata.conquery.sql.conversion.dialect.PostgreSqlDialect; +import io.dropwizard.setup.Environment; + +public class LocalManagerProvider implements ManagerProvider { + + private static final Supplier<Collection<ShardNodeInformation>> EMPTY_NODE_PROVIDER = Collections::emptyList; + + public DelegateManager<LocalNamespace> provideManager(ConqueryConfig config, Environment environment) { + InternalObjectMapperCreator creator = ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator()); + // todo(tm): proper injection + SqlContext sqlContext = new SqlContext(config.getSqlConnectorConfig(), new PostgreSqlDialect(DslContextFactory.create(config.getSqlConnectorConfig()))); + NamespaceHandler<LocalNamespace> namespaceHandler = new LocalNamespaceHandler(config, creator, sqlContext); + DatasetRegistry<LocalNamespace> datasetRegistry = ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator); + creator.init(datasetRegistry); + + return new DelegateManager<>( + config, + environment, + datasetRegistry, + new FailingImportHandler(), + new LocalStorageListener(), + EMPTY_NODE_PROVIDER, + List.of(), + creator, + ManagerProvider.newJobManager(config) + ); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java new file mode 100644 index 0000000000..6bed3c12d8 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java @@ -0,0 +1,44 @@ +package com.bakdata.conquery.mode.local; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.mode.NamespaceHandler; +import com.bakdata.conquery.mode.NamespaceSetupData; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.query.ExecutionManager; +import com.bakdata.conquery.models.worker.LocalNamespace; +import com.bakdata.conquery.sql.SqlContext; +import com.bakdata.conquery.sql.conquery.SqlExecutionManager; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class LocalNamespaceHandler implements NamespaceHandler<LocalNamespace> { + + private final ConqueryConfig config; + private final InternalObjectMapperCreator mapperCreator; + private final SqlContext sqlContext; + + @Override + public LocalNamespace createNamespace(NamespaceStorage namespaceStorage, MetaStorage metaStorage) { + NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, mapperCreator); + ExecutionManager executionManager = new SqlExecutionManager(sqlContext, metaStorage); + return new LocalNamespace( + namespaceData.getPreprocessMapper(), + namespaceData.getCommunicationMapper(), + namespaceStorage, + executionManager, + namespaceData.getJobManager(), + 
namespaceData.getFilterSearch(), + namespaceData.getIndexService(), + namespaceData.getInjectables() + ); + } + + @Override + public void removeNamespace(DatasetId id, LocalNamespace namespace) { + // nothing to do + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java new file mode 100644 index 0000000000..20dcd9e25b --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java @@ -0,0 +1,40 @@ +package com.bakdata.conquery.mode.local; + +import com.bakdata.conquery.mode.StorageListener; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; + +public class LocalStorageListener implements StorageListener { + + // When running without shards, no further actions are required + + @Override + public void onAddSecondaryId(SecondaryIdDescription secondaryId) { + } + + @Override + public void onDeleteSecondaryId(SecondaryIdDescription description) { + } + + @Override + public void onAddTable(Table table) { + } + + @Override + public void onRemoveTable(Table table) { + } + + @Override + public void onAddConcept(Concept concept) { + } + + @Override + public void onDeleteConcept(Concept concept) { + } + + @Override + public void onUpdateMatchingStats(Dataset dataset) { + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java index 43337e3dba..766b3abf54 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java @@ -23,6 +23,7 @@ import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; @@ -106,7 +107,7 @@ public static void authorizeDownloadDatasets(@NonNull Subject subject, @NonNull /** * Calculates the abilities on all datasets a subject has based on its permissions. 
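* <p> * Illustrative use (hypothetical variables; {@code Ability.READ} picked as an example ability): * <pre>{@code * Map<DatasetId, Set<Ability>> abilities = AuthorizationHelper.buildDatasetAbilityMap(subject, datasetRegistry); * boolean canRead = abilities.getOrDefault(datasetId, Set.of()).contains(Ability.READ); * }</pre>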
*/ - public static Map<DatasetId, Set<Ability>> buildDatasetAbilityMap(Subject subject, DatasetRegistry datasetRegistry) { + public static Map<DatasetId, Set<Ability>> buildDatasetAbilityMap(Subject subject, DatasetRegistry<? extends Namespace> datasetRegistry) { HashMap<DatasetId, Set<Ability>> datasetAbilities = new HashMap<>(); for (Dataset dataset : datasetRegistry.getAllDatasets()) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/ConqueryConfig.java b/backend/src/main/java/com/bakdata/conquery/models/config/ConqueryConfig.java index 25aa7bc60c..d04ad65584 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/ConqueryConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/ConqueryConfig.java @@ -116,6 +116,11 @@ public class ConqueryConfig extends Configuration implements Injectable { @Valid @NotNull private List<PluginConfig> plugins = new ArrayList<>(); + + @Valid + @NotNull + private SqlConnectorConfig sqlConnectorConfig = new SqlConnectorConfig(); + /** * null means here that we try to deduce from an attached agent */ diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/Dialect.java b/backend/src/main/java/com/bakdata/conquery/models/config/Dialect.java new file mode 100644 index 0000000000..2ec655aea9 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/config/Dialect.java @@ -0,0 +1,17 @@ +package com.bakdata.conquery.models.config; + +import lombok.Getter; +import org.jooq.SQLDialect; + +@Getter +public enum Dialect { + + POSTGRESQL(SQLDialect.POSTGRES); + + private final SQLDialect jooqDialect; + + Dialect(SQLDialect jooqDialect) { + this.jooqDialect = jooqDialect; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/FormBackendConfig.java b/backend/src/main/java/com/bakdata/conquery/models/config/FormBackendConfig.java index d592a8f691..d8845e24ee 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/FormBackendConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/FormBackendConfig.java @@ -31,18 +31,14 @@ import com.google.common.collect.ImmutableCollection; import io.dropwizard.client.JerseyClientBuilder; import io.dropwizard.jersey.jackson.JacksonMessageBodyProvider; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; +import lombok.Data; import lombok.extern.slf4j.Slf4j; /** * {@link PluginConfig} for an external form backend. * The external form backend must implement the OpenAPI spec for external form backend. 
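* <p> * Sketch of how the task paths are used (illustrative, not part of this patch): both the status and the cancel endpoint are URI templates that {@code ExternalFormBackendApi} resolves against a concrete task id, relative to {@code baseURI}: * <pre>{@code * // statusTemplatePath "task/{taskId}"        resolves to <baseURI>/task/<some-task-id> * // cancelTaskPath     "task/{taskId}/cancel" resolves to <baseURI>/task/<some-task-id>/cancel * }</pre>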
*/ -@NoArgsConstructor -@Setter -@Getter +@Data @CPSType(id = "FORM_BACKEND", base = PluginConfig.class) @Slf4j public class FormBackendConfig implements PluginConfig, MultiInstancePlugin { @@ -63,6 +59,9 @@ public class FormBackendConfig implements PluginConfig, MultiInstancePlugin { @Pattern(regexp = ".+/\\{" + ExternalFormBackendApi.TASK_ID + "}") private String statusTemplatePath = "task/{" + ExternalFormBackendApi.TASK_ID + "}"; + private String cancelTaskPath = "task/{" + ExternalFormBackendApi.TASK_ID + "}/cancel"; + + @NotEmpty private String healthCheckPath = "health"; @@ -107,7 +106,7 @@ public static ObjectMapper configureObjectMapper(ObjectMapper om) { } public ExternalFormBackendApi createApi() { - return new ExternalFormBackendApi(client, baseURI, formConfigPath, postFormPath, statusTemplatePath, healthCheckPath, this::createAccessToken, conqueryApiUrl, getAuthentication()); + return new ExternalFormBackendApi(client, baseURI, formConfigPath, postFormPath, statusTemplatePath, cancelTaskPath, healthCheckPath, this::createAccessToken, conqueryApiUrl, getAuthentication()); } public boolean supportsFormType(String formType) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/SqlConnectorConfig.java b/backend/src/main/java/com/bakdata/conquery/models/config/SqlConnectorConfig.java new file mode 100644 index 0000000000..857018cada --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/config/SqlConnectorConfig.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.models.config; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class SqlConnectorConfig { + + boolean enabled; + + private Dialect dialect; + + /** + * Determines if generated SQL should be formatted. 
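+ * <p> + * A minimal configuration sketch using the generated Lombok builder (all values are illustrative assumptions, not part of this patch): + * <pre>{@code + * SqlConnectorConfig sqlConfig = SqlConnectorConfig.builder() + *     .enabled(true) + *     .dialect(Dialect.POSTGRESQL) + *     .withPrettyPrinting(true) + *     .databaseUsername("conquery") + *     .databasePassword("secret") + *     .jdbcConnectionUrl("jdbc:postgresql://localhost:5432/conquery") + *     .build(); + * }</pre>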
+ */ + private boolean withPrettyPrinting; + + private String databaseUsername; + + private String databasePassword; + + private String jdbcConnectionUrl; + private String primaryColumn = "pid"; +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java index 4d84e6150e..12367ec4c7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java @@ -26,6 +26,7 @@ import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.OptBoolean; @@ -92,7 +93,7 @@ public class PreviewConfig { @JacksonInject(useInput = OptBoolean.FALSE) @NotNull - private DatasetRegistry datasetRegistry; + private DatasetRegistry datasetRegistry; public boolean isGroupingColumn(SecondaryIdDescription desc) { return getGrouping().contains(desc.getId()); diff --git a/backend/src/main/java/com/bakdata/conquery/models/error/ConqueryError.java b/backend/src/main/java/com/bakdata/conquery/models/error/ConqueryError.java index 37a64bddbb..eb02b756e2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/error/ConqueryError.java +++ b/backend/src/main/java/com/bakdata/conquery/models/error/ConqueryError.java @@ -1,33 +1,29 @@ package com.bakdata.conquery.models.error; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; +import java.sql.SQLException; import java.util.Set; import java.util.UUID; -import javax.validation.constraints.NotEmpty; import javax.validation.constraints.NotNull; +import c10n.C10N; import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.query.entity.Entity; -import com.bakdata.conquery.models.query.queryplan.QueryPlan; import com.bakdata.conquery.util.VariableDefaultValue; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import lombok.AccessLevel; +import lombok.Data; import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.ToString; -import lombok.experimental.FieldNameConstants; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.collections4.map.Flat3Map; -import org.apache.commons.text.StringSubstitutor; /** * Base class for errors that are thrown within Conquery and can be serialized @@ -41,45 +37,35 @@ @ToString(onlyExplicitlyIncluded = true) public abstract class ConqueryError extends RuntimeException implements ConqueryErrorInfo { - private static final String NO_MEASSAGE = "Unable to provide error message. No message template was provided by error."; + private static final String NO_MESSAGE = "Unable to provide error message. 
No message template was provided by error."; @VariableDefaultValue @NotNull @ToString.Include private UUID id = UUID.randomUUID(); - @NotEmpty - private String messageTemplate; - private Map context; - /** * Since Jackson does not seem to be able to deserialize throwable with super.cause set. We have our own member */ private ConqueryError conqueryCause; - - public ConqueryError(String messageTemplate, Map context) { - this(messageTemplate, context, null); + protected ConqueryError() { + this(null); } - public ConqueryError(String messageTemplate, Map context, ConqueryError conqueryCause) { + protected ConqueryError(ConqueryError conqueryCause) { this.conqueryCause = conqueryCause; - this.messageTemplate = messageTemplate; - this.context = context; } - @Override - @JsonIgnore - @ToString.Include - public String getMessage() { - if (messageTemplate == null) { - return NO_MEASSAGE; - } - return new StringSubstitutor(context).replace(messageTemplate); + /** + * Wraps the {@link Throwable} into an {@link ConqueryError}. + */ + public static ConqueryError asConqueryError(Throwable t) { + return t instanceof ConqueryError ? (ConqueryError) t : new ConqueryError.UnknownError(t); } @Override - public PlainError asPlain() { - return new PlainError(getId(), getCode(), getMessage(), getContext()); + public SimpleErrorInfo asPlain() { + return new SimpleErrorInfo(getId(), getCode(), getMessage()); } @Override @@ -88,218 +74,126 @@ public final String getCode() { return this.getClass().getAnnotation(CPSType.class).id(); } - /** - * Wraps the {@link Throwable} into an {@link ConqueryError}. - */ - public static ConqueryError asConqueryError(Throwable t) { - return t instanceof ConqueryError ? (ConqueryError) t : new ConqueryError.UnknownError(t); - } - - public abstract static class NoContextError extends ConqueryError { - - public NoContextError(String message) { - super(message, Collections.emptyMap()); - } + @Override + @JsonIgnore + @ToString.Include + public final String getMessage() { + return getMessageTemplate(C10N.get(ErrorMessages.class)); } - public static class ContextError extends ConqueryError { - - public ContextError(String messageTemplate) { - this(messageTemplate, null); - } - - public ContextError(String messageTemplate, ConqueryError cause) { - super(messageTemplate, new Flat3Map<>(), cause); - } - - public ContextError(String messageTemplate, Map context, ConqueryError cause) { - super(messageTemplate, context, cause); - } - - public static ConqueryError fromErrorInfo(ConqueryErrorInfo info) { - if (info instanceof ConqueryError) { - return (ConqueryError) info; - } - - return new ConqueryError.ContextError(info.getMessage(), info.getContext(), null); - - } - } + @JsonIgnore + public abstract String getMessageTemplate(ErrorMessages errorMessages); @Slf4j @CPSType(base = ConqueryError.class, id = "CQ_UNKNOWN_ERROR") - public static class UnknownError extends NoContextError { - - /** - * Constructor for deserialization. 
- */ - @JsonCreator - private UnknownError() { - super("An unknown error occured"); - } + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class UnknownError extends ConqueryError { public UnknownError(Throwable e) { - this(); - log.error("Encountered unknown Error[{}]", this.getId(), e); + super(); + log.error("Encountered unknown Error[{}]", getId(), e); } - } - @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION") - public static class ExecutionCreationErrorUnspecified extends NoContextError { - - public ExecutionCreationErrorUnspecified() { - super("Failure during execution creation."); + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.unknownError(); } } - @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_RESOLVE") - public static class ExecutionCreationResolveError extends ContextError { - - private static final String FAILED_ELEMENT = "ELEMENT"; - private static final String FAILED_ELEMENT_CLASS = "ELEMENT_CLASS"; - private static final String TEMPLATE = "Could not find an ${" + FAILED_ELEMENT_CLASS + "} element called '${" + FAILED_ELEMENT + "}'"; - - /** - * Constructor for deserialization. - */ - @JsonCreator - private ExecutionCreationResolveError() { - super(TEMPLATE); - } - public ExecutionCreationResolveError(Id unresolvableElementId) { - this(); - getContext().put(FAILED_ELEMENT, unresolvableElementId.toString()); - getContext().put(FAILED_ELEMENT_CLASS, unresolvableElementId.getClass().getSimpleName()); + @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION") + @Data + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExecutionCreationErrorUnspecified extends ConqueryError { + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.executionCreationUnspecified(); } } + @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_RESOLVE") + @Data + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExecutionCreationResolveError extends ConqueryError { - @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_RESOLVE_EXTERNAL") - public static class ExternalResolveError extends ContextError { - - private static final String FORMAT_ROW_LENGTH = "formatRowLength"; - private static final String DATA_ROW_LENGTH = "dataRowLength"; - private static final String - TEMPLATE = - "There are ${" + FORMAT_ROW_LENGTH + "} columns in the format but ${" + DATA_ROW_LENGTH + "} in at least one row"; + private final String unknownId; + private final String clazz; - /** - * Constructor for deserialization. 
- */ - @JsonCreator - private ExternalResolveError() { - super(TEMPLATE); + public ExecutionCreationResolveError(Id unresolvableElementId) { + unknownId = unresolvableElementId.toString(); + clazz = unresolvableElementId.getClass().getSimpleName(); } - public ExternalResolveError(int formatRowLength, int dataRowLength) { - this(); - getContext().put(FORMAT_ROW_LENGTH, Integer.toString(formatRowLength)); - getContext().put(DATA_ROW_LENGTH, Integer.toString(dataRowLength)); + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.executionCreationResolve(unknownId, clazz); } } + @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_RESOLVE_EXTERNAL_FORMAT") - public static class ExternalResolveFormatError extends ContextError { - - private static final String FORMAT_ROW_LENGTH = "formatRowLength"; - private static final String DATA_ROW_LENGTH = "dataRowLength"; - private static final String - TEMPLATE = - "There are ${" + FORMAT_ROW_LENGTH + "} columns in the format but ${" + DATA_ROW_LENGTH + "} in at least one row"; - - /** - * Constructor for deserialization. - */ - @JsonCreator - private ExternalResolveFormatError() { - super(TEMPLATE); - } + @Data + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExternalResolveFormatError extends ConqueryError { + + + private final int formatRowLength; + private final int dataRowLength; - public ExternalResolveFormatError(int formatRowLength, int dataRowLength) { - this(); - getContext().put(FORMAT_ROW_LENGTH, Integer.toString(formatRowLength)); - getContext().put(DATA_ROW_LENGTH, Integer.toString(dataRowLength)); + + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.externalResolveFormatError(formatRowLength, dataRowLength); } } @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_RESOLVE_EXTERNAL_ONE_PER_ROW") - public static class ExternalResolveOnePerRowError extends NoContextError { - /** - * Constructor for deserialization. - */ - @JsonCreator - public ExternalResolveOnePerRowError() { - super("External was flagged as one row per entity, but at least one entity spans multiple rows"); + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExternalResolveOnePerRowError extends ConqueryError { + + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.externalEntityUnique(); } } @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_RESOLVE_EXTERNAL_EMPTY") - public static class ExternalResolveEmptyError extends ContextError { - - private static final String TEMPLATE = "None of the provided values could be resolved."; + @NoArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExternalResolveEmptyError extends ConqueryError { - /** - * Constructor for deserialization. - */ - @JsonCreator - public ExternalResolveEmptyError() { - super(TEMPLATE); + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.externalResolveEmpty(); } } - /** - * Unspecified error during {@link QueryPlan}-creation. 
- */ - @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_PLAN") - public static class ExecutionCreationPlanError extends NoContextError { + @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_CREATION_PLAN_FLAGS_MISSING") + @Data + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExecutionCreationPlanMissingFlagsError extends ConqueryError { + private final Set labels; - public ExecutionCreationPlanError() { - super("Unable to generate query plan."); - } - } - @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_CREATION_PLAN_FLAGS_MISSING") - @FieldNameConstants(level = AccessLevel.PRIVATE) - public static class ExecutionCreationPlanMissingFlagsError extends ContextError { - - private final Void ALIGNMENT = null; - private static final String TEMPLATE = "Do not know labels ${" + Fields.ALIGNMENT + "}."; - - /** - * Constructor for deserialization. - */ - @JsonCreator - private ExecutionCreationPlanMissingFlagsError() { - super(TEMPLATE); + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.missingFlags(String.join(", ", labels)); } - public ExecutionCreationPlanMissingFlagsError(Set labels) { - this(); - getContext().put(Fields.ALIGNMENT, String.join(", ", labels).trim()); - } } @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_CREATION_CREATION_PLAN_DATECONTEXT_MISMATCH") - public static class ExecutionCreationPlanDateContextError extends ContextError { - - private static final String ALIGNMENT = "alignment"; - private static final String RESOLUTION = "resolution"; - private static final String TEMPLATE = "Alignment ${" + ALIGNMENT + "} and resolution ${" + RESOLUTION + "} are not compatible."; - - /** - * Constructor for deserialization. - */ - @JsonCreator - private ExecutionCreationPlanDateContextError() { - super(TEMPLATE); - } + @Data + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExecutionCreationPlanDateContextError extends ConqueryError { + + private final Alignment alignment; + private final Resolution resolution; + - public ExecutionCreationPlanDateContextError(Alignment alignment, Resolution resolution) { - this(); - getContext().put(ALIGNMENT, Objects.toString(alignment)); - getContext().put(RESOLUTION, Objects.toString(resolution)); + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.dateContextMismatch(alignment, resolution); } } @@ -307,40 +201,20 @@ public ExecutionCreationPlanDateContextError(Alignment alignment, Resolution res * Unspecified execution processing error. */ @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_JOB") - public static class ExecutionJobErrorWrapper extends ContextError { - - private static final String ENTITY = "entity"; - private static final String TEMPLATE = "Failed to run query job for entity ${" + ENTITY + "}"; - - - /** - * Constructor for deserialization. - */ - @JsonCreator - private ExecutionJobErrorWrapper() { - super(TEMPLATE); - } + @Data + @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) + public static class ExecutionJobErrorWrapper extends ConqueryError { - - private ExecutionJobErrorWrapper(ConqueryError e) { - super(TEMPLATE, e); - } + private final Entity entity; public ExecutionJobErrorWrapper(Entity entity, ConqueryError e) { - this(e); - getContext().put(ENTITY, Integer.toString(entity.getId())); - + super(e); + this.entity = entity; } - } - /** - * Execution processing error with individual context. 
- */ -@CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_PROCESSING_CONTEXT") - public static class ExecutionProcessingContextError extends ContextError { - - public ExecutionProcessingContextError(String messageTemplate, Map<String, String> context, ConqueryError cause) { - super(messageTemplate, context, cause); + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.unknownQueryExecutionError(entity); } } @@ -348,10 +222,12 @@ public ExecutionProcessingContextError(String messageTemplate, Map<String, String> context, ConqueryError cause) diff --git a/backend/src/main/java/com/bakdata/conquery/models/error/ConqueryErrorInfo.java b/backend/src/main/java/com/bakdata/conquery/models/error/ConqueryErrorInfo.java - Map<String, String> getContext(); - /** - * Returns a {@link ConqueryErrorInfo} POJO without the internal type information. - * @return - */ - PlainError asPlain(); + SimpleErrorInfo asPlain(); + /** * Method to check if two errors are basically the same, by not checking the id and the context (which possibly checks on hashcode basis). diff --git a/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java b/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java new file mode 100644 index 0000000000..a7225ffe63 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java @@ -0,0 +1,64 @@ +package com.bakdata.conquery.models.error; + +import java.sql.SQLException; + +import c10n.annotations.De; +import c10n.annotations.En; +import com.bakdata.conquery.models.forms.util.Alignment; +import com.bakdata.conquery.models.forms.util.Resolution; +import com.bakdata.conquery.models.query.entity.Entity; + +public interface ErrorMessages { + + @En("An unknown error occurred.") + @De("Ein unerwarteter Fehler ist aufgetreten.") + String unknownError(); + + @En("An unknown error occurred while creating the query.") + @De("Ein unerwarteter Fehler ist beim Erzeugen der Anfrage aufgetreten.") + String executionCreationUnspecified(); + + @En("Could not find an ${1} element called `${0}`") + @De("Es konnte kein ${1} Objekt mit der Id `${0}` gefunden werden.") + String executionCreationResolve(String id, String clazz); + + @En("There are ${0} columns in the format but ${1} in at least one row") + @De("Das Format gibt ${0} Spalten vor, mindestens eine Zeile enthält aber ${1} Spalten.") + String externalResolveFormatError(int formatRowLength, int dataRowLength); + + @En("Entities must be unique.") + @De("Einträge müssen eindeutig sein.") + String externalEntityUnique(); + + @En("None of the provided Entities could be resolved.") + @De("Keine der Zeilen konnte aufgelöst werden.") + String externalResolveEmpty(); + + @En("Do not know labels ${0}.") + @De("Die Bezeichnung/-en ${0} sind unbekannt.") + String missingFlags(String labels); + + @En("Alignment ${0} and resolution ${1} are not compatible.") + @De("Die Korrektur ${0} und die Auflösung ${1} sind nicht kompatibel.") + String dateContextMismatch(Alignment alignment, Resolution resolution); + + @En("Failed to run query job for entity ${0}.") + @De("Die Anfrage ist für ${0} fehlgeschlagen.") + String unknownQueryExecutionError(Entity entity); + + @En("Unexpected error while processing execution.") + @De("Es ist ein unerwarteter Fehler beim Verarbeiten der Anfrage aufgetreten.") + String executionProcessingError(); + + @En("Query took too long.") + @De("Die Anfrage lief zu lange und wurde abgebrochen.") + String executionTimeout(); + + @En("No secondaryId could be selected.") + @De("Die ausgewählte Analyseebene konnte in keinem der ausgewählten Konzepte gefunden werden.") + String noSecondaryIdSelected(); + + @En("Something went wrong while querying the database: ${0}.") + @De("Etwas 
ist beim Anfragen des Servers fehlgeschlagen: ${0}.") + String sqlError(SQLException error); +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/error/PlainError.java b/backend/src/main/java/com/bakdata/conquery/models/error/PlainError.java deleted file mode 100644 index abcbec9ee4..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/error/PlainError.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.bakdata.conquery.models.error; - -import java.util.Map; -import java.util.UUID; - -import javax.validation.constraints.NotNull; - -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import lombok.Setter; -import lombok.ToString; - -/** - * Base class, that is intended for external serialization, without type information. - */ -@RequiredArgsConstructor -@Getter -@Setter -@EqualsAndHashCode -@ToString -public class PlainError implements ConqueryErrorInfo { - @NotNull - private final UUID id; - private final String code; - private final String message; - private final Map context; - - @Override - public PlainError asPlain() { - return this; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/error/SimpleErrorInfo.java b/backend/src/main/java/com/bakdata/conquery/models/error/SimpleErrorInfo.java new file mode 100644 index 0000000000..1a4f1a9edc --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/error/SimpleErrorInfo.java @@ -0,0 +1,23 @@ +package com.bakdata.conquery.models.error; + +import java.util.UUID; + +import javax.validation.constraints.NotNull; + +import lombok.Data; + +/** + * Base class, that is intended for external serialization, without type information. + */ +@Data +public class SimpleErrorInfo implements ConqueryErrorInfo { + @NotNull + private final UUID id; + private final String code; + private final String message; + + @Override + public SimpleErrorInfo asPlain() { + return this; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java index 047ecc25eb..e34e7f5998 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java @@ -41,6 +41,7 @@ import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.Visitable; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.QueryUtils; import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector; @@ -119,7 +120,7 @@ public abstract class ManagedExecution extends IdentifiableImpl abilities) { } public void reset() { - setState(ExecutionState.NEW); + // This avoids endless loops with already reset queries + if(getState().equals(ExecutionState.NEW)){ + return; + } - namespace.getExecutionManager().clearQueryResults(this); + setState(ExecutionState.NEW); } + + public abstract void cancel(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java index ef6f840542..58c3fcfb01 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java @@ -89,6 +89,8 @@ protected void doInitExecutable() { api = formBackendConfig.createApi(); } + + @Override public void start() { @@ -118,6 +120,10 @@ private synchronized void syncExternalState() { final ExternalTaskState formState = api.getFormState(externalTaskId); + updateStatus(formState); + } + + private void updateStatus(ExternalTaskState formState) { switch (formState.getStatus()) { case RUNNING -> { @@ -129,15 +135,13 @@ private synchronized void syncExternalState() { resultsAssetMap = registerResultAssets(formState); finish(ExecutionState.DONE); } + case CANCELLED -> reset(); } } private List> registerResultAssets(ExternalTaskState response) { final List> assetMap = new ArrayList<>(); - response.getResults().forEach(asset -> - { - assetMap.add(Pair.of(asset, createResultAssetBuilder(asset))); - }); + response.getResults().forEach(asset -> assetMap.add(Pair.of(asset, createResultAssetBuilder(asset)))); return assetMap; } @@ -163,6 +167,13 @@ public void setStatusBase(@NonNull Subject subject, @NonNull ExecutionStatus sta super.setStatusBase(subject, status); } + @Override + public void cancel() { + Preconditions.checkNotNull(externalTaskId, "Cannot check external task, because no Id is present"); + + updateStatus(api.cancelTask(externalTaskId)); + } + @Override public Stream getResultAssets() { return resultsAssetMap.stream().map(Pair::value); @@ -182,6 +193,7 @@ protected void finish(ExecutionState executionState) { if (getState().equals(executionState)) { return; } + super.finish(executionState); synchronized (this) { AuthUtil.cleanUpUserAndBelongings(serviceUser, getStorage()); diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java index 67c15c0864..43a0e1a36f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java @@ -19,6 +19,7 @@ import com.bakdata.conquery.models.identifiable.IdMap; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; +import com.bakdata.conquery.models.messages.namespaces.specific.CancelQuery; import com.bakdata.conquery.models.messages.namespaces.specific.ExecuteForm; import com.bakdata.conquery.models.query.ColumnDescriptor; import com.bakdata.conquery.models.query.ManagedQuery; @@ -123,6 +124,12 @@ protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject status.setColumnDescriptions(subQuery.generateColumnDescriptions()); } + @Override + public void cancel() { + log.debug("Sending cancel message to all workers."); + getNamespace().getWorkerHandler().sendToAll(new CancelQuery(getId())); + } + @Override @JsonIgnore public List getResultInfos() { diff --git a/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java b/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java index 3e9aa63cbe..b14e51b818 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java @@ -48,7 +48,8 @@ import com.bakdata.conquery.models.preproc.PreprocessedReader; import com.bakdata.conquery.models.preproc.parser.specific.IntegerParser; import 
com.bakdata.conquery.models.query.entity.Entity; -import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.DistributedNamespace; +import com.bakdata.conquery.models.worker.WorkerHandler; import com.bakdata.conquery.models.worker.WorkerInformation; import com.bakdata.conquery.util.ResourceUtil; import com.bakdata.conquery.util.progressreporter.ProgressReporter; @@ -67,7 +68,7 @@ @Slf4j public class ImportJob extends Job { - private final Namespace namespace; + private final DistributedNamespace namespace; @Getter private final Table table; @@ -80,7 +81,7 @@ public class ImportJob extends Job { private static final int NUMBER_OF_STEPS = /* directly in execute = */4; - public static ImportJob createOrUpdate(Namespace namespace, InputStream inputStream, int entityBucketSize, IdMutex sharedDictionaryLocks, ConqueryConfig config, boolean update) + public static ImportJob createOrUpdate(DistributedNamespace namespace, InputStream inputStream, int entityBucketSize, IdMutex sharedDictionaryLocks, ConqueryConfig config, boolean update) throws IOException { try (PreprocessedReader parser = new PreprocessedReader(inputStream, namespace.getPreprocessMapper())) { @@ -113,7 +114,7 @@ public static ImportJob createOrUpdate(Namespace namespace, InputStream inputStr throw new WebApplicationException(String.format("Import[%s] is not present.", importId), Response.Status.NOT_FOUND); } // before updating the import, make sure that all workers removed the last import - namespace.sendToAll(new RemoveImportJob(processedImport)); + namespace.getWorkerHandler().sendToAll(new RemoveImportJob(processedImport)); namespace.getStorage().removeImport(importId); } else if (processedImport != null) { @@ -196,7 +197,7 @@ private static Map createLocalIdReplacements(Map importDictionaries(Namespace namespace, Map dicts, Column[] columns, String importName, Table table) { + private static Map importDictionaries(DistributedNamespace namespace, Map dicts, Column[] columns, String importName, Table table) { // Empty Maps are Coalesced to null by Jackson if (dicts == null) { @@ -256,10 +257,10 @@ private static Map importDictionaries(Namespace names return out; } - private static void distributeDictionary(Namespace namespace, Dictionary dictionary) { + private static void distributeDictionary(DistributedNamespace namespace, Dictionary dictionary) { log.trace("Sending {} to all Workers", dictionary); namespace.getStorage().updateDictionary(dictionary); - namespace.sendToAll(new UpdateDictionary(dictionary)); + namespace.getWorkerHandler().sendToAll(new UpdateDictionary(dictionary)); } @@ -312,7 +313,8 @@ public void execute() throws JSONException, InterruptedException, IOException { final Map> workerAssignments = sendBuckets(container.getStarts(), container.getLengths(), primaryMapping, imp, buckets2LocalEntities, storesSorted); - workerAssignments.forEach(namespace::addBucketsToWorker); + WorkerHandler handler = namespace.getWorkerHandler(); + workerAssignments.forEach(handler::addBucketsToWorker); } @@ -327,9 +329,11 @@ private Map> sendBuckets(Map starts, M for (Map.Entry> bucket2entities : buckets2LocalEntities.entrySet()) { - WorkerInformation responsibleWorker = - Objects.requireNonNull(namespace.getResponsibleWorkerForBucket(bucket2entities.getKey()), () -> "No responsible worker for Bucket#" - + bucket2entities.getKey()); + WorkerInformation responsibleWorker = Objects.requireNonNull( + namespace + .getWorkerHandler() + .getResponsibleWorkerForBucket(bucket2entities.getKey()), + () -> 
"No responsible worker for Bucket#" + bucket2entities.getKey()); awaitFreeJobQueue(responsibleWorker); @@ -456,11 +460,11 @@ private void distributeWorkerResponsibilities(DictionaryMapping primaryMapping) for (int entity : primaryMapping.target()) { int bucket = Entity.getBucket(entity, bucketSize); - if (namespace.getResponsibleWorkerForBucket(bucket) != null) { + if (namespace.getWorkerHandler().getResponsibleWorkerForBucket(bucket) != null) { continue; } - namespace.addResponsibility(bucket); + namespace.getWorkerHandler().addResponsibility(bucket); } } } @@ -552,7 +556,7 @@ private Import createImport(PreprocessedHeader header, Map } imp.setDictionaries(dictionaries); - namespace.sendToAll(new AddImport(imp)); + namespace.getWorkerHandler().sendToAll(new AddImport(imp)); return imp; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/NamespaceMessage.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/NamespaceMessage.java index 6fd7c1a9ed..b308a38c03 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/NamespaceMessage.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/NamespaceMessage.java @@ -1,14 +1,14 @@ package com.bakdata.conquery.models.messages.namespaces; import com.bakdata.conquery.models.messages.SlowMessage; -import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.util.progressreporter.ProgressReporter; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.Getter; import lombok.Setter; @Getter @Setter -public abstract class NamespaceMessage extends NamespacedMessage implements SlowMessage { +public abstract class NamespaceMessage extends NamespacedMessage implements SlowMessage { @JsonIgnore @Getter @Setter private ProgressReporter progressReporter; diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectQueryResult.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectQueryResult.java index 0054851e13..cf5293e966 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectQueryResult.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectQueryResult.java @@ -4,7 +4,7 @@ import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.query.results.ShardResult; -import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.DistributedNamespace; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; @@ -27,7 +27,7 @@ public class CollectQueryResult extends NamespaceMessage { private ShardResult result; @Override - public void react(Namespace context) throws Exception { + public void react(DistributedNamespace context) throws Exception { log.info("Received {} of size {}", result, result.getResults().size()); context.getExecutionManager().handleQueryResult(result); diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java index 2ba3747eb2..0239857c86 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java @@ -11,7 +11,7 @@ import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; -import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.google.common.collect.Sets; import lombok.AllArgsConstructor; import lombok.Getter; @@ -40,10 +40,10 @@ public class ReportConsistency extends NamespaceMessage { @Override - public void react(Namespace context) throws Exception { + public void react(DistributedNamespace context) throws Exception { Set managerImports = context.getStorage().getAllImports().stream().map(Import::getId).collect(Collectors.toSet()); - Set assignedWorkerBuckets = context.getBucketsForWorker(workerId); + Set assignedWorkerBuckets = context.getWorkerHandler().getBucketsForWorker(workerId); boolean importsOkay = isConsistent("Imports", managerImports, workerImports, workerId); boolean bucketsOkay = isConsistent("Buckets", assignedWorkerBuckets, workerBuckets, workerId); diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java index 4ba0fee85d..5efc018639 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java @@ -10,7 +10,7 @@ import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; -import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.fasterxml.jackson.annotation.JsonCreator; import lombok.AllArgsConstructor; import lombok.Getter; @@ -30,7 +30,7 @@ public class UpdateElementMatchingStats extends NamespaceMessage { private final Map, MatchingStats.Entry> values; @Override - public void react(Namespace context) throws Exception { + public void react(DistributedNamespace context) throws Exception { for (Entry, MatchingStats.Entry> entry : values.entrySet()) { try { final ConceptElement target = entry.getKey(); diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java index fd8b905070..dd6d332c7b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java @@ -6,8 +6,10 @@ import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.io.mina.MessageSender; import com.bakdata.conquery.io.mina.NetworkSession; +import com.bakdata.conquery.mode.cluster.ClusterState; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; import 
com.bakdata.conquery.models.worker.Workers; import lombok.Getter; @@ -52,12 +54,14 @@ public ShardNodeNetworkContext(ShardNode shardNode, NetworkSession session, Work @Getter public static class ManagerNodeNetworkContext extends NetworkMessageContext { - private final DatasetRegistry namespaces; + private final ClusterState clusterState; + private final DatasetRegistry datasetRegistry; - public ManagerNodeNetworkContext(NetworkSession session, DatasetRegistry namespaces, int backpressure) { + public ManagerNodeNetworkContext(NetworkSession session, DatasetRegistry datasetRegistry, ClusterState clusterState, int backpressure) { super(session, backpressure); - this.namespaces = namespaces; + this.datasetRegistry = datasetRegistry; + this.clusterState = clusterState; } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/AddShardNode.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/AddShardNode.java index 2aeaddf4bd..4dcf448d68 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/AddShardNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/AddShardNode.java @@ -26,7 +26,7 @@ public void react(NetworkMessageContext.ManagerNodeNetworkContext context) throw context.getBackpressure() ); - context.getNamespaces().getShardNodes().put(context.getRemoteAddress(), nodeInformation); + context.getClusterState().getShardNodes().put(context.getRemoteAddress(), nodeInformation); log.info("ShardNode `{}` registered.", context.getRemoteAddress()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToNamespace.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToNamespace.java index 15aba46653..fbd9d4d3a3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToNamespace.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToNamespace.java @@ -9,7 +9,7 @@ import com.bakdata.conquery.models.messages.network.MessageToManagerNode; import com.bakdata.conquery.models.messages.network.NetworkMessage; import com.bakdata.conquery.models.messages.network.NetworkMessageContext.ManagerNodeNetworkContext; -import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.progressreporter.ProgressReporter; import lombok.Getter; @@ -25,7 +25,7 @@ public class ForwardToNamespace extends MessageToManagerNode implements SlowMess @Override public void react(ManagerNodeNetworkContext context) throws Exception { - Namespace ns = Objects.requireNonNull(context.getNamespaces().get(datasetId), () -> String.format("Missing dataset `%s`", datasetId)); + DistributedNamespace ns = Objects.requireNonNull(context.getDatasetRegistry().get(datasetId), () -> String.format("Missing dataset `%s`", datasetId)); ConqueryMDC.setLocation(ns.getStorage().getDataset().toString()); message.react(ns); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RegisterWorker.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RegisterWorker.java index 6b03ec0a9f..ef29e18919 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RegisterWorker.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RegisterWorker.java @@ -36,10 +36,10 @@ public void react(ManagerNodeNetworkContext context) throws Exception { } info.setConnectedShardNode(node); - context.getNamespaces().register(node, info); + context.getClusterState().register(node, info); // Request consistency report - context.getNamespaces().getWorkers().get(info.getId()).send(new RequestConsistency()); + context.getClusterState().getWorker(info.getId(), info.getDataset()).send(new RequestConsistency()); } /** @@ -48,7 +48,7 @@ public void react(ManagerNodeNetworkContext context) throws Exception { * @return the found slave or null if none was found */ private ShardNodeInformation getShardNode(ManagerNodeNetworkContext context) { - return context.getNamespaces() + return context.getClusterState() .getShardNodes() .get(context.getRemoteAddress()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveShardNode.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveShardNode.java index 558eb68214..4f25093694 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveShardNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveShardNode.java @@ -24,6 +24,6 @@ public class RemoveShardNode extends MessageToManagerNode { @Override public void react(NetworkMessageContext.ManagerNodeNetworkContext context) throws Exception { log.info("ShardNode {} unregistered.", context.getRemoteAddress()); - context.getNamespaces().getShardNodes().remove(context.getRemoteAddress()); + context.getClusterState().getShardNodes().remove(context.getRemoteAddress()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/UpdateJobManagerStatus.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/UpdateJobManagerStatus.java index 3f2c339ecf..a215af08dd 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/UpdateJobManagerStatus.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/UpdateJobManagerStatus.java @@ -23,13 +23,14 @@ public class UpdateJobManagerStatus extends MessageToManagerNode { @Override public void react(ManagerNodeNetworkContext context) throws Exception { - final ShardNodeInformation node = context.getNamespaces().getShardNodes().get(context.getRemoteAddress()); + final ShardNodeInformation node = context.getClusterState().getShardNodes().get(context.getRemoteAddress()); if (node == null) { - log.error("Could not find ShardNode `{}`, I only know of {}", context.getRemoteAddress(), context.getNamespaces().getShardNodes().keySet()); + log.error("Could not find ShardNode `{}`, I only know of {}", context.getRemoteAddress(), context.getClusterState().getShardNodes().keySet()); return; } // The shards don't know their own name so we attach it here node.addJobManagerStatus(status.withOrigin(context.getRemoteAddress().toString())); } + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java new file mode 100644 index 0000000000..da96df82af --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java @@ -0,0 +1,183 @@ +package com.bakdata.conquery.models.query; + +import java.util.ArrayList; +import 
java.util.List; +import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.stream.Stream; + +import com.bakdata.conquery.apiv1.query.QueryDescription; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.metrics.ExecutionMetrics; +import com.bakdata.conquery.mode.cluster.ClusterState; +import com.bakdata.conquery.models.auth.AuthorizationHelper; +import com.bakdata.conquery.models.auth.entities.Group; +import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.error.ConqueryError; +import com.bakdata.conquery.models.execution.ExecutionState; +import com.bakdata.conquery.models.execution.InternalExecution; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.models.query.results.ShardResult; +import com.bakdata.conquery.models.worker.Namespace; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.RemovalNotification; +import lombok.RequiredArgsConstructor; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; + +@RequiredArgsConstructor +@Slf4j +public class DistributedExecutionManager implements ExecutionManager { + + private final MetaStorage storage; + private final ClusterState clusterState; + + private final Cache>> executionResults = + CacheBuilder.newBuilder() + .softValues() + .removalListener(this::executionRemoved) + .build(); + + /** + * Manage state of evicted Queries, setting them to NEW. 
+ */ + private void executionRemoved(RemovalNotification> removalNotification) { + // If removal was done manually we assume it was also handled properly + if (!removalNotification.wasEvicted()) { + return; + } + + final ManagedExecutionId executionId = removalNotification.getKey(); + + log.warn("Evicted Results for Query[{}] (Reason: {})", executionId, removalNotification.getCause()); + + storage.getExecution(executionId).reset(); + } + + @Override + public ManagedExecution runQuery(Namespace namespace, QueryDescription query, User user, Dataset submittedDataset, ConqueryConfig config, boolean system) { + final ManagedExecution execution = createExecution(query, user, submittedDataset, system); + execute(namespace, execution, config); + + return execution; + + } + + @Override + public void execute(Namespace namespace, ManagedExecution execution, ConqueryConfig config) { + try { + execution.initExecutable(namespace, config); + } + catch (Exception e) { + // ConqueryErrors are usually user input errors so no need to log them at level=ERROR + if (e instanceof ConqueryError) { + log.warn("Failed to initialize Query[{}]", execution.getId(), e); + } + else { + log.error("Failed to initialize Query[{}]", execution.getId(), e); + } + + storage.removeExecution(execution.getId()); + throw e; + } + + log.info("Starting execution[{}]", execution.getQueryId()); + + execution.start(); + + + final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner(), storage).map(Group::getName).orElse("none"); + ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).inc(); + + if (execution instanceof InternalExecution internalExecution) { + log.info("Executing Query[{}] in Dataset[{}]", execution.getQueryId(), namespace.getDataset().getId()); + clusterState.getWorkerHandlers().get(execution.getDataset().getId()).sendToAll(internalExecution.createExecutionMessage()); + } + } + + @Override + public ManagedExecution createExecution(QueryDescription query, User user, Dataset submittedDataset, boolean system) { + return createQuery(query, UUID.randomUUID(), user, submittedDataset, system); + } + + + // Visible for testing + public ManagedExecution createQuery(QueryDescription query, UUID queryId, User user, Dataset submittedDataset, boolean system) { + // Transform the submitted query into an initialized execution + ManagedExecution managed = query.toManagedExecution(user, submittedDataset, storage); + managed.setSystem(system); + managed.setQueryId(queryId); + + // Store the execution + storage.addExecution(managed); + + return managed; + } + + /** + * Receive part of query result and store into query. + * + * @param result + */ + public > void handleQueryResult(R result) { + + final E query = (E) storage.getExecution(result.getQueryId()); + + if (query.getState() != ExecutionState.RUNNING) { + return; + } + + query.addResult(result); + + // State changed to DONE or FAILED + if (query.getState() != ExecutionState.RUNNING) { + final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(query.getOwner(), storage).map(Group::getName).orElse("none"); + + ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).dec(); + ExecutionMetrics.getQueryStateCounter(query.getState(), primaryGroupName).inc(); + ExecutionMetrics.getQueriesTimeHistogram(primaryGroupName).update(query.getExecutionTime().toMillis()); + } + + } + + + /** + * Register another result for the execution. 
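A note on the result cache used above, since the eviction path is subtle: values are held softly, so the JVM may reclaim them under memory pressure, and the removal listener tells deliberate invalidate() calls apart from eviction via wasEvicted(). A minimal, self-contained sketch of the same Guava pattern, with illustrative key/value types rather than the patch's own classes:

    import java.util.List;

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.RemovalNotification;

    class ResultCacheSketch {

        private final Cache<String, List<String>> results = CacheBuilder.newBuilder()
                .softValues()                      // values may be garbage-collected under memory pressure
                .removalListener(this::onRemoval)  // fires for both eviction and manual invalidation
                .build();

        private void onRemoval(RemovalNotification<String, List<String>> notification) {
            if (!notification.wasEvicted()) {
                return; // manual invalidate(): the caller is assumed to have cleaned up already
            }
            // On eviction the patch resets the execution to NEW so it can simply be re-run.
            System.out.printf("Evicted results for %s (%s)%n", notification.getKey(), notification.getCause());
        }
    }

This is also why streamQueryResults further down treats a missing cache entry as an empty stream rather than an error: evicted results are recoverable, because the execution was reset to NEW and can be re-run.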
+ */ + + @SneakyThrows(ExecutionException.class) // can only occur if ArrayList::new fails which is unlikely and would have other problems also + public void addQueryResult(ManagedExecution execution, List queryResults) { + // We don't collect all results together into a fat list as that would cause lots of huge re-allocations for little gain. + executionResults.get(execution.getId(), ArrayList::new) + .add(queryResults); + } + + /** + * Discard the query's results. + */ + @Override + public void clearQueryResults(ManagedExecution execution) { + executionResults.invalidate(execution.getId()); + } + + @Override + public Stream streamQueryResults(ManagedExecution execution) { + final List> resultParts = executionResults.getIfPresent(execution.getId()); + + return resultParts == null + ? Stream.empty() + : resultParts.stream().flatMap(List::stream); + + } + + @Override + public void cancelQuery(Dataset dataset, ManagedExecution query) { + query.cancel(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java index 933fe3daab..41e714878b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java @@ -1,170 +1,43 @@ package com.bakdata.conquery.models.query; -import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.UUID; -import java.util.concurrent.ExecutionException; import java.util.stream.Stream; import com.bakdata.conquery.apiv1.query.QueryDescription; -import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.metrics.ExecutionMetrics; -import com.bakdata.conquery.models.auth.AuthorizationHelper; -import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.error.ConqueryError; -import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.InternalExecution; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.query.results.ShardResult; import com.bakdata.conquery.models.worker.Namespace; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; import com.google.common.cache.RemovalNotification; -import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; -@RequiredArgsConstructor -@Slf4j -public class ExecutionManager { +public interface ExecutionManager { - private final MetaStorage storage; - /** - * Manage state of evicted Queries, setting them to NEW. 
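From here on, ExecutionManager.java is cut down to a pure interface; the stateful logic above moved wholesale into DistributedExecutionManager. For callers the seam looks roughly like the following stand-in sketch (all names here are invented for illustration, not the patch's API):

    import java.util.stream.Stream;

    interface ExecutionManagerSketch {
        Stream<String> streamQueryResults(String executionId);
    }

    final class CallerSketch {
        private final ExecutionManagerSketch executionManager;

        CallerSketch(ExecutionManagerSketch executionManager) {
            this.executionManager = executionManager;
        }

        long countResults(String executionId) {
            // The caller neither knows nor cares whether results came from shard workers or elsewhere.
            return executionManager.streamQueryResults(executionId).count();
        }
    }

Cluster mode supplies DistributedExecutionManager; other execution backends can plug in their own implementation without touching call sites.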
- */ - private void executionRemoved(RemovalNotification> removalNotification) { - - // If removal was done manually we assume it was also handled properly - if (!removalNotification.wasEvicted()) { - return; - } - - final ManagedExecutionId executionId = removalNotification.getKey(); - - log.warn("Evicted Results for Query[{}] (Reason: {})", executionId, removalNotification.getCause()); - - storage.getExecution(executionId).reset(); - } private final Cache>> executionResults = - CacheBuilder.newBuilder() - .softValues() - .removalListener(this::executionRemoved) - .build(); - - public ManagedExecution runQuery(Namespace namespace, QueryDescription query, User user, Dataset submittedDataset, ConqueryConfig config, boolean system) { - final ManagedExecution execution = createExecution(query, user, submittedDataset, system); - execute(namespace, execution, config); - - return execution; - } - - public ManagedExecution createExecution(QueryDescription query, User user, Dataset submittedDataset, boolean system) { - return createQuery(query, UUID.randomUUID(), user, submittedDataset, system); - } - - public void execute(Namespace namespace, ManagedExecution execution, ConqueryConfig config) { - try { - execution.initExecutable(namespace, config); - } - catch (Exception e) { - // ConqueryErrors are usually user input errors so no need to log them at level=ERROR - if (e instanceof ConqueryError) { - log.warn("Failed to initialize Query[{}]", execution.getId(), e); - } - else { - log.error("Failed to initialize Query[{}]", execution.getId(), e); - } - - storage.removeExecution(execution.getId()); - throw e; - } - - log.info("Starting execution[{}]", execution.getQueryId()); - - execution.start(); - - - final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner(), storage).map(Group::getName).orElse("none"); - ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).inc(); - - if (execution instanceof InternalExecution internalExecution) { - log.info("Executing Query[{}] in Dataset[{}]", execution.getQueryId(), namespace.getDataset().getId()); - namespace.sendToAll(internalExecution.createExecutionMessage()); - } - } + ManagedExecution runQuery(Namespace namespace, QueryDescription query, User user, Dataset submittedDataset, ConqueryConfig config, boolean system); - public ManagedExecution createQuery(QueryDescription query, UUID queryId, User user, Dataset submittedDataset, boolean system) { - // Transform the submitted query into an initialized execution - ManagedExecution managed = query.toManagedExecution(user, submittedDataset, storage); - managed.setSystem(system); - managed.setQueryId(queryId); + void execute(Namespace namespace, ManagedExecution execution, ConqueryConfig config); - // Store the execution - storage.addExecution(managed); + ManagedExecution createExecution(QueryDescription query, User user, Dataset submittedDataset, boolean system); - return managed; - } - - /** - * Receive part of query result and store into query. 
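The result-receiving method being removed here (handleQueryResult, which survives verbatim in DistributedExecutionManager above) is effectively a small state machine: shard results are only accepted while the execution is RUNNING, and everything else is a late or duplicate delivery. Reduced to a sketch with an illustrative enum and flags, not the real types:

    enum StateSketch { NEW, RUNNING, DONE, FAILED }

    final class ResultGuardSketch {

        // Only RUNNING executions accept shard results; anything else is dropped without effect.
        static StateSketch accept(StateSketch current, boolean shardFailed, boolean allShardsDone) {
            if (current != StateSketch.RUNNING) {
                return current;
            }
            if (shardFailed) {
                return StateSketch.FAILED;
            }
            return allShardsDone ? StateSketch.DONE : StateSketch.RUNNING;
        }
    }

In the real method the second getState() check after addResult() is the interesting part: the metrics (running-queries counter, state counter, latency histogram) are flushed exactly once, on the first transition out of RUNNING.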
- * - * @param result - */ - public > void handleQueryResult(R result) { - - - final E query = (E) storage.getExecution(result.getQueryId()); - - if (query.getState() != ExecutionState.RUNNING) { - return; - } - - query.addResult(result); - - // State changed to DONE or FAILED - if (query.getState() != ExecutionState.RUNNING) { - final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(query.getOwner(), storage).map(Group::getName).orElse("none"); - - ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).dec(); - ExecutionMetrics.getQueryStateCounter(query.getState(), primaryGroupName).inc(); - ExecutionMetrics.getQueriesTimeHistogram(primaryGroupName).update(query.getExecutionTime().toMillis()); - } - } - - /** - * Register another result for the execution. - */ - @SneakyThrows(ExecutionException.class) // can only occur if ArrayList::new fails which is unlikely and would have other problems also - public void addQueryResult(ManagedExecution execution, List queryResults) { - // We don't collect all results together into a fat list as that would cause lots of huge re-allocations for little gain. - executionResults.get(execution.getId(), ArrayList::new) - .add(queryResults); - } + void cancelQuery(final Dataset dataset, final ManagedExecution query); /** * Discard the query's results. */ - public void clearQueryResults(ManagedExecution execution) { - executionResults.invalidate(execution.getId()); - } + void clearQueryResults(ManagedExecution execution); /** * Stream the results of the query, if available. */ - public Stream streamQueryResults(ManagedExecution execution) { - final List> resultParts = executionResults.getIfPresent(execution.getId()); - - return resultParts == null - ? Stream.empty() - : resultParts.stream().flatMap(List::stream); - } - - - + Stream streamQueryResults(ManagedExecution execution); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java index 4bd1f5449a..37cc0e5e63 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java @@ -4,15 +4,11 @@ import java.util.Collections; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; -import c10n.C10N; import com.bakdata.conquery.apiv1.execution.ExecutionStatus; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; import com.bakdata.conquery.apiv1.query.Query; @@ -21,7 +17,6 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery; import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; -import com.bakdata.conquery.internationalization.CQElementC10n; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; @@ -33,17 +28,19 @@ import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; +import com.bakdata.conquery.models.messages.namespaces.specific.CancelQuery; import 
com.bakdata.conquery.models.messages.namespaces.specific.ExecuteQuery; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.query.results.ShardResult; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.WorkerInformation; +import com.bakdata.conquery.util.QueryUtils; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.OptBoolean; import com.google.common.base.Preconditions; -import com.google.common.base.Strings; import lombok.Getter; import lombok.NonNull; import lombok.Setter; @@ -57,7 +54,6 @@ @CPSType(base = ManagedExecution.class, id = "MANAGED_QUERY") public class ManagedQuery extends ManagedExecution implements SingleTableResult, InternalExecution { - private static final int MAX_CONCEPT_LABEL_CONCAT_LENGTH = 70; // Needs to be resolved externally before being executed private Query query; /** @@ -129,7 +125,7 @@ public long resultRowCount() { @Override public void start() { super.start(); - involvedWorkers = Collections.synchronizedSet(getNamespace().getWorkers().stream() + involvedWorkers = Collections.synchronizedSet(getNamespace().getWorkerHandler().getWorkers().stream() .map(WorkerInformation::getId) .collect(Collectors.toSet())); } @@ -185,6 +181,19 @@ public List getResultInfos() { return query.getResultInfos(); } + @Override + public void reset() { + super.reset(); + getNamespace().getExecutionManager().clearQueryResults(this); + } + + @Override + public void cancel() { + log.debug("Sending cancel message to all workers."); + + getNamespace().getWorkerHandler().sendToAll(new CancelQuery(getId())); + } + @Override @JsonIgnore public QueryDescription getSubmitted() { @@ -196,84 +205,12 @@ public QueryDescription getSubmitted() { * The Label is customized by mentioning that a description contained a * {@link CQExternal}, {@link CQReusedQuery} or {@link CQConcept}, in this order. * In case of one ore more {@link CQConcept} the distinct labels of the concepts are chosen - * and concatinated until a length of {@value #MAX_CONCEPT_LABEL_CONCAT_LENGTH} is reached. + * and concatenated until a length of MAX_CONCEPT_LABEL_CONCAT_LENGTH is reached. * All further labels are dropped. */ @Override protected String makeDefaultLabel(PrintSettings cfg) { - final StringBuilder sb = new StringBuilder(); - - final Map, List> sortedContents = - Visitable.stream(query) - .collect(Collectors.groupingBy(Visitable::getClass)); - - int sbStartSize = sb.length(); - - // Check for CQExternal - List externals = sortedContents.getOrDefault(CQExternal.class, Collections.emptyList()); - if (!externals.isEmpty()) { - if (sb.length() > 0) { - sb.append(" "); - } - sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).external()); - } - - // Check for CQReused - if (sortedContents.containsKey(CQReusedQuery.class)) { - if (sb.length() > 0) { - sb.append(" "); - } - sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).reused()); - } - - - // Check for CQConcept - if (sortedContents.containsKey(CQConcept.class)) { - if (sb.length() > 0) { - sb.append(" "); - } - // Track length of text we are appending for concepts.
- final AtomicInteger length = new AtomicInteger(); - - sortedContents.get(CQConcept.class) - .stream() - .map(CQConcept.class::cast) - - .map(c -> makeLabelWithRootAndChild(c, cfg)) - .filter(Predicate.not(Strings::isNullOrEmpty)) - .distinct() - - .takeWhile(elem -> length.addAndGet(elem.length()) < MAX_CONCEPT_LABEL_CONCAT_LENGTH) - .forEach(label -> sb.append(label).append(" ")); - - // Last entry will output one Space that we don't want - if (sb.length() > 0) { - sb.deleteCharAt(sb.length() - 1); - } - - // If not all Concept could be included in the name, point that out - if (length.get() > MAX_CONCEPT_LABEL_CONCAT_LENGTH) { - sb.append(" ").append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).furtherConcepts()); - } - } - - - // Fallback to id if nothing could be extracted from the query description - if (sbStartSize == sb.length()) { - sb.append(getId().getExecution()); - } - - return sb.toString(); - } - - private static String makeLabelWithRootAndChild(CQConcept cqConcept, PrintSettings cfg) { - String label = cqConcept.getUserOrDefaultLabel(cfg.getLocale()); - if (label == null) { - label = cqConcept.getConcept().getLabel(); - } - - // Concat everything with dashes - return label.replace(" ", "-"); + return QueryUtils.makeQueryLabel(query, cfg, getId()); } @Override @@ -286,4 +223,9 @@ public void visit(Consumer visitor) { visitor.accept(this); query.visit(visitor); } + + public DistributedNamespace getNamespace() { + return (DistributedNamespace) super.getNamespace(); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/entity/Entity.java b/backend/src/main/java/com/bakdata/conquery/models/query/entity/Entity.java index d0470a4a4d..f304ec1024 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/entity/Entity.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/entity/Entity.java @@ -1,22 +1,20 @@ package com.bakdata.conquery.models.query.entity; import com.bakdata.conquery.models.datasets.Table; -import lombok.Getter; +import com.fasterxml.jackson.annotation.JsonCreator; +import lombok.Data; import lombok.RequiredArgsConstructor; -import lombok.ToString; /** * All associated data to a single entity (usually a person), over all {@link Table}s and {@link com.bakdata.conquery.models.datasets.concepts.Concept}s. * - * @implNote The ManagerNode does not hold any data of Entities, only the ShardNodes do (via Workers). Additionally, all data of a single Entity must be held by a single Worker only (See {@link com.bakdata.conquery.models.worker.Namespace::getResponsibleWorker}). + * @implNote The ManagerNode does not hold any data of Entities, only the ShardNodes do (via Workers). Additionally, all data of a single Entity must be held by a single Worker only (See {@link com.bakdata.conquery.models.worker.WorkerHandler#getResponsibleWorkerForBucket(int)}). */ -@RequiredArgsConstructor -@ToString(of = "id") +@Data +@RequiredArgsConstructor(onConstructor_ = @JsonCreator) public class Entity { - @Getter private final int id; - /** * Calculate the bucket of the {@link Entity::getId}. 
Used for distributing partitions of the data to {@link com.bakdata.conquery.models.worker.Worker}s */ diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ExternalNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ExternalNode.java index 9780a429fd..292e11d736 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ExternalNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ExternalNode.java @@ -38,7 +38,6 @@ public class ExternalNode extends QPNode { private final Map> extraAggregators; private CDateSet contained; - @ToString.Include public Set getEntities() { return includedEntities.keySet(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java b/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java index 2ad13d38ab..c690b42a99 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java @@ -2,34 +2,27 @@ import java.io.Closeable; import java.io.IOException; -import java.net.SocketAddress; import java.util.Collection; import java.util.List; import java.util.NoSuchElementException; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; import java.util.stream.Collectors; import javax.validation.Validator; -import javax.validation.constraints.NotNull; -import com.bakdata.conquery.commands.ManagerNode; import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.mode.NamespaceHandler; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.IdMap; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; -import com.bakdata.conquery.models.messages.network.specific.AddWorker; -import com.bakdata.conquery.models.messages.network.specific.RemoveWorker; -import com.bakdata.conquery.models.query.ExecutionManager; import com.fasterxml.jackson.annotation.JsonIgnoreType; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.Getter; @@ -37,42 +30,30 @@ import lombok.Setter; import lombok.extern.slf4j.Slf4j; -/** - * Holds the necessary information about all datasets on the {@link ManagerNode}. - * This includes meta data of each dataset (not to confuse with {@link MetaStorage}) as well as informations about the - * distributed query engine. 
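The DatasetRegistry rewritten below becomes generic over its namespace type, and namespace construction and teardown move behind a NamespaceHandler. A toy model of that pattern with hypothetical names (HandlerSketch/RegistrySketch are not the patch's API):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    interface HandlerSketch<N> {
        N create(String dataset);
        void remove(String dataset, N namespace);
    }

    final class RegistrySketch<N> {
        private final Map<String, N> namespaces = new ConcurrentHashMap<>();
        private final HandlerSketch<N> handler;

        RegistrySketch(HandlerSketch<N> handler) {
            this.handler = handler;
        }

        N createNamespace(String dataset) {
            return namespaces.computeIfAbsent(dataset, handler::create);
        }

        void removeNamespace(String dataset) {
            N removed = namespaces.remove(dataset);
            if (removed != null) {
                handler.remove(dataset, removed); // engine-specific cleanup, e.g. shard messages in cluster mode
            }
        }
    }

The cluster-only side effects this class used to perform inline, such as the AddWorker/RemoveWorker broadcasts and the worker IdMap, are exactly what the handler absorbs.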
- */ @Slf4j @RequiredArgsConstructor @JsonIgnoreType -public class DatasetRegistry extends IdResolveContext implements Closeable { - - private final ConcurrentMap datasets = new ConcurrentHashMap<>(); - @NotNull - @Getter - @Setter - private IdMap workers = new IdMap<>(); // TODO remove this and take it from Namespaces.datasets +public class DatasetRegistry extends IdResolveContext implements Closeable { + private final ConcurrentMap datasets = new ConcurrentHashMap<>(); @Getter private final int entityBucketSize; - @Getter - private final ConcurrentMap shardNodes = new ConcurrentHashMap<>(); - @Getter private final ConqueryConfig config; - private final Function, ObjectMapper> internalObjectMapperCreator; + private final InternalObjectMapperCreator internalObjectMapperCreator; @Getter @Setter private MetaStorage metaStorage; + private final NamespaceHandler namespaceHandler; - public Namespace createNamespace(Dataset dataset, Validator validator) throws IOException { + public N createNamespace(Dataset dataset, Validator validator) throws IOException { // Prepare empty storage NamespaceStorage datasetStorage = new NamespaceStorage(config.getStorage(), "dataset_" + dataset.getName(), validator); - final ObjectMapper persistenceMapper = internalObjectMapperCreator.apply(View.Persistence.Manager.class); + final ObjectMapper persistenceMapper = internalObjectMapperCreator.createInternalObjectMapper(View.Persistence.Manager.class); datasetStorage.openStores(persistenceMapper); datasetStorage.loadData(); @@ -84,42 +65,26 @@ public Namespace createNamespace(Dataset dataset, Validator validator) throws IO return createNamespace(datasetStorage); } - - public Namespace createNamespace(NamespaceStorage datasetStorage) { - final Namespace namespace = Namespace.create( - new ExecutionManager(getMetaStorage()), - datasetStorage, - config, - internalObjectMapperCreator - ); - + public N createNamespace(NamespaceStorage datasetStorage) { + final N namespace = namespaceHandler.createNamespace(datasetStorage, metaStorage); add(namespace); - - // for now we just add one worker to every ShardNode - for (ShardNodeInformation node : getShardNodes().values()) { - node.send(new AddWorker(datasetStorage.getDataset())); - } - return namespace; } - private void add(Namespace ns) { + public void add(N ns) { datasets.put(ns.getStorage().getDataset().getId(), ns); } - public Namespace get(DatasetId dataset) { + public N get(DatasetId dataset) { return datasets.get(dataset); } - + public void removeNamespace(DatasetId id) { - Namespace removed = datasets.remove(id); + N removed = datasets.remove(id); - if(removed != null) { + if (removed != null) { metaStorage.getCentralRegistry().remove(removed.getDataset()); - - getShardNodes().values().forEach(shardNode -> shardNode.send(new RemoveWorker(removed.getDataset()))); - - workers.keySet().removeIf(w->w.getDataset().equals(id)); + namespaceHandler.removeNamespace(id, removed); removed.remove(); } } @@ -132,39 +97,22 @@ public CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementExcep return datasets.get(dataset).getStorage().getCentralRegistry(); } - + @Override public CentralRegistry getMetaRegistry() { return metaStorage.getCentralRegistry(); } - public synchronized void register(ShardNodeInformation node, WorkerInformation info) { - WorkerInformation old = workers.getOptional(info.getId()).orElse(null); - if (old != null) { - old.setIncludedBuckets(info.getIncludedBuckets()); - old.setConnectedShardNode(node); - } - else { - 
info.setConnectedShardNode(node); - workers.add(info); - } - - Namespace ns = datasets.get(info.getDataset()); - if (ns == null) { - throw new NoSuchElementException( - "Trying to register a worker for unknown dataset '" + info.getDataset() + "'. I only know " + datasets.keySet()); - } - ns.addWorker(info); - } public List getAllDatasets() { return datasets.values().stream().map(Namespace::getStorage).map(NamespaceStorage::getDataset).collect(Collectors.toList()); } - public Collection getDatasets() { + public Collection getDatasets() { return datasets.values(); } - + + @Override public void close() { for (Namespace namespace : datasets.values()) { try { @@ -178,8 +126,8 @@ public void close() { @Override public MutableInjectableValues inject(MutableInjectableValues values) { - // Make this class also availiable under DatasetRegistry - return super.inject(values) - .add(DatasetRegistry.class, this); + // Make this class also available under DatasetRegistry + return super.inject(values).add(DatasetRegistry.class, this); } + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java b/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java new file mode 100644 index 0000000000..4ef1a5d78f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java @@ -0,0 +1,39 @@ +package com.bakdata.conquery.models.worker; + +import java.util.List; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.index.IndexService; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.query.DistributedExecutionManager; +import com.bakdata.conquery.models.query.FilterSearch; +import com.bakdata.conquery.models.query.entity.Entity; +import com.fasterxml.jackson.databind.ObjectMapper; +import lombok.Getter; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; + + +/** + * Keep track of all data assigned to a single dataset. Each ShardNode has one {@link Worker} per {@link Dataset} / {@link DistributedNamespace}. + * Every Worker is assigned a partition of the loaded {@link Entity}s via {@link Entity::getBucket}. 
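For orientation, the bucket mentioned in this javadoc is a plain integer partition of the entity-id space. Assuming the bucket function is the simple division that the entityBucketSize configuration suggests (an assumption on my part, not spelled out in this hunk), it amounts to:

    final class BucketSketch {

        // Illustrative only: map an entity id to its bucket.
        // All data of one bucket lives on exactly one Worker.
        static int bucketOf(int entityId, int entityBucketSize) {
            return entityId / entityBucketSize;
        }
    }

    // BucketSketch.bucketOf(123_456, 1_000) == 123, so the Worker responsible for bucket 123 holds this entity.

Note also the design choice in the class that follows: DistributedNamespace extends LocalNamespace, so cluster mode only layers the WorkerHandler and a DistributedExecutionManager on top of the shared base.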
+ */ +@Slf4j +@Getter +@ToString(onlyExplicitlyIncluded = true) +public class DistributedNamespace extends LocalNamespace { + + private final WorkerHandler workerHandler; + private final DistributedExecutionManager executionManager; + + public DistributedNamespace(ObjectMapper preprocessMapper, ObjectMapper communicationMapper, NamespaceStorage storage, DistributedExecutionManager executionManager, + JobManager jobManager, FilterSearch filterSearch, IndexService indexService, List injectables, + WorkerHandler workerHandler) { + super(preprocessMapper, communicationMapper, storage, executionManager, jobManager, filterSearch, indexService, injectables); + this.executionManager = executionManager; + this.workerHandler = workerHandler; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/LocalNamespace.java b/backend/src/main/java/com/bakdata/conquery/models/worker/LocalNamespace.java new file mode 100644 index 0000000000..d77b93adff --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/LocalNamespace.java @@ -0,0 +1,141 @@ +package com.bakdata.conquery.models.worker; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.function.Function; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.View; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.PreviewConfig; +import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; +import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.index.IndexService; +import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.jobs.SimpleJob; +import com.bakdata.conquery.models.query.ExecutionManager; +import com.bakdata.conquery.models.query.FilterSearch; +import com.fasterxml.jackson.databind.ObjectMapper; +import lombok.AccessLevel; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Getter +@ToString(onlyExplicitlyIncluded = true) +@RequiredArgsConstructor +public class LocalNamespace extends IdResolveContext implements Namespace { + + private final ObjectMapper preprocessMapper; + private final ObjectMapper communicationMapper; + @ToString.Include + private final NamespaceStorage storage; + + private final ExecutionManager executionManager; + + // TODO: 01.07.2020 FK: This is not used a lot, as NamespacedMessages are highly convoluted and hard to decouple as is. 
+ private final JobManager jobManager; + + private final FilterSearch filterSearch; + + private final IndexService indexService; + + // Jackson's injectables that are available when deserializing requests (see PathParamInjector) or items from the storage + private final List injectables; + + + @Override + public Dataset getDataset() { + return storage.getDataset(); + } + + @Override + public void close() { + try { + jobManager.close(); + } + catch (Exception e) { + log.error("Unable to close namespace jobmanager of {}", this, e); + } + + try { + log.info("Closing namespace storage of {}", getStorage().getDataset().getId()); + storage.close(); + } + catch (IOException e) { + log.error("Unable to close namespace storage of {}.", this, e); + } + } + + @Override + public void remove() { + try { + jobManager.close(); + } + catch (Exception e) { + log.error("Unable to close namespace jobmanager of {}", this, e); + } + + log.info("Removing namespace storage of {}", getStorage().getDataset().getId()); + storage.removeStorage(); + } + + @Override + public CentralRegistry getCentralRegistry() { + return getStorage().getCentralRegistry(); + } + + @Override + public int getNumberOfEntities() { + return getStorage().getPrimaryDictionary().getSize(); + } + + @Override + public void updateInternToExternMappings() { + storage.getAllConcepts().stream() + .flatMap(c -> c.getConnectors().stream()) + .flatMap(con -> con.getSelects().stream()) + .filter(MappableSingleColumnSelect.class::isInstance) + .map(MappableSingleColumnSelect.class::cast) + .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s::loadMapping))); + + storage.getSecondaryIds().stream() + .filter(desc -> desc.getMapping() != null) + .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s.getMapping()::init))); + } + + @Override + public void clearIndexCache() { + indexService.evictCache(); + } + + @Override + public PreviewConfig getPreviewConfig() { + return getStorage().getPreviewConfig(); + } + + @Override + public CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException { + if (!this.getDataset().getId().equals(dataset)) { + throw new NoSuchElementException("Wrong dataset: '" + dataset + "' (expected: '" + this.getDataset().getId() + "')"); + } + return storage.getCentralRegistry(); + } + + @Override + public CentralRegistry getMetaRegistry() { + throw new UnsupportedOperationException(); + } + + @Override + public ExecutionManager getExecutionManager() { + return executionManager; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java b/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java index 5fc7de4458..e4f16697e0 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java @@ -1,299 +1,59 @@ package com.bakdata.conquery.models.worker; import java.io.Closeable; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; import java.util.List; import java.util.NoSuchElementException; -import java.util.Objects; -import java.util.Set; -import java.util.function.Function; import com.bakdata.conquery.io.jackson.Injectable; -import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.io.storage.NamespaceStorage; -import 
com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Import; import com.bakdata.conquery.models.datasets.PreviewConfig; -import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.Id; +import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.index.IndexService; import com.bakdata.conquery.models.jobs.JobManager; -import com.bakdata.conquery.models.jobs.SimpleJob; -import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; -import com.bakdata.conquery.models.messages.namespaces.specific.UpdateWorkerBucket; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.FilterSearch; -import com.bakdata.conquery.models.query.entity.Entity; import com.fasterxml.jackson.databind.ObjectMapper; -import it.unimi.dsi.fastutil.ints.Int2ObjectArrayMap; -import it.unimi.dsi.fastutil.ints.Int2ObjectMap; -import lombok.AccessLevel; -import lombok.Getter; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; +public interface Namespace extends Injectable, Closeable { -/** - * Keep track of all data assigned to a single dataset. Each ShardNode has one {@link Worker} per {@link Dataset} / {@link Namespace}. - * Every Worker is assigned a partition of the loaded {@link Entity}s via {@link Entity::getBucket}. - */ -@Slf4j -@Getter -@ToString(onlyExplicitlyIncluded = true) -@RequiredArgsConstructor(access = AccessLevel.PRIVATE) -public class Namespace extends IdResolveContext implements Closeable { + Dataset getDataset(); - private final ObjectMapper preprocessMapper; - private final ObjectMapper communicationMapper; - @ToString.Include - private final NamespaceStorage storage; + void remove(); - private final ExecutionManager executionManager; + CentralRegistry getCentralRegistry(); - // TODO: 01.07.2020 FK: This is not used a lot, as NamespacedMessages are highly convoluted and hard to decouple as is. - private final JobManager jobManager; + int getNumberOfEntities(); - /** - * All known {@link Worker}s that are part of this Namespace. - */ - private final Set workers = new HashSet<>(); + void updateInternToExternMappings(); - /** - * Map storing the buckets each Worker has been assigned. 
- */ - private final Int2ObjectMap bucket2WorkerMap = new Int2ObjectArrayMap<>(); + void clearIndexCache(); - private final FilterSearch filterSearch; + PreviewConfig getPreviewConfig(); - private final IndexService indexService; + CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException; - // Jackson's injectables that are available when deserializing requests (see PathParamInjector) or items from the storage - private final List injectables; + CentralRegistry getMetaRegistry(); - public static Namespace create(ExecutionManager executionManager, NamespaceStorage storage, ConqueryConfig config, Function, ObjectMapper> mapperCreator) { + ExecutionManager getExecutionManager(); - // Prepare namespace dependent Jackson injectables - List injectables = new ArrayList<>(); - final IndexService indexService = new IndexService(config.getCsv().createCsvParserSettings()); - injectables.add(indexService); - ObjectMapper persistenceMapper = mapperCreator.apply(View.Persistence.Manager.class); - ObjectMapper communicationMapper = mapperCreator.apply(View.InternalCommunication.class); - ObjectMapper preprocessMapper = mapperCreator.apply(null); + ObjectMapper getPreprocessMapper(); - injectables.forEach(i -> i.injectInto(persistenceMapper)); - injectables.forEach(i -> i.injectInto(communicationMapper)); - injectables.forEach(i -> i.injectInto(preprocessMapper)); + ObjectMapper getCommunicationMapper(); - // Open and load the stores - storage.openStores(persistenceMapper); - storage.loadData(); + NamespaceStorage getStorage(); - JobManager jobManager = new JobManager(storage.getDataset().getName(), config.isFailOnError()); + JobManager getJobManager(); - FilterSearch filterSearch = new FilterSearch(storage, jobManager, config.getCsv(), config.getIndex()); + FilterSearch getFilterSearch(); + IndexService getIndexService(); - final Namespace namespace = new Namespace(preprocessMapper, communicationMapper, storage, executionManager, jobManager, filterSearch, indexService, injectables); + List getInjectables(); - - return namespace; - } - - - public void sendToAll(WorkerMessage msg) { - if (workers.isEmpty()) { - throw new IllegalStateException("There are no workers yet"); - } - for (WorkerInformation w : workers) { - w.send(msg); - } - } - - - /** - * Find the assigned worker for the bucket. If there is none return null. - */ - public synchronized WorkerInformation getResponsibleWorkerForBucket(int bucket) { - return bucket2WorkerMap.get(bucket); - } - - /** - * Assign responsibility of a bucket to a Worker. - * - * @implNote Currently the least occupied Worker receives a new Bucket, this can change in later implementations. 
(For example for dedicated Workers, or entity weightings) - */ - public synchronized void addResponsibility(int bucket) { - WorkerInformation smallest = workers.stream() - .min(Comparator.comparing(si -> si.getIncludedBuckets().size())) - .orElseThrow(() -> new IllegalStateException("Unable to find minimum.")); - - log.debug("Assigning Bucket[{}] to Worker[{}]", bucket, smallest.getId()); - - bucket2WorkerMap.put(bucket, smallest); - - smallest.getIncludedBuckets().add(bucket); - } - - public synchronized void addWorker(WorkerInformation info) { - Objects.requireNonNull(info.getConnectedShardNode(), () -> String.format("No open connections found for Worker[%s]", info.getId())); - - info.setCommunicationWriter(communicationMapper.writer()); - - workers.add(info); - - for (Integer bucket : info.getIncludedBuckets()) { - final WorkerInformation old = bucket2WorkerMap.put(bucket.intValue(), info); - - // This is a completely invalid state from which we should not recover even in production settings. - if (old != null && !old.equals(info)) { - throw new IllegalStateException(String.format("Duplicate claims for Bucket[%d] from %s and %s", bucket, old, info)); - } - } - } - - public Dataset getDataset() { - return storage.getDataset(); - } - - public void close() { - try { - jobManager.close(); - } - catch (Exception e) { - log.error("Unable to close namespace jobmanager of {}", this, e); - } - - try { - log.info("Closing namespace storage of {}", getStorage().getDataset().getId()); - storage.close(); - } - catch (IOException e) { - log.error("Unable to close namespace storage of {}.", this, e); - } - } - - public void remove() { - try { - jobManager.close(); - } - catch (Exception e) { - log.error("Unable to close namespace jobmanager of {}", this, e); - } - - log.info("Removing namespace storage of {}", getStorage().getDataset().getId()); - storage.removeStorage(); - } - - public Set getBucketsForWorker(WorkerId workerId) { - - final WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); - if (workerBuckets == null) { - return Collections.emptySet(); - } - return workerBuckets.getBucketsForWorker(workerId); - } - - private synchronized WorkerToBucketsMap createWorkerBucketsMap() { - // Ensure that only one map is created and populated in the storage - WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); - if (workerBuckets != null) { - return workerBuckets; - } - storage.setWorkerToBucketsMap(new WorkerToBucketsMap()); - return storage.getWorkerBuckets(); - } - - /** - * Updates the Worker-to-Buckets map, persist it and distributes the update to the shards. - * - * @see Namespace#removeBucketAssignmentsForImportFormWorkers(Import) - */ - public synchronized void addBucketsToWorker(@NonNull WorkerId id, @NonNull Set bucketIds) { - // Ensure that add and remove are not executed at the same time. 
- // We don't make assumptions about the underlying implementation regarding thread safety - WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); - if (workerBuckets == null) { - workerBuckets = createWorkerBucketsMap(); - } - workerBuckets.addBucketForWorker(id, bucketIds); - - storage.setWorkerToBucketsMap(workerBuckets); - - sendUpdatedWorkerInformation(); - } - - public synchronized void removeBucketAssignmentsForImportFormWorkers(@NonNull Import importId) { - - final WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); - if (workerBuckets == null) { - return; - } - workerBuckets.removeBucketsOfImport(importId.getId()); - - storage.setWorkerToBucketsMap(workerBuckets); - - sendUpdatedWorkerInformation(); - } - - - private synchronized void sendUpdatedWorkerInformation() { - // While we hold the lock on the namespace distribute the new, consistent state among the workers - for (WorkerInformation w : getWorkers()) { - w.send(new UpdateWorkerBucket(w)); - } - } - - public CentralRegistry getCentralRegistry() { - return getStorage().getCentralRegistry(); - } - - public int getNumberOfEntities() { - return getStorage().getPrimaryDictionary().getSize(); - } - - - public void updateInternToExternMappings() { - storage.getAllConcepts().stream() - .flatMap(c -> c.getConnectors().stream()) - .flatMap(con -> con.getSelects().stream()) - .filter(MappableSingleColumnSelect.class::isInstance) - .map(MappableSingleColumnSelect.class::cast) - .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s::loadMapping))); - - storage.getSecondaryIds().stream() - .filter(desc -> desc.getMapping() != null) - .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s.getMapping()::init))); - - } - - public void clearIndexCache() { - indexService.evictCache(); - } - - public PreviewConfig getPreviewConfig() { - return getStorage().getPreviewConfig(); - } - - @Override - public CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException { - if (!this.getDataset().getId().equals(dataset)) { - throw new NoSuchElementException("Wrong dataset: '" + dataset + "' (expected: '" + this.getDataset().getId() + "')"); - } - return storage.getCentralRegistry(); - } - - @Override - public CentralRegistry getMetaRegistry() { - throw new UnsupportedOperationException(); - } + & NamespacedId, T extends Identifiable> T resolve(ID id); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/ShardNodeInformation.java b/backend/src/main/java/com/bakdata/conquery/models/worker/ShardNodeInformation.java index e369c9819b..fe15ed6524 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/ShardNodeInformation.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/ShardNodeInformation.java @@ -4,6 +4,7 @@ import java.time.temporal.ChronoUnit; import java.util.HashSet; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import com.bakdata.conquery.io.mina.MessageSender; import com.bakdata.conquery.io.mina.NetworkSession; @@ -35,7 +36,7 @@ public class ShardNodeInformation extends MessageSender.Simple jobManagerStatus = new HashSet<>(); - + private final AtomicBoolean full = new AtomicBoolean(false); private LocalDateTime lastStatusTime = LocalDateTime.now(); public ShardNodeInformation(NetworkSession session, int backpressure) { @@ -57,10 +58,6 @@ private String getLatenessMetricName() { * Calculate the time in Milliseconds since we last 
received a {@link JobManagerStatus} from the corresponding shard. */ private long getMillisSinceLastStatus() { - if(getJobManagerStatus().isEmpty()){ - return -1; - } - return lastStatusTime.until(LocalDateTime.now(), ChronoUnit.MILLIS); } @@ -71,10 +68,6 @@ public void awaitClose() { SharedMetricRegistries.getDefault().remove(getLatenessMetricName()); } - public long calculatePressure() { - return jobManagerStatus.stream().mapToLong(status -> status.getJobs().size()).sum(); - } - public void addJobManagerStatus(JobManagerStatus incoming) { lastStatusTime = LocalDateTime.now(); @@ -82,25 +75,35 @@ public void addJobManagerStatus(JobManagerStatus incoming) { // replace with new status jobManagerStatus.remove(incoming); jobManagerStatus.add(incoming); - } - if (calculatePressure() < backpressure) { - synchronized (jobManagerSync) { - jobManagerSync.notifyAll(); + + final long pressure = calculatePressure(); + final boolean isFull = pressure > backpressure; + + full.set(isFull); + + if (!isFull) { + synchronized (jobManagerSync) { + jobManagerSync.notifyAll(); + } } } + + + } + + private long calculatePressure() { + return jobManagerStatus.stream().mapToLong(status -> status.getJobs().size()).sum(); } public void waitForFreeJobQueue() throws InterruptedException { - if (jobManagerStatus.isEmpty()) { + if (!full.get()) { return; } - if (calculatePressure() >= backpressure) { - log.trace("Have to wait for free JobQueue (size = {})", jobManagerStatus.size()); - synchronized (jobManagerSync) { - jobManagerSync.wait(); - } + synchronized (jobManagerSync) { + log.trace("Have to wait for free JobQueue"); + jobManagerSync.wait(); } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/WorkerHandler.java b/backend/src/main/java/com/bakdata/conquery/models/worker/WorkerHandler.java new file mode 100644 index 0000000000..a78b17c666 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/WorkerHandler.java @@ -0,0 +1,155 @@ +package com.bakdata.conquery.models.worker; + +import java.util.Collections; +import java.util.Comparator; +import java.util.Objects; +import java.util.Set; + +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.identifiable.IdMap; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; +import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; +import com.bakdata.conquery.models.messages.namespaces.specific.UpdateWorkerBucket; +import com.fasterxml.jackson.databind.ObjectMapper; +import it.unimi.dsi.fastutil.ints.Int2ObjectArrayMap; +import it.unimi.dsi.fastutil.ints.Int2ObjectMap; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** + * Handler for workers in a single namespace. + */ +@Slf4j +@RequiredArgsConstructor +public class WorkerHandler { + + private final ObjectMapper communicationMapper; + /** + * All known {@link Worker}s that are part of this Namespace. + */ + private final IdMap workers = new IdMap<>(); + + /** + * Map storing the buckets each Worker has been assigned.
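The addResponsibility method further down is the load-balancing heart of this new class: a fresh bucket always goes to the worker currently holding the fewest buckets. The selection, reduced to a self-contained sketch with illustrative types standing in for WorkerInformation:

    import java.util.Collection;
    import java.util.Comparator;
    import java.util.Set;

    record WorkerSketch(String id, Set<Integer> includedBuckets) {

        static WorkerSketch leastOccupied(Collection<WorkerSketch> workers) {
            return workers.stream()
                          .min(Comparator.comparing(w -> w.includedBuckets().size()))
                          .orElseThrow(() -> new IllegalStateException("Unable to find minimum."));
        }
    }

Ties are broken arbitrarily by min(), and bucket count is the only load signal, which is exactly the caveat the @implNote below spells out.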
+ */ + private final Int2ObjectMap bucket2WorkerMap = new Int2ObjectArrayMap<>(); + + private final NamespaceStorage storage; + + public IdMap getWorkers() { + return this.workers; + } + + public void sendToAll(WorkerMessage msg) { + if (workers.isEmpty()) { + throw new IllegalStateException("There are no workers yet"); + } + for (WorkerInformation w : workers.values()) { + w.send(msg); + } + } + + public synchronized void removeBucketAssignmentsForImportFormWorkers(@NonNull Import importId) { + final WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); + if (workerBuckets == null) { + return; + } + workerBuckets.removeBucketsOfImport(importId.getId()); + + storage.setWorkerToBucketsMap(workerBuckets); + + sendUpdatedWorkerInformation(); + } + + private synchronized void sendUpdatedWorkerInformation() { + for (WorkerInformation w : this.workers.values()) { + w.send(new UpdateWorkerBucket(w)); + } + } + + private synchronized WorkerToBucketsMap createWorkerBucketsMap() { + // Ensure that only one map is created and populated in the storage + WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); + if (workerBuckets != null) { + return workerBuckets; + } + storage.setWorkerToBucketsMap(new WorkerToBucketsMap()); + return storage.getWorkerBuckets(); + } + + public synchronized void addBucketsToWorker(@NonNull WorkerId id, @NonNull Set bucketIds) { + // Ensure that add and remove are not executed at the same time. + // We don't make assumptions about the underlying implementation regarding thread safety + WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); + if (workerBuckets == null) { + workerBuckets = createWorkerBucketsMap(); + } + workerBuckets.addBucketForWorker(id, bucketIds); + + storage.setWorkerToBucketsMap(workerBuckets); + + sendUpdatedWorkerInformation(); + } + + public synchronized WorkerInformation getResponsibleWorkerForBucket(int bucket) { + return bucket2WorkerMap.get(bucket); + } + + /** + * @implNote Currently the least occupied Worker receives a new Bucket, this can change in later implementations. (For example for + * dedicated Workers, or entity weightings) + */ + + public synchronized void addResponsibility(int bucket) { + WorkerInformation smallest = workers + .stream() + .min(Comparator.comparing(si -> si.getIncludedBuckets().size())) + .orElseThrow(() -> new IllegalStateException("Unable to find minimum.")); + + log.debug("Assigning Bucket[{}] to Worker[{}]", bucket, smallest.getId()); + + bucket2WorkerMap.put(bucket, smallest); + + smallest.getIncludedBuckets().add(bucket); + } + + public synchronized void addWorker(WorkerInformation info) { + Objects.requireNonNull(info.getConnectedShardNode(), () -> String.format("No open connections found for Worker[%s]", info.getId())); + + info.setCommunicationWriter(communicationMapper.writer()); + + workers.add(info); + + for (Integer bucket : info.getIncludedBuckets()) { + final WorkerInformation old = bucket2WorkerMap.put(bucket.intValue(), info); + + // This is a completely invalid state from which we should not recover even in production settings. 
+ if (old != null && !old.equals(info)) { + throw new IllegalStateException(String.format("Duplicate claims for Bucket[%d] from %s and %s", bucket, old, info)); + } + } + } + + public void register(ShardNodeInformation node, WorkerInformation info) { + WorkerInformation old = this.getWorkers().getOptional(info.getId()).orElse(null); + if (old != null) { + old.setIncludedBuckets(info.getIncludedBuckets()); + old.setConnectedShardNode(node); + } + else { + info.setConnectedShardNode(node); + } + this.addWorker(info); + } + + public Set getBucketsForWorker(WorkerId workerId) { + final WorkerToBucketsMap workerBuckets = storage.getWorkerBuckets(); + if (workerBuckets == null) { + return Collections.emptySet(); + } + return workerBuckets.getBucketsForWorker(workerId); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/AdminServlet.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/AdminServlet.java index 3272889683..52aa5df7ff 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/AdminServlet.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/AdminServlet.java @@ -83,21 +83,23 @@ public AdminServlet(ManagerNode manager) { jerseyConfig.register(new JacksonMessageBodyProvider(manager.getEnvironment().getObjectMapper())); // freemarker support - adminProcessor = new AdminProcessor( manager.getConfig(), manager.getStorage(), manager.getDatasetRegistry(), manager.getJobManager(), manager.getMaintenanceService(), - manager.getValidator() + manager.getValidator(), + manager.getNodeProvider() ); adminDatasetProcessor = new AdminDatasetProcessor( manager.getConfig(), manager.getValidator(), manager.getDatasetRegistry(), - manager.getJobManager() + manager.getJobManager(), + manager.getImportHandler(), + manager.getStorageListener() ); final AuthCookieFilter authCookieFilter = manager.getConfig().getAuthentication().getAuthCookieFilter(); diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java index 90275f407d..7d65da0ecc 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java @@ -16,6 +16,8 @@ import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; +import com.bakdata.conquery.mode.ImportHandler; +import com.bakdata.conquery.mode.StorageListener; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; @@ -29,26 +31,15 @@ import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; import com.bakdata.conquery.models.exceptions.ValidatorHelper; -import com.bakdata.conquery.models.identifiable.IdMutex; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import com.bakdata.conquery.models.identifiable.ids.specific.DictionaryId; import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.InternToExternMapper; import 
com.bakdata.conquery.models.index.search.SearchIndex; -import com.bakdata.conquery.models.jobs.ImportJob; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.jobs.SimpleJob; -import com.bakdata.conquery.models.messages.namespaces.specific.RemoveConcept; -import com.bakdata.conquery.models.messages.namespaces.specific.RemoveImportJob; -import com.bakdata.conquery.models.messages.namespaces.specific.RemoveSecondaryId; -import com.bakdata.conquery.models.messages.namespaces.specific.RemoveTable; -import com.bakdata.conquery.models.messages.namespaces.specific.UpdateConcept; -import com.bakdata.conquery.models.messages.namespaces.specific.UpdateMatchingStatsMessage; -import com.bakdata.conquery.models.messages.namespaces.specific.UpdateSecondaryId; -import com.bakdata.conquery.models.messages.namespaces.specific.UpdateTable; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; import com.univocity.parsers.csv.CsvParser; @@ -64,16 +55,16 @@ @RequiredArgsConstructor(onConstructor_ = {@Inject}) public class AdminDatasetProcessor { - public static final int MAX_IMPORTS_TEXT_LENGTH = 100; private static final String ABBREVIATION_MARKER = "\u2026"; private final ConqueryConfig config; private final Validator validator; - private final DatasetRegistry datasetRegistry; + private final DatasetRegistry datasetRegistry; private final JobManager jobManager; + private final ImportHandler importHandler; + private final StorageListener storageListener; - private final IdMutex sharedDictionaryLocks = new IdMutex<>(); /** * Creates and initializes a new dataset if it does not already exist. @@ -127,8 +118,7 @@ public synchronized void addSecondaryId(Namespace namespace, SecondaryIdDescript log.info("Received new SecondaryId[{}]", secondaryId.getId()); namespace.getStorage().addSecondaryId(secondaryId); - - namespace.sendToAll(new UpdateSecondaryId(secondaryId)); + storageListener.onAddSecondaryId(secondaryId); } /** @@ -141,7 +131,7 @@ public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secon final List dependents = namespace.getStorage().getTables().stream() .map(Table::getColumns).flatMap(Arrays::stream) .filter(column -> secondaryId.equals(column.getSecondaryId())) - .collect(Collectors.toList()); + .toList(); if (!dependents.isEmpty()) { final Set tables = dependents.stream().map(Column::getTable).map(Identifiable::getId).collect(Collectors.toSet()); @@ -157,7 +147,7 @@ public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secon log.info("Deleting SecondaryId[{}]", secondaryId); namespace.getStorage().removeSecondaryId(secondaryId.getId()); - namespace.sendToAll(new RemoveSecondaryId(secondaryId)); + storageListener.onDeleteSecondaryId(secondaryId); } /** @@ -182,7 +172,7 @@ else if (!table.getDataset().equals(dataset)) { ValidatorHelper.failOnError(log, validator.validate(table)); namespace.getStorage().addTable(table); - namespace.sendToAll(new UpdateTable(table)); + storageListener.onAddTable(table); } @@ -219,7 +209,7 @@ public synchronized void addConcept(@NonNull Dataset dataset, @NonNull Concept namespace.sendToAll(new UpdateConcept(concept)))); + storageListener.onAddConcept(concept); } @@ -268,55 +258,24 @@ public void setStructure(Namespace namespace, StructureNode[] structure) { /** * Reads an Import partially Importing it if not yet present, then submitting it for full import. 
*/ - @SneakyThrows public void addImport(Namespace namespace, InputStream inputStream) throws IOException { - - ImportJob job = ImportJob.createOrUpdate(namespace, inputStream, config.getCluster().getEntityBucketSize(), sharedDictionaryLocks, config, false); - namespace.getJobManager().addSlowJob(job); - - clearDependentConcepts(namespace.getStorage().getAllConcepts(), job.getTable()); + this.importHandler.addImport(namespace, inputStream); } /** * Reads an Import partially Importing it if it is present, then submitting it for full import [Update of an import]. */ - @SneakyThrows public void updateImport(Namespace namespace, InputStream inputStream) throws IOException { - - ImportJob job = ImportJob.createOrUpdate(namespace, inputStream, config.getCluster().getEntityBucketSize(), sharedDictionaryLocks, config, true); - - namespace.getJobManager().addSlowJob(job); - - clearDependentConcepts(namespace.getStorage().getAllConcepts(), job.getTable()); + this.importHandler.updateImport(namespace, inputStream); } /** * Deletes an import. */ public synchronized void deleteImport(Import imp) { - final Namespace namespace = datasetRegistry.get(imp.getTable().getDataset().getId()); - - clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable()); - - - namespace.getStorage().removeImport(imp.getId()); - namespace.sendToAll(new RemoveImportJob(imp)); - - // Remove bucket assignments for consistency report - namespace.removeBucketAssignmentsForImportFormWorkers(imp); + this.importHandler.deleteImport(imp); } - private void clearDependentConcepts(Collection> allConcepts, Table table) { - for (Concept c : allConcepts) { - for (Connector con : c.getConnectors()) { - if (!con.getTable().equals(table)) { - continue; - } - - con.getConcept().clearMatchingStats(); - } - } - } /** * Deletes a table if it has no dependents or not forced to do so. 
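The refactor above replaces the direct sendToAll(...) messaging with two mode-specific seams, so cluster mode and SQL mode can react differently to imports and storage changes. Their declarations are not part of this hunk; inferred purely from the call sites in AdminDatasetProcessor, the two interfaces would look roughly like this (a sketch with imports of project types elided, not the actual source):

import java.io.IOException;
import java.io.InputStream;

// Shape inferred from the addImport/updateImport/deleteImport delegation above.
interface ImportHandler {
	void addImport(Namespace namespace, InputStream inputStream) throws IOException;
	void updateImport(Namespace namespace, InputStream inputStream) throws IOException;
	void deleteImport(Import imp);
}

// Shape inferred from the storageListener.on*(...) call sites above.
interface StorageListener {
	void onAddSecondaryId(SecondaryIdDescription secondaryId);
	void onDeleteSecondaryId(SecondaryIdDescription secondaryId);
	void onAddTable(Table table);
	void onRemoveTable(Table table);
	void onAddConcept(Concept<?> concept);
	void onDeleteConcept(Concept<?> concept);
	void onUpdateMatchingStats(Dataset dataset);
}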
@@ -339,7 +298,7 @@ public synchronized List deleteTable(Table table, boolean force) { .forEach(this::deleteImport); namespace.getStorage().removeTable(table.getId()); - namespace.sendToAll(new RemoveTable(table)); + storageListener.onRemoveTable(table); } return dependentConcepts.stream().map(Concept::getId).collect(Collectors.toList()); @@ -352,8 +311,7 @@ public synchronized void deleteConcept(Concept concept) { final Namespace namespace = datasetRegistry.get(concept.getDataset().getId()); namespace.getStorage().removeConcept(concept.getId()); - getJobManager() - .addSlowJob(new SimpleJob("sendToAll: remove " + concept.getId(), () -> namespace.sendToAll(new RemoveConcept(concept)))); + storageListener.onDeleteConcept(concept); } /** @@ -368,12 +326,8 @@ public void updateMatchingStats(Dataset dataset) { "Initiate Update Matching Stats and FilterSearch", () -> { - final Collection> concepts = ns.getStorage().getAllConcepts() - .stream() - .filter(concept -> concept.getMatchingStats() == null) - .collect(Collectors.toSet()); - ns.sendToAll(new UpdateMatchingStatsMessage(concepts)); + storageListener.onUpdateMatchingStats(dataset); ns.getFilterSearch().updateSearch(); ns.updateInternToExternMappings(); } @@ -404,13 +358,13 @@ public List deleteInternToExternMapping(InternToExternMapper internTo final Set> dependentConcepts = namespace.getStorage().getAllConcepts().stream() .filter( - c -> c.getSelects().stream() - .filter(MappableSingleColumnSelect.class::isInstance) + c -> c.getSelects().stream() + .filter(MappableSingleColumnSelect.class::isInstance) - .map(MappableSingleColumnSelect.class::cast) - .map(MappableSingleColumnSelect::getMapping) - .anyMatch(internToExternMapper::equals) - ) + .map(MappableSingleColumnSelect.class::cast) + .map(MappableSingleColumnSelect::getMapping) + .anyMatch(internToExternMapper::equals) + ) .collect(Collectors.toSet()); if (force || dependentConcepts.isEmpty()) { diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java index 750f37f058..3ced1d1d56 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java @@ -15,9 +15,8 @@ import com.bakdata.conquery.io.jersey.ExtraMimeTypes; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import lombok.Getter; +import com.bakdata.conquery.models.worker.Namespace; import lombok.RequiredArgsConstructor; -import lombok.Setter; @Produces({ExtraMimeTypes.JSON_STRING, ExtraMimeTypes.SMILE_STRING}) @Consumes({ExtraMimeTypes.JSON_STRING, ExtraMimeTypes.SMILE_STRING}) diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java index 76261723dd..6f3b5a76c5 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java @@ -8,6 +8,7 @@ import java.util.TreeSet; import java.util.concurrent.ScheduledExecutorService; import java.util.function.Predicate; +import java.util.function.Supplier; import java.util.stream.Collectors; import javax.validation.Validator; @@ -51,11 +52,12 @@ public class AdminProcessor { private final ConqueryConfig 
config; private final MetaStorage storage; - private final DatasetRegistry datasetRegistry; + private final DatasetRegistry datasetRegistry; private final JobManager jobManager; private final ScheduledExecutorService maintenanceService; private final Validator validator; private final ObjectWriter jsonWriter = Jackson.MAPPER.writer(); + private final Supplier> nodeProvider; public void addRoles(List roles) { @@ -277,7 +279,7 @@ public Collection getJobs() { )); } - for (ShardNodeInformation si : getDatasetRegistry().getShardNodes().values()) { + for (ShardNodeInformation si : nodeProvider.get()) { out.addAll(si.getJobManagerStatus()); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java index 479e0e8751..906fd8e151 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java @@ -34,6 +34,7 @@ import com.bakdata.conquery.models.jobs.JobManagerStatus; import com.bakdata.conquery.models.messages.network.specific.CancelJobMessage; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.models.worker.ShardNodeInformation; import com.bakdata.conquery.resources.admin.ui.AdminUIResource; import io.dropwizard.auth.Auth; @@ -78,7 +79,7 @@ public Response cancelJob(@PathParam(JOB_ID) UUID jobId) { processor.getJobManager().cancelJob(jobId); - for (ShardNodeInformation info : processor.getDatasetRegistry().getShardNodes().values()) { + for (ShardNodeInformation info : processor.getNodeProvider().get()) { info.send(new CancelJobMessage(jobId)); } @@ -115,7 +116,7 @@ public FullExecutionStatus[] getQueries(@Auth Subject currentUser, @QueryParam(" final long limit = maybeLimit.orElse(100); final MetaStorage storage = processor.getStorage(); - final DatasetRegistry datasetRegistry = processor.getDatasetRegistry(); + final DatasetRegistry datasetRegistry = processor.getDatasetRegistry(); return storage.getAllExecutions().stream() @@ -136,4 +137,4 @@ public FullExecutionStatus[] getQueries(@Auth Subject currentUser, @QueryParam(" }) .toArray(FullExecutionStatus[]::new); } -} \ No newline at end of file +} diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java index 7a4b9f21f6..be2b588645 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java @@ -33,6 +33,7 @@ import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.resources.admin.ui.model.FrontendAuthOverview; import com.bakdata.conquery.resources.admin.ui.model.FrontendGroupContent; import com.bakdata.conquery.resources.admin.ui.model.FrontendPermission; @@ -57,7 +58,7 @@ public class UIProcessor { @Getter private final AdminProcessor adminProcessor; - public DatasetRegistry getDatasetRegistry() { + public DatasetRegistry getDatasetRegistry() { return adminProcessor.getDatasetRegistry(); } @@ -66,7 +67,7 @@ public MetaStorage getStorage() { } public UIContext getUIContext() { - 
return new UIContext(getDatasetRegistry()); + return new UIContext(adminProcessor.getNodeProvider()); } public FrontendAuthOverview getAuthOverview() { diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/ui/model/UIContext.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/ui/model/UIContext.java index 940f8bc618..83d48c9271 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/ui/model/UIContext.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/ui/model/UIContext.java @@ -1,9 +1,13 @@ package com.bakdata.conquery.resources.admin.ui.model; +import java.net.SocketAddress; import java.util.Collection; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; -import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.WorkerInformation; +import com.bakdata.conquery.models.worker.ShardNodeInformation; import com.bakdata.conquery.resources.ResourceConstants; import freemarker.template.TemplateModel; import lombok.Getter; @@ -11,25 +15,18 @@ @RequiredArgsConstructor public class UIContext { - + private static final TemplateModel STATIC_URI_ELEMENTS = ResourceConstants.getAsTemplateModel(); - @Getter - private final DatasetRegistry namespaces; + private final Supplier> shardNodeSupplier; @Getter public final TemplateModel staticUriElem = STATIC_URI_ELEMENTS; - public boolean[] getWorkerStatuses() { - boolean[] result = new boolean[namespaces.getShardNodes().values().size()]; - int id = 0; - for(WorkerInformation wi:namespaces.getWorkers().values()) { - result[id++] = wi.isConnected(); - } - return result; - } - - public Collection getWorkers() { - return namespaces.getWorkers().values(); + public Map getShardNodes() { + return shardNodeSupplier.get().stream().collect(Collectors.toMap( + ShardNodeInformation::getRemoteAddress, + Function.identity() + )); } } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java index a3ff393d83..ab877f9b41 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java @@ -60,7 +60,7 @@ @RequiredArgsConstructor(onConstructor_ = {@Inject}) public class ConceptsProcessor { - private final DatasetRegistry namespaces; + private final DatasetRegistry namespaces; private final Validator validator; private final ConqueryConfig config; diff --git a/backend/src/main/java/com/bakdata/conquery/sql/DslContextFactory.java b/backend/src/main/java/com/bakdata/conquery/sql/DslContextFactory.java new file mode 100644 index 0000000000..16bfe54ecd --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/DslContextFactory.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.sql; + +import javax.sql.DataSource; + +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; +import org.jooq.DSLContext; +import org.jooq.conf.Settings; +import org.jooq.impl.DSL; + +public class DslContextFactory { + + public static DSLContext create(SqlConnectorConfig config) { + HikariConfig hikariConfig = new HikariConfig(); + hikariConfig.setJdbcUrl(config.getJdbcConnectionUrl()); + hikariConfig.setUsername(config.getDatabaseUsername()); + 
hikariConfig.setPassword(config.getDatabasePassword()); + + DataSource dataSource = new HikariDataSource(hikariConfig); + + return DSL.using( + dataSource, + config.getDialect().getJooqDialect(), + new Settings().withRenderFormatted(config.isWithPrettyPrinting()) + ); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/SqlContext.java b/backend/src/main/java/com/bakdata/conquery/sql/SqlContext.java new file mode 100644 index 0000000000..bca5ca6f2e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/SqlContext.java @@ -0,0 +1,11 @@ +package com.bakdata.conquery.sql; + +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; +import lombok.Value; + +@Value +public class SqlContext { + SqlConnectorConfig config; + SqlDialect sqlDialect; +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/SqlQuery.java b/backend/src/main/java/com/bakdata/conquery/sql/SqlQuery.java new file mode 100644 index 0000000000..1d93f04749 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/SqlQuery.java @@ -0,0 +1,8 @@ +package com.bakdata.conquery.sql; + +import lombok.Value; + +@Value +public class SqlQuery { + String sqlString; +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlExecutionManager.java new file mode 100644 index 0000000000..9ab60cacde --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlExecutionManager.java @@ -0,0 +1,79 @@ +package com.bakdata.conquery.sql.conquery; + + +import java.util.stream.Stream; + +import com.bakdata.conquery.apiv1.query.Query; +import com.bakdata.conquery.apiv1.query.QueryDescription; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.ExecutionManager; +import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.sql.SqlContext; +import com.bakdata.conquery.sql.SqlQuery; +import com.bakdata.conquery.sql.conversion.SqlConverter; +import com.bakdata.conquery.sql.execution.SqlExecutionResult; +import com.bakdata.conquery.sql.execution.SqlExecutionService; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SqlExecutionManager implements ExecutionManager { + private final MetaStorage metaStorage; + private final SqlExecutionService executionService; + private final SqlConverter converter; + + public SqlExecutionManager(final SqlContext context, MetaStorage metaStorage) { + this.metaStorage = metaStorage; + this.executionService = new SqlExecutionService(context.getSqlDialect().getDSLContext()); + this.converter = new SqlConverter(context.getSqlDialect(), context.getConfig()); + } + + @Override + public SqlManagedQuery runQuery(Namespace namespace, QueryDescription query, User user, Dataset submittedDataset, ConqueryConfig config, boolean system) { + SqlManagedQuery execution = createExecution(query, user, submittedDataset, system); + execution.initExecutable(namespace, config); + execution.start(); + // todo(tm): Non-blocking execution + SqlExecutionResult result = this.executionService.execute(execution); + execution.finish(result); + return execution; + } + + 
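runQuery above is deliberately synchronous for now (see the todo): it converts, starts, executes and finishes the query in one call against the DSLContext wired up by DslContextFactory. A hedged sketch of that wiring using plain HikariCP and jOOQ calls; the JDBC URL and credentials are placeholders, and the dialect and pretty-printing flag are hard-coded here where the factory reads them from SqlConnectorConfig:

import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.conf.Settings;
import org.jooq.impl.DSL;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

class DslContextSketch {

	public static void main(String[] args) {
		HikariConfig hikariConfig = new HikariConfig();
		hikariConfig.setJdbcUrl("jdbc:postgresql://localhost:5432/conquery"); // placeholder URL
		hikariConfig.setUsername("conquery");                                 // placeholder credentials
		hikariConfig.setPassword("secret");

		// Mirrors DslContextFactory.create(...): pooled DataSource + dialect + render settings.
		DSLContext dsl = DSL.using(
				new HikariDataSource(hikariConfig),
				SQLDialect.POSTGRES,
				new Settings().withRenderFormatted(true)
		);
		System.out.println(dsl.configuration().dialect());
	}
}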
@Override + public void execute(Namespace namespace, ManagedExecution execution, ConqueryConfig config) { + if (!(execution instanceof SqlManagedQuery)) { + throw new UnsupportedOperationException("The SQL execution manager can only execute SQL queries, but got a %s".formatted(execution.getClass())); + } + + this.executionService.execute(((SqlManagedQuery) execution)); + } + + @Override + public SqlManagedQuery createExecution(QueryDescription query, User user, Dataset submittedDataset, boolean system) { + Query castQuery = (Query) query; + SqlQuery converted = this.converter.convert(castQuery); + SqlManagedQuery sqlManagedQuery = new SqlManagedQuery(castQuery, user, submittedDataset, metaStorage, converted); + metaStorage.addExecution(sqlManagedQuery); + return sqlManagedQuery; + } + + @Override + public void cancelQuery(Dataset dataset, ManagedExecution query) { + // unsupported for now + } + + @Override + public void clearQueryResults(ManagedExecution execution) { + // unsupported for now + } + + @Override + public Stream streamQueryResults(ManagedExecution execution) { + throw new UnsupportedOperationException("Streaming for now not supported"); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlManagedQuery.java b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlManagedQuery.java new file mode 100644 index 0000000000..783026723a --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlManagedQuery.java @@ -0,0 +1,125 @@ +package com.bakdata.conquery.sql.conquery; + +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.bakdata.conquery.apiv1.query.Query; +import com.bakdata.conquery.apiv1.query.QueryDescription; +import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.execution.ExecutionState; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.query.ColumnDescriptor; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.QueryResolveContext; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.models.query.Visitable; +import com.bakdata.conquery.models.query.resultinfo.ResultInfo; +import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; +import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.models.types.ResultType; +import com.bakdata.conquery.sql.SqlQuery; +import com.bakdata.conquery.sql.execution.SqlExecutionResult; +import com.bakdata.conquery.util.QueryUtils; +import com.google.common.base.Preconditions; +import lombok.Getter; +import lombok.Setter; + +@Setter +@Getter +@CPSType(base = ManagedExecution.class, id = "SQL_QUERY") +public class SqlManagedQuery extends ManagedExecution implements SingleTableResult { + private Query query; + private SqlQuery sqlQuery; + private SqlExecutionResult result; + + protected SqlManagedQuery(MetaStorage storage) { + super(storage); + } + + public SqlManagedQuery(Query query, User owner, Dataset dataset, MetaStorage storage, SqlQuery sqlQuery) { + super(owner, dataset, storage); + this.query = query; + this.sqlQuery = sqlQuery; + } + + 
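createExecution above is the point where a ConQuery AST becomes SQL: the SqlConverter renders the QueryDescription to a single inlined SQL string, which then travels with the SqlManagedQuery. A minimal sketch of that entry point, assuming a dialect and connector config are at hand:

// Hypothetical helper; SqlDialect, SqlConnectorConfig and QueryDescription are the types from this diff.
static String render(SqlDialect dialect, SqlConnectorConfig config, QueryDescription queryDescription) {
	SqlConverter converter = new SqlConverter(dialect, config); // wires a NodeConverterService internally
	SqlQuery sql = converter.convert(queryDescription);         // ConQuery AST -> inlined SQL string
	return sql.getSqlString();
}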
@Override + protected void doInitExecutable() { + query.resolve(new QueryResolveContext(getNamespace(), getConfig(), getStorage(), null)); + } + + @Override + public QueryDescription getSubmitted() { + return query; + } + + @Override + protected String makeDefaultLabel(PrintSettings cfg) { + return QueryUtils.makeQueryLabel(query, cfg, getId()); + } + + @Override + public void cancel() { + //TODO when async is implemented. + } + + @Override + public void visit(Consumer visitor) { + visitor.accept(this); + } + + @Override + public List generateColumnDescriptions() { + // todo(tm): This is basically a duplicate from ManagedQuery, but sets the ResultType to String because the SQL connector doesn't convert types for now. + // As soon as the connector properly handles types, we can extract this into a helper and use it for both this and ManagedQuery. + Preconditions.checkArgument(isInitialized(), "The execution must have been initialized first"); + List columnDescriptions = new ArrayList<>(); + + final Locale locale = I18n.LOCALE.get(); + + PrintSettings settings = new PrintSettings(true, locale, getNamespace(), getConfig(), null); + + UniqueNamer uniqNamer = new UniqueNamer(settings); + + // First add the id columns to the descriptor list. The are the first columns + for (ResultInfo header : getConfig().getIdColumns().getIdResultInfos()) { + columnDescriptions.add(ColumnDescriptor.builder() + .label(uniqNamer.getUniqueName(header)) + .type(ResultType.StringT.getINSTANCE().typeInfo()) + .semantics(header.getSemantics()) + .build()); + } + + final UniqueNamer collector = new UniqueNamer(settings); + getResultInfos().forEach(info -> columnDescriptions.add(info.asColumnDescriptor(settings, collector))); + return columnDescriptions; + } + + @Override + public List getResultInfos() { + // See above: For now, the SQL connector doesn't handle types + return query.getResultInfos().stream().map(SqlResultInfo::new).collect(Collectors.toList()); + } + + @Override + public Stream streamResults() { + return result.getTable().stream(); + } + + @Override + public long resultRowCount() { + return result.getRowCount(); + } + + public void finish(final SqlExecutionResult result) { + this.result = result; + super.finish(ExecutionState.DONE); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlResultInfo.java b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlResultInfo.java new file mode 100644 index 0000000000..5ab7aa2102 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlResultInfo.java @@ -0,0 +1,44 @@ +package com.bakdata.conquery.sql.conquery; + +import java.util.Set; + +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.ResultInfo; +import com.bakdata.conquery.models.types.ResultType; +import com.bakdata.conquery.models.types.SemanticType; + +/** + * Temporary result info that sets all {@link ResultType} to {@link com.bakdata.conquery.models.types.ResultType.StringT}. 
+ */ +public class SqlResultInfo extends ResultInfo { + private final ResultInfo delegate; + + public SqlResultInfo(ResultInfo delegate) { + this.delegate = delegate; + } + + @Override + public String userColumnName(PrintSettings printSettings) { + return delegate.userColumnName(printSettings); + } + + @Override + public String defaultColumnName(PrintSettings printSettings) { + return delegate.defaultColumnName(printSettings); + } + + @Override + public ResultType getType() { + return ResultType.StringT.getINSTANCE(); + } + + @Override + public Set getSemantics() { + return delegate.getSemantics(); + } + + @Override + public String getDescription() { + return delegate.getDescription(); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conquery/package-info.java b/backend/src/main/java/com/bakdata/conquery/sql/conquery/package-info.java new file mode 100644 index 0000000000..9a7ac7868c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conquery/package-info.java @@ -0,0 +1,4 @@ +/** + * Integration with the existing ConQuery architecture. + */ +package com.bakdata.conquery.sql.conquery; diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/Converter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Converter.java new file mode 100644 index 0000000000..2557139ab9 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Converter.java @@ -0,0 +1,26 @@ +package com.bakdata.conquery.sql.conversion; + +import java.util.Optional; + +import com.bakdata.conquery.sql.conversion.context.ConversionContext; + +/** + * A converter converts an input into a result object if the input matches the conversion class. + * + * @param type that can be converted + * @param type of the result + */ +public interface Converter { + + default Optional tryConvert(I input, ConversionContext context) { + if (getConversionClass().isInstance(input)) { + return Optional.ofNullable(convert(getConversionClass().cast(input), context)); + } + return Optional.empty(); + } + + Class getConversionClass(); + + R convert(final C convert, final ConversionContext context); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/ConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/ConverterService.java new file mode 100644 index 0000000000..cddd02e0d1 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/ConverterService.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.sql.conversion; + +import java.util.List; + +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.google.common.collect.MoreCollectors; + +/** + * Converts an input to a result with an applicable converter. 
+ * + * @param type that can be converted + * @param type of the result + * @see Converter + */ +public abstract class ConverterService { + + private final List> converters; + + protected ConverterService(List> converters) { + this.converters = converters; + } + + public R convert(C selectNode, ConversionContext context) { + return converters.stream() + .flatMap(converter -> converter.tryConvert(selectNode, context).stream()) + .collect(MoreCollectors.onlyElement()); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverter.java new file mode 100644 index 0000000000..7eb6cb0c0c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverter.java @@ -0,0 +1,17 @@ +package com.bakdata.conquery.sql.conversion; + +import com.bakdata.conquery.models.query.Visitable; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; + +/** + * Interface for converters that implement the translation of a ConQuery query to an SQL query. + * + *

+ * A ConQuery is a graph that has a {@link com.bakdata.conquery.apiv1.query.QueryDescription} as its root. + * The children of the root are of type {@link com.bakdata.conquery.apiv1.query.CQElement}. + * + * @param type of the node to convert + */ +public interface NodeConverter extends Converter { + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverterService.java new file mode 100644 index 0000000000..f174a48265 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverterService.java @@ -0,0 +1,32 @@ +package com.bakdata.conquery.sql.conversion; + +import com.bakdata.conquery.apiv1.query.QueryDescription; +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.query.Visitable; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; + +/** + * Entry point for converting {@link QueryDescription} to an SQL query. + */ +public class NodeConverterService extends ConverterService { + + private final SqlDialect dialect; + private final SqlConnectorConfig config; + + public NodeConverterService(SqlDialect dialect, SqlConnectorConfig config) { + super(dialect.getNodeConverters()); + this.dialect = dialect; + this.config = config; + } + + public ConversionContext convert(QueryDescription queryDescription) { + ConversionContext initialCtx = ConversionContext.builder() + .config(config) + .nodeConverterService(this) + .sqlDialect(this.dialect) + .build(); + return convert(queryDescription, initialCtx); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/SqlConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/SqlConverter.java new file mode 100644 index 0000000000..8ee38b2f56 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/SqlConverter.java @@ -0,0 +1,22 @@ +package com.bakdata.conquery.sql.conversion; + +import com.bakdata.conquery.apiv1.query.QueryDescription; +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.sql.SqlQuery; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; +import org.jooq.conf.ParamType; + +public class SqlConverter { + + private final NodeConverterService nodeConverterService; + + public SqlConverter(SqlDialect dialect, SqlConnectorConfig config) { + this.nodeConverterService = new NodeConverterService(dialect, config); + } + + public SqlQuery convert(QueryDescription queryDescription) { + ConversionContext converted = nodeConverterService.convert(queryDescription); + return new SqlQuery(converted.getFinalQuery().getSQL(ParamType.INLINED)); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java new file mode 100644 index 0000000000..b07383dacf --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java @@ -0,0 +1,44 @@ +package com.bakdata.conquery.sql.conversion.context; + +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.sql.conversion.NodeConverterService; +import 
com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; +import lombok.Builder; +import lombok.Singular; +import lombok.Value; +import lombok.With; +import org.jooq.Record; +import org.jooq.Select; + +import java.util.List; + +@Value +@With +@Builder(toBuilder = true) +public class ConversionContext { + + SqlConnectorConfig config; + NodeConverterService nodeConverterService; + SqlDialect sqlDialect; + @Singular + List querySteps; + Select finalQuery; + boolean negation; + CDateRange dateRestrictionRange; + int queryStepCounter; + + + public boolean dateRestrictionActive() { + return this.dateRestrictionRange != null; + } + + public ConversionContext withQueryStep(QueryStep queryStep) { + return this.toBuilder() + .queryStep(queryStep) + .queryStepCounter(queryStepCounter + 1) + .build(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java new file mode 100644 index 0000000000..8e42dde561 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java @@ -0,0 +1,69 @@ +package com.bakdata.conquery.sql.conversion.context.selects; + +import java.util.List; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Stream; + +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import lombok.Builder; +import lombok.Value; +import lombok.With; +import org.jooq.Field; + +/** + * {@link ConceptSelects} represent all select fields of a {@link CQConcept}. + */ +@Value +@With +@Builder(toBuilder = true) +public class ConceptSelects implements Selects { + + Field primaryColumn; + Optional> dateRestriction; + Optional> validityDate; + List> eventSelect; + List> eventFilter; + List> groupSelect; + List> groupFilter; + + @Override + public ConceptSelects byName(String qualifier) { + return builder() + .primaryColumn(this.mapFieldToQualifier(qualifier, this.primaryColumn)) + .dateRestriction(this.mapFieldStreamToQualifier(qualifier, this.dateRestriction.stream()).findFirst()) + .validityDate(this.mapFieldStreamToQualifier(qualifier, this.validityDate.stream()).findFirst()) + .eventSelect(this.mapFieldStreamToQualifier(qualifier, this.eventSelect.stream()).toList()) + .eventFilter(this.mapFieldStreamToQualifier(qualifier, this.eventFilter.stream()).toList()) + .groupSelect(this.mapFieldStreamToQualifier(qualifier, this.groupSelect.stream()).toList()) + .groupFilter(this.mapFieldStreamToQualifier(qualifier, this.groupFilter.stream()).toList()) + .build(); + } + + @Override + public List> all() { + return Stream.concat( + this.primaryColumnAndValidityDate(), + this.explicitSelects().stream() + ).toList(); + } + + private Stream> primaryColumnAndValidityDate() { + return Stream.concat( + Stream.of(this.primaryColumn), + this.validityDate.stream() + ); + } + + @Override + public List> explicitSelects() { + return Stream.of( + this.dateRestriction.stream(), + this.eventSelect.stream(), + this.eventFilter.stream(), + this.groupSelect.stream(), + this.groupFilter.stream() + ).flatMap(Function.identity()).toList(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java new file mode 100644 index 0000000000..1ce36e8ae8 --- /dev/null +++ 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java @@ -0,0 +1,98 @@ +package com.bakdata.conquery.sql.conversion.context.selects; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Value; +import org.jooq.Field; +import org.jooq.impl.DSL; + +/** + * {@link MergedSelects} represent the combination of multiple {@link Selects}. + * Default selects fields of multiple {@link Selects} will be merged and special select fields like the primary column + * or validity dates will be unified or aggregated due to defined policies. + */ +@Value +@AllArgsConstructor(access = AccessLevel.PRIVATE) +public class MergedSelects implements Selects { + + String PRIMARY_COLUMN_ALIAS = "primary_column"; + + Field primaryColumn; + + Optional> validityDate; + + /** + * A merged list of all select fields, except the primary column and validity date, + * of the {@link QueryStep}'s passed to the {@link MergedSelects} constructor. + * Each field name is qualified with its associated CTE name. + */ + List> mergedSelects; + + public MergedSelects(List querySteps) { + this.primaryColumn = this.coalescePrimaryColumns(querySteps); + this.validityDate = this.extractValidityDate(querySteps); + this.mergedSelects = this.mergeSelects(querySteps); + } + + private Field coalescePrimaryColumns(List querySteps) { + List> primaryColumns = querySteps.stream() + .map(queryStep -> this.mapFieldToQualifier(queryStep.getCteName(), queryStep.getSelects().getPrimaryColumn())) + .toList(); + return DSL.coalesce((Object) primaryColumns.get(0), primaryColumns.subList(1, primaryColumns.size()).toArray()) + .as(PRIMARY_COLUMN_ALIAS); + } + + private Optional> extractValidityDate(List querySteps) { + // TODO: date aggregation... 
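coalescePrimaryColumns above folds the per-CTE primary columns into a single field, so rows that survive only one side of a full outer join (the OR case) still carry an entity id. A minimal jOOQ sketch of what it builds for two steps named "a" and "b" (step names illustrative):

import org.jooq.Field;
import org.jooq.impl.DSL;

class CoalesceSketch {

	public static void main(String[] args) {
		Field<Object> left = DSL.field(DSL.name("a", "primary_column"));
		Field<Object> right = DSL.field(DSL.name("b", "primary_column"));
		// Renders roughly as: coalesce("a"."primary_column", "b"."primary_column") as "primary_column"
		System.out.println(DSL.coalesce(left, right).as("primary_column"));
	}
}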
+ if (querySteps.isEmpty()) { + return Optional.empty(); + } + QueryStep firstQueryStep = querySteps.get(0); + return this.mapFieldStreamToQualifier(firstQueryStep.getCteName(), firstQueryStep.getSelects().getValidityDate().stream()) + .findFirst(); + } + + private List> mergeSelects(List queriesToJoin) { + return queriesToJoin.stream() + .flatMap(queryStep -> queryStep.getSelects().explicitSelects().stream() + .map(field -> this.mapFieldToQualifier(queryStep.getCteName(), field))) + .toList(); + } + + @Override + public MergedSelects byName(String qualifier) { + return new MergedSelects( + this.mapFieldToQualifier(qualifier, this.primaryColumn), + this.mapFieldStreamToQualifier(qualifier, this.validityDate.stream()).findFirst(), + this.mergedSelects.stream() + .map(field -> this.mapFieldToQualifier(qualifier, field)) + .toList() + ); + } + + @Override + public List> all() { + return Stream.concat( + this.primaryColumnAndValidityDate(), + this.mergedSelects.stream() + ).toList(); + } + + private Stream> primaryColumnAndValidityDate() { + return Stream.concat( + Stream.of(this.primaryColumn), + this.validityDate.stream() + ); + } + + @Override + public List> explicitSelects() { + return this.mergedSelects; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java new file mode 100644 index 0000000000..6229c247a9 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java @@ -0,0 +1,63 @@ +package com.bakdata.conquery.sql.conversion.context.selects; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.jooq.Field; +import org.jooq.impl.DSL; + +public interface Selects { + + Field getPrimaryColumn(); + + Optional> getValidityDate(); + + /** + * Returns the selected columns as fully qualified reference. + * + * @param qualifier the table name that creates these selects + * @return selects as fully qualified reference + * @see Selects#mapFieldToQualifier(String, Field) + */ + Selects byName(String qualifier); + + /** + * @return A list of all select fields including the primary column and validity date. + */ + List> all(); + + /** + * List of columns that the user explicitly referenced, either via a filter or a select. + * + * @return A list of all select fields WITHOUT implicitly selected columns like the primary column and validity date. + */ + List> explicitSelects(); + + default Stream> mapFieldStreamToQualifier(String qualifier, Stream> objectField) { + return objectField.map(column -> this.mapFieldToQualifier(qualifier, column)); + } + + /** + * Converts a select to its fully qualified reference. + * + *

+	 * <p>
+	 * Example:
+	 * <pre>{@code
+	 * with a as (select c1 - c2 as c
+	 * from t1)
+	 * select t1.c
+	 * from a
+	 * }</pre>
+	 *
+ * This function maps the select {@code c1 - c2 as c} to {@code t1.c}. + * + * @param qualifier + * @param field + * @return + */ + default Field mapFieldToQualifier(String qualifier, Field field) { + return DSL.field(DSL.name(qualifier, field.getName())); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/LogicalOperation.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/LogicalOperation.java new file mode 100644 index 0000000000..9f0351c854 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/LogicalOperation.java @@ -0,0 +1,6 @@ +package com.bakdata.conquery.sql.conversion.context.step; + +public enum LogicalOperation { + AND, + OR +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java new file mode 100644 index 0000000000..9d72ec56bd --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java @@ -0,0 +1,40 @@ +package com.bakdata.conquery.sql.conversion.context.step; + +import java.util.List; + +import com.bakdata.conquery.sql.conversion.context.selects.Selects; +import lombok.Builder; +import lombok.Value; +import org.jooq.Condition; +import org.jooq.Record; +import org.jooq.TableLike; +import org.jooq.impl.DSL; + +/** + * Intermediate representation of an SQL query. + */ +@Value +@Builder +public class QueryStep { + + String cteName; + Selects selects; + TableLike fromTable; + List conditions; + /** + * The CTEs referenced by this QueryStep + */ + List predecessors; + + public static TableLike toTableLike(String fromTableName) { + return DSL.table(DSL.name(fromTableName)); + } + + /** + * @return All selects re-mapped to a qualifier, which is the cteName of this QueryStep. + */ + public Selects getQualifiedSelects() { + return this.selects.byName(this.cteName); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStepTransformer.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStepTransformer.java new file mode 100644 index 0000000000..e41c457235 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStepTransformer.java @@ -0,0 +1,59 @@ +package com.bakdata.conquery.sql.conversion.context.step; + +import java.util.List; +import java.util.stream.Stream; + +import org.jooq.CommonTableExpression; +import org.jooq.DSLContext; +import org.jooq.Record; +import org.jooq.Select; +import org.jooq.impl.DSL; + +/** + * Transformer for translating the intermediate representation of {@link QueryStep} into the final SQL query. + */ +public class QueryStepTransformer { + + private final DSLContext dslContext; + + public QueryStepTransformer(DSLContext dslContext) { + this.dslContext = dslContext; + } + + /** + * Converts a given {@link QueryStep} into an executable SELECT statement. 
+ */ + public Select toSelectQuery(QueryStep queryStep) { + return this.dslContext.with(this.constructPredecessorCteList(queryStep)) + .select(queryStep.getSelects().all()) + .from(queryStep.getFromTable()) + .where(queryStep.getConditions()); + } + + private List> constructPredecessorCteList(QueryStep queryStep) { + return queryStep.getPredecessors().stream() + .flatMap(predecessor -> this.toCteList(predecessor).stream()) + .toList(); + } + + private List> toCteList(QueryStep queryStep) { + return Stream.concat( + this.predecessorCtes(queryStep), + Stream.of(this.toCte(queryStep)) + ).toList(); + } + + private Stream> predecessorCtes(QueryStep queryStep) { + return queryStep.getPredecessors().stream() + .flatMap(predecessor -> this.toCteList(predecessor).stream()); + } + + private CommonTableExpression toCte(QueryStep queryStep) { + return DSL.name(queryStep.getCteName()).as( + this.dslContext.select(queryStep.getSelects().all()) + .from(queryStep.getFromTable()) + .where(queryStep.getConditions()) + ); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/StepJoiner.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/StepJoiner.java new file mode 100644 index 0000000000..84b103f955 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/StepJoiner.java @@ -0,0 +1,89 @@ +package com.bakdata.conquery.sql.conversion.context.step; + +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import com.bakdata.conquery.apiv1.query.CQElement; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.selects.MergedSelects; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import org.jooq.Field; +import org.jooq.Record; +import org.jooq.Table; +import org.jooq.TableLike; +import org.jooq.TableOnConditionStep; +import org.jooq.impl.DSL; + +public class StepJoiner { + + public static ConversionContext joinChildren(Iterable children, ConversionContext context, LogicalOperation logicalOperation) { + + ConversionContext childrenContext = context; + for (CQElement childNode : children) { + childrenContext = context.getNodeConverterService().convert(childNode, childrenContext); + } + + List queriesToJoin = childrenContext.getQuerySteps(); + QueryStep andQueryStep = QueryStep.builder() + .cteName(constructJoinedQueryStepLabel(queriesToJoin, logicalOperation)) + .selects(new MergedSelects(queriesToJoin)) + .fromTable(constructJoinedTable(queriesToJoin, logicalOperation, context)) + .conditions(Collections.emptyList()) + .predecessors(queriesToJoin) + .build(); + + return context.withQuerySteps(List.of(andQueryStep)); + } + + private static String constructJoinedQueryStepLabel(List queriesToJoin, LogicalOperation logicalOperation) { + + String labelConnector = switch (logicalOperation) { + case AND -> "_AND_"; + case OR -> "_OR_"; + }; + + return queriesToJoin.stream() + .map(QueryStep::getCteName) + .collect(Collectors.joining(labelConnector)); + } + + private static TableLike constructJoinedTable(List queriesToJoin, LogicalOperation logicalOperation, ConversionContext context) { + + Table joinedQuery = getIntitialJoinTable(queriesToJoin); + + SqlFunctionProvider functionProvider = context.getSqlDialect().getFunction(); + JoinType joinType = switch (logicalOperation) { + case AND -> functionProvider::innerJoin; + case OR -> functionProvider::fullOuterJoin; + }; + + for (int i = 0; i 
< queriesToJoin.size() - 1; i++) { + + QueryStep leftPartQS = queriesToJoin.get(i); + QueryStep rightPartQS = queriesToJoin.get(i + 1); + + Field leftPartPrimaryColumn = leftPartQS.getQualifiedSelects().getPrimaryColumn(); + Field rightPartPrimaryColumn = rightPartQS.getQualifiedSelects().getPrimaryColumn(); + + joinedQuery = joinType.join(joinedQuery, rightPartQS, leftPartPrimaryColumn, rightPartPrimaryColumn); + } + + return joinedQuery; + } + + private static Table getIntitialJoinTable(List queriesToJoin) { + return DSL.table(DSL.name(queriesToJoin.get(0).getCteName())); + } + + @FunctionalInterface + private interface JoinType { + TableOnConditionStep join( + Table leftPartQueryBase, + QueryStep rightPartQS, + Field leftPartPrimaryColumn, + Field rightPartPrimaryColumn + ); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQAndConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQAndConverter.java new file mode 100644 index 0000000000..31404da21d --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQAndConverter.java @@ -0,0 +1,24 @@ +package com.bakdata.conquery.sql.conversion.cqelement; + +import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.step.LogicalOperation; +import com.bakdata.conquery.sql.conversion.context.step.StepJoiner; + +public class CQAndConverter implements NodeConverter { + + @Override + public Class getConversionClass() { + return CQAnd.class; + } + + @Override + public ConversionContext convert(CQAnd andNode, ConversionContext context) { + if (andNode.getChildren().size() == 1) { + return context.getNodeConverterService().convert(andNode.getChildren().get(0), context); + } + return StepJoiner.joinChildren(andNode.getChildren(), context, LogicalOperation.AND); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java new file mode 100644 index 0000000000..db4e9b43b2 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java @@ -0,0 +1,208 @@ +package com.bakdata.conquery.sql.conversion.cqelement; + +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Optional; + +import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; +import com.bakdata.conquery.sql.conversion.select.SelectConverterService; +import org.jooq.Condition; +import org.jooq.Field; + +public class CQConceptConverter implements NodeConverter { + + private final FilterConverterService filterConverterService; + private final SelectConverterService selectConverterService; + + public CQConceptConverter(FilterConverterService filterConverterService, SelectConverterService selectConverterService) { + this.filterConverterService = filterConverterService; 
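StepJoiner above maps the boolean semantics of the query tree onto joins over the primary column: AND becomes an inner join, OR a full outer join (which is why MergedSelects coalesces the primary columns). A minimal jOOQ sketch of the AND case for two CTEs "a" and "b"; the names are illustrative, and the real code routes the join through SqlFunctionProvider:

import org.jooq.Record;
import org.jooq.Table;
import org.jooq.impl.DSL;

class JoinSketch {

	public static void main(String[] args) {
		Table<Record> joined = DSL.table(DSL.name("a"))
								  .innerJoin(DSL.table(DSL.name("b")))
								  .on(DSL.field(DSL.name("a", "primary_column"))
										 .eq(DSL.field(DSL.name("b", "primary_column"))));
		System.out.println(joined); // "a" join "b" on "a"."primary_column" = "b"."primary_column"
	}
}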
+ this.selectConverterService = selectConverterService; + } + + @Override + public Class getConversionClass() { + return CQConcept.class; + } + + @Override + public ConversionContext convert(CQConcept node, ConversionContext context) { + + if (node.getTables().size() > 1) { + throw new UnsupportedOperationException("Can't handle concepts with multiple tables for now."); + } + + ConceptPreprocessingService preprocessingService = new ConceptPreprocessingService(node, context); + CQTable table = node.getTables().get(0); + String conceptLabel = this.getConceptLabel(node, context); + + QueryStep preprocessingStep = preprocessingService.buildPreprocessingQueryStepForTable(conceptLabel, table); + QueryStep dateRestriction = this.buildDateRestrictionQueryStep(context, node, conceptLabel, preprocessingStep); + QueryStep eventSelect = this.buildEventSelectQueryStep(context, table, conceptLabel, dateRestriction); + QueryStep eventFilter = this.buildEventFilterQueryStep(context, table, conceptLabel, eventSelect); + QueryStep finalStep = this.buildFinalQueryStep(conceptLabel, eventFilter); + + return context.withQueryStep(finalStep); + } + + private String getConceptLabel(CQConcept node, ConversionContext context) { + // only relevant for debugging purposes as it will be part of the generated SQL query + // we prefix each cte name of a concept with an incrementing counter to prevent naming collisions if the same concept is selected multiple times + return "%s_%s".formatted( + context.getQueryStepCounter(), + node.getUserOrDefaultLabel(Locale.ENGLISH) + .toLowerCase() + .replace(' ', '_') + .replaceAll("\\s", "_") + ); + } + + /** + * selects: + * - all of previous step + */ + private QueryStep buildDateRestrictionQueryStep( + ConversionContext context, + CQConcept node, + String conceptLabel, + QueryStep previous + ) { + if (((ConceptSelects) previous.getSelects()).getDateRestriction().isEmpty()) { + return previous; + } + + ConceptSelects dateRestrictionSelects = this.prepareDateRestrictionSelects(node, previous); + List dateRestriction = this.buildDateRestriction(context, previous); + + return QueryStep.builder() + .cteName(createCteName(conceptLabel, "_date_restriction")) + .fromTable(QueryStep.toTableLike(previous.getCteName())) + .selects(dateRestrictionSelects) + .conditions(dateRestriction) + .predecessors(List.of(previous)) + .build(); + } + + /** + * selects: + * - all of previous steps + * - transformed columns with selects + */ + private QueryStep buildEventSelectQueryStep( + ConversionContext context, + CQTable table, + String conceptLabel, QueryStep previous + ) { + if (table.getSelects().isEmpty()) { + return previous; + } + + ConceptSelects eventSelectSelects = this.prepareEventSelectSelects(context, table, previous); + + return QueryStep.builder() + .cteName(createCteName(conceptLabel, "_event_select")) + .fromTable(QueryStep.toTableLike(previous.getCteName())) + .selects(eventSelectSelects) + .conditions(Collections.emptyList()) + .predecessors(List.of(previous)) + .build(); + } + + /** + * selects: + * - all of previous step + * - remove filter + */ + private QueryStep buildEventFilterQueryStep( + ConversionContext context, + CQTable table, + String conceptLabel, + QueryStep previous + ) { + if (table.getFilters().isEmpty()) { + return previous; + } + + ConceptSelects eventFilterSelects = this.prepareEventFilterSelects(previous); + List eventFilterConditions = this.buildEventFilterConditions(context, table); + + return QueryStep.builder() + .cteName(createCteName(conceptLabel, 
"_event_filter")) + .fromTable(QueryStep.toTableLike(previous.getCteName())) + .selects(eventFilterSelects) + .conditions(eventFilterConditions) + .predecessors(List.of(previous)) + .build(); + } + + private ConceptSelects prepareDateRestrictionSelects(CQConcept node, QueryStep previous) { + ConceptSelects.ConceptSelectsBuilder selectsBuilder = ((ConceptSelects) previous.getQualifiedSelects()).toBuilder(); + selectsBuilder.dateRestriction(Optional.empty()); + if (node.isExcludeFromTimeAggregation()) { + selectsBuilder.validityDate(Optional.empty()); + } + return selectsBuilder.build(); + } + + private List buildDateRestriction(ConversionContext context, QueryStep previous) { + return ((ConceptSelects) previous.getSelects()).getDateRestriction() + .map(dateRestrictionColumn -> getDateRestrictionAsCondition(context, previous, dateRestrictionColumn)) + .orElseGet(Collections::emptyList); + } + + private static List getDateRestrictionAsCondition(ConversionContext context, QueryStep previous, Field dateRestrictionColumn) { + return previous.getSelects().getValidityDate().stream() + .map(validityDateColumn -> context.getSqlDialect().getFunction().dateRestriction(dateRestrictionColumn, validityDateColumn)) + .toList(); + } + + private ConceptSelects prepareEventSelectSelects( + ConversionContext context, + CQTable table, + QueryStep previous + ) { + return ((ConceptSelects) previous.getQualifiedSelects()).withEventSelect(this.getEventSelects(context, table)); + } + + private ConceptSelects prepareEventFilterSelects(QueryStep previous) { + return ((ConceptSelects) previous.getQualifiedSelects()).withEventFilter(Collections.emptyList()); + } + + private List buildEventFilterConditions(ConversionContext context, CQTable table) { + return table.getFilters().stream() + .map(filterValue -> this.filterConverterService.convert(filterValue, context)) + .toList(); + } + + private List> getEventSelects(ConversionContext context, CQTable table) { + return table.getSelects().stream() + .map(select -> (Field) this.selectConverterService.convert(select, context)) + .toList(); + } + + /** + * selects: + * - all of previous step + */ + private QueryStep buildFinalQueryStep(String conceptLabel, QueryStep previous) { + ConceptSelects finalSelects = ((ConceptSelects) previous.getQualifiedSelects()); + return QueryStep.builder() + .cteName(createCteName(conceptLabel, "")) + .fromTable(QueryStep.toTableLike(previous.getCteName())) + .selects(finalSelects) + .conditions(Collections.emptyList()) + .predecessors(List.of(previous)) + .build(); + } + + private static String createCteName(String conceptLabel, String suffix) { + return "concept_%s%s".formatted(conceptLabel, suffix); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQDateRestrictionConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQDateRestrictionConverter.java new file mode 100644 index 0000000000..37e3c1d56f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQDateRestrictionConverter.java @@ -0,0 +1,20 @@ +package com.bakdata.conquery.sql.conversion.cqelement; + +import com.bakdata.conquery.apiv1.query.concept.specific.CQDateRestriction; +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; + +public class CQDateRestrictionConverter implements NodeConverter { + + @Override + public ConversionContext 
convert(CQDateRestriction node, ConversionContext context) { + ConversionContext childContext = context.withDateRestrictionRange(CDateRange.of(node.getDateRange())); + return context.getNodeConverterService().convert(node.getChild(), childContext).withDateRestrictionRange(null); + } + + @Override + public Class getConversionClass() { + return CQDateRestriction.class; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQNegationConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQNegationConverter.java new file mode 100644 index 0000000000..ce1db05bd7 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQNegationConverter.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.sql.conversion.cqelement; + +import com.bakdata.conquery.apiv1.query.CQElement; +import com.bakdata.conquery.apiv1.query.concept.specific.CQNegation; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; + +public class CQNegationConverter implements NodeConverter { + + @Override + public Class getConversionClass() { + return CQNegation.class; + } + + @Override + public ConversionContext convert(CQNegation negationNode, ConversionContext context) { + return this.convertChildWithNegationActive(negationNode.getChild(), context); + } + + private ConversionContext convertChildWithNegationActive(CQElement child, ConversionContext context) { + // TODO: handle negation properly after GroupSelect/GroupFilter has been implemented + // - anti-join vs. negating conditions + // - handle double negation + return context.getNodeConverterService() + .convert(child, context.withNegation(true)) + .withNegation(false); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQOrConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQOrConverter.java new file mode 100644 index 0000000000..7df18dedaf --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQOrConverter.java @@ -0,0 +1,24 @@ +package com.bakdata.conquery.sql.conversion.cqelement; + +import com.bakdata.conquery.apiv1.query.concept.specific.CQOr; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.step.LogicalOperation; +import com.bakdata.conquery.sql.conversion.context.step.StepJoiner; + +public class CQOrConverter implements NodeConverter { + + @Override + public Class getConversionClass() { + return CQOr.class; + } + + @Override + public ConversionContext convert(CQOr orNode, ConversionContext context) { + if (orNode.getChildren().size() == 1) { + return context.getNodeConverterService().convert(orNode.getChildren().get(0), context); + } + return StepJoiner.joinChildren(orNode.getChildren(), context, LogicalOperation.OR); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java new file mode 100644 index 0000000000..2800d8b30a --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java @@ -0,0 +1,136 @@ +package com.bakdata.conquery.sql.conversion.cqelement; + +import java.util.Collections; +import java.util.List; +import java.util.Optional; + 
+import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import org.jooq.Field; +import org.jooq.impl.DSL; + +public class ConceptPreprocessingService { + + private static final String DATE_RESTRICTION_COLUMN_NAME = "date_restriction"; + private static final String VALIDITY_DATE_COLUMN_NAME_SUFFIX = "_validity_date"; + private final CQConcept concept; + private final ConversionContext context; + private final SqlFunctionProvider sqlFunctionProvider; + + public ConceptPreprocessingService(CQConcept concept, ConversionContext context) { + this.concept = concept; + this.context = context; + this.sqlFunctionProvider = this.context.getSqlDialect().getFunction(); + } + + /** + * selects: + * - (primary column) + * - date restriction + * - validity date + * - any filter (group/event) + * - any select (group/event) + */ + public QueryStep buildPreprocessingQueryStepForTable(String conceptLabel, CQTable table) { + + ConceptSelects.ConceptSelectsBuilder selectsBuilder = ConceptSelects.builder(); + + selectsBuilder.primaryColumn(DSL.field(context.getConfig().getPrimaryColumn())); + selectsBuilder.dateRestriction(this.getDateRestrictionSelect(table)); + selectsBuilder.validityDate(this.getValidityDateSelect(table, conceptLabel)); + + List> conceptSelectFields = this.getColumnSelectReferences(table); + List> conceptFilterFields = this.getColumnFilterReferences(table); + + // deduplicate because a concepts selects and filters can require the same columns + // and selecting the same columns several times will cause SQL errors + List> deduplicatedFilterFields = conceptFilterFields.stream() + .filter(field -> !conceptSelectFields.contains(field)) + .toList(); + + selectsBuilder.eventSelect(conceptSelectFields); + selectsBuilder.eventFilter(deduplicatedFilterFields); + + // not part of preprocessing yet + selectsBuilder.groupSelect(Collections.emptyList()) + .groupFilter(Collections.emptyList()); + + return QueryStep.builder() + .cteName(this.getPreprocessingStepLabel(conceptLabel)) + .fromTable(QueryStep.toTableLike(this.getFromTableName(table))) + .selects(selectsBuilder.build()) + .conditions(Collections.emptyList()) + .predecessors(Collections.emptyList()) + .build(); + } + + private Optional> getDateRestrictionSelect(CQTable table) { + if (!this.context.dateRestrictionActive() || !this.tableHasValidityDates(table)) { + return Optional.empty(); + } + CDateRange dateRestrictionRange = this.context.getDateRestrictionRange(); + Field dateRestriction = this.sqlFunctionProvider.daterange(dateRestrictionRange) + .as(DATE_RESTRICTION_COLUMN_NAME); + return Optional.of(dateRestriction); + } + + private Optional> getValidityDateSelect(CQTable table, String conceptLabel) { + if (!this.validityDateIsRequired(table)) { + return Optional.empty(); + } + Field validityDateRange = this.sqlFunctionProvider.daterange(table.findValidityDateColumn()) + .as(conceptLabel + VALIDITY_DATE_COLUMN_NAME_SUFFIX); + return Optional.of(validityDateRange); + } + + /** + * @return True, if a 
date restriction is active and the node is not excluded from time aggregation + * OR there is no date restriction, but still existing validity dates which are included in time aggregation. + */ + private boolean validityDateIsRequired(CQTable table) { + return this.tableHasValidityDates(table) + && !this.concept.isExcludeFromTimeAggregation(); + } + + private boolean tableHasValidityDates(CQTable table) { + return !table.getConnector() + .getValidityDates() + .isEmpty(); + } + + private List> getColumnSelectReferences(CQTable table) { + return table.getSelects().stream() + .flatMap(select -> select.getRequiredColumns().stream().map(column -> this.mapColumnOntoTable(column, table))) + .toList(); + } + + private List> getColumnFilterReferences(CQTable table) { + return table.getFilters().stream() + .map(FilterValue::getFilter) + .flatMap(filter -> filter.getRequiredColumns().stream().map(column -> this.mapColumnOntoTable(column, table))) + .toList(); + } + + private String getFromTableName(CQTable table) { + return table.getConnector() + .getTable() + .getName(); + } + + private Field mapColumnOntoTable(Column column, CQTable table) { + return DSL.field(DSL.name(this.getFromTableName(table), column.getName())); + } + + private String getPreprocessingStepLabel(String conceptLabel) { + return "concept_%s_preprocessing".formatted(conceptLabel); + } + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlDialect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlDialect.java new file mode 100644 index 0000000000..406a412792 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlDialect.java @@ -0,0 +1,46 @@ +package com.bakdata.conquery.sql.conversion.dialect; + +import java.util.List; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.query.Visitable; +import com.bakdata.conquery.sql.conversion.filter.FilterConverter; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.select.SelectConverter; +import org.jooq.DSLContext; + +public class PostgreSqlDialect implements SqlDialect { + + private final DSLContext dslContext; + + public PostgreSqlDialect(DSLContext dslContext) { + this.dslContext = dslContext; + } + + @Override + public DSLContext getDSLContext() { + return this.dslContext; + } + + @Override + public List> getNodeConverters() { + return getDefaultNodeConverters(); + } + + @Override + public List>> getFilterConverters() { + return getDefaultFilterConverters(); + } + + @Override + public List> getSelectConverters() { + return getDefaultSelectConverters(); + } + + @Override + public SqlFunctionProvider getFunction() { + return new PostgreSqlFunctionProvider(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java new file mode 100644 index 0000000000..5ee2069393 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java @@ -0,0 +1,52 @@ +package com.bakdata.conquery.sql.conversion.dialect; + +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.datasets.Column; +import org.jooq.Condition; +import org.jooq.Field; +import org.jooq.impl.DSL; 
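+
+// A sketch of the SQL this provider renders (the dates are example values only):
+// daterange(CDateRange) produces an inclusive Postgres range literal such as
+//
+//   daterange('2020-01-01'::date, '2020-12-31'::date, '[]')
+//
+// and dateRestriction() matches it against a validity date column with the
+// range-overlap operator, yielding a condition roughly like
+//
+//   daterange('2020-01-01'::date, '2020-12-31'::date, '[]') && validity_date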
+ +/** + * Provider of SQL functions for PostgreSQL. + * + * @see PostgreSQL Documentation + */ +public class PostgreSqlFunctionProvider implements SqlFunctionProvider { + + @Override + public Condition dateRestriction(Field dateRestrictionColumn, Field validityDateColumn) { + // the && operator checks if two ranges overlap (see https://www.postgresql.org/docs/15/functions-range.html) + return DSL.condition( + "{0} && {1}", + dateRestrictionColumn, + validityDateColumn + ); + } + + @Override + public Field daterange(CDateRange dateRestriction) { + return DSL.field( + "daterange({0}::date, {1}::date, '[]')", + DSL.val(dateRestriction.getMin().toString()), + DSL.val(dateRestriction.getMax().toString()) + ); + } + + @Override + public Field daterange(Column column) { + return switch (column.getType()) { + // if the column is of type DATE_RANGE, we can make use of Postgres' integrated daterange type + case DATE_RANGE -> DSL.field(column.getName()); + // if the validity date column is not of daterange type, we construct it manually + case DATE -> DSL.field( + "daterange({0}, {0}, '[]')", + DSL.field(column.getName()) + ); + default -> throw new IllegalArgumentException( + "Given column type '%s' can't be converted to a proper date restriction." + .formatted(column.getType()) + ); + }; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java new file mode 100644 index 0000000000..8ee2a08256 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java @@ -0,0 +1,82 @@ +package com.bakdata.conquery.sql.conversion.dialect; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.query.Visitable; +import com.bakdata.conquery.sql.conversion.Converter; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.step.QueryStepTransformer; +import com.bakdata.conquery.sql.conversion.cqelement.*; +import com.bakdata.conquery.sql.conversion.filter.FilterConverter; +import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; +import com.bakdata.conquery.sql.conversion.filter.MultiSelectConverter; +import com.bakdata.conquery.sql.conversion.filter.RealRangeConverter; +import com.bakdata.conquery.sql.conversion.query.ConceptQueryConverter; +import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter; +import com.bakdata.conquery.sql.conversion.select.FirstValueConverter; +import com.bakdata.conquery.sql.conversion.select.SelectConverter; +import com.bakdata.conquery.sql.conversion.select.SelectConverterService; +import com.bakdata.conquery.sql.conversion.supplier.SystemDateNowSupplier; +import org.jooq.DSLContext; + +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +public interface SqlDialect { + + SqlFunctionProvider getFunction(); + + List> getNodeConverters(); + + List>> getFilterConverters(); + + List> getSelectConverters(); + + DSLContext getDSLContext(); + + default List> getDefaultNodeConverters() { + return List.of( + new CQDateRestrictionConverter(), + new CQAndConverter(), + new CQOrConverter(), + new CQNegationConverter(), + new CQConceptConverter(new FilterConverterService(getFilterConverters()), new SelectConverterService(getSelectConverters())), + new
ConceptQueryConverter(new QueryStepTransformer(getDSLContext())) + ); + } + + default List>> getDefaultFilterConverters() { + return List.of( + new MultiSelectConverter(), + new RealRangeConverter() + ); + } + + default List> customizeSelectConverters(List> substitutes) { + return customize(getDefaultSelectConverters(), substitutes); + } + + default List> getDefaultSelectConverters() { + return List.of( + new FirstValueConverter(), + new DateDistanceConverter(new SystemDateNowSupplier()) + ); + } + + private static > List customize(List defaults, List substitutes) { + Map, C> substituteMap = getSubstituteMap(substitutes); + return defaults.stream() + .map(converter -> substituteMap.getOrDefault(converter.getConversionClass(), converter)) + .collect(Collectors.toList()); + } + + private static > Map, C> getSubstituteMap(List substitutes) { + return substitutes.stream() + .collect(Collectors.toMap( + Converter::getConversionClass, + Function.identity() + )); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java new file mode 100644 index 0000000000..cdfde5aa06 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java @@ -0,0 +1,83 @@ +package com.bakdata.conquery.sql.conversion.dialect; + +import java.sql.Date; + +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import org.jooq.Condition; +import org.jooq.DatePart; +import org.jooq.Field; +import org.jooq.Record; +import org.jooq.Table; +import org.jooq.TableOnConditionStep; +import org.jooq.impl.DSL; + +/** + * Provider of SQL functions. + */ +public interface SqlFunctionProvider { + + String DEFAULT_DATE_FORMAT = "yyyy-mm-dd"; + + + Condition dateRestriction(Field dateRestrictionColumn, Field validityDateColumn); + + /** + * @return A daterange for a date restriction. + */ + Field daterange(CDateRange dateRestriction); + + /** + * @return A daterange for an existing column. 
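+ * A dialect may either pass a native range column through unchanged or build a
+ * single-day range from a plain DATE column; the PostgreSQL implementation, for
+ * example, renders daterange(col, col, '[]') in the DATE case.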
+ */ + Field daterange(Column column); + + default Field toDate(String dateColumn) { + return DSL.toDate(dateColumn, DEFAULT_DATE_FORMAT); + } + + default Field dateDistance(DatePart timeUnit, Date endDate, Column startDateColumn) { + if (startDateColumn.getType() != MajorTypeId.DATE) { + throw new UnsupportedOperationException("Can't calculate date distance to column of type " + + startDateColumn.getType()); + } + // we can now safely cast to Field of type Date + Field startDate = DSL.field(startDateColumn.getName(), Date.class); + return DSL.dateDiff(timeUnit, startDate, endDate); + } + + default Condition in(String columnName, String[] values) { + return DSL.field(columnName) + .in(values); + } + + default Field first(String columnName) { + // TODO: this is just a temporary placeholder + return DSL.field(columnName); + } + + default TableOnConditionStep innerJoin( + Table leftPartQueryBase, + QueryStep rightPartQS, + Field leftPartPrimaryColumn, + Field rightPartPrimaryColumn + ) { + return leftPartQueryBase + .innerJoin(DSL.name(rightPartQS.getCteName())) + .on(leftPartPrimaryColumn.eq(rightPartPrimaryColumn)); + } + + default TableOnConditionStep fullOuterJoin( + Table leftPartQueryBase, + QueryStep rightPartQS, + Field leftPartPrimaryColumn, + Field rightPartPrimaryColumn + ) { + return leftPartQueryBase + .fullOuterJoin(DSL.name(rightPartQS.getCteName())) + .on(leftPartPrimaryColumn.eq(rightPartPrimaryColumn)); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java new file mode 100644 index 0000000000..1b645ba0fa --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java @@ -0,0 +1,21 @@ +package com.bakdata.conquery.sql.conversion.filter; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; +import com.bakdata.conquery.sql.conversion.Converter; +import org.jooq.Condition; + +/** + * Converts a {@link com.bakdata.conquery.apiv1.query.concept.filter.FilterValue} + * to a condition for a SQL WHERE clause. + * + * @param The type of Filter this converter is responsible for. 
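+ *
+ * Note that implementations currently assume single-column filters:
+ * getColumnName(FilterValue) resolves the column via SingleColumnFilter and
+ * would need to be revisited for non-SingleColumnFilters.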
+ */ +public interface FilterConverter> extends Converter { + + static String getColumnName(FilterValue filter) { + // works for now but we might have to distinguish later if we encounter non-SingleColumnFilters + return ((SingleColumnFilter) filter.getFilter()).getColumn().getName(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverterService.java new file mode 100644 index 0000000000..1b5a4253af --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverterService.java @@ -0,0 +1,26 @@ +package com.bakdata.conquery.sql.conversion.filter; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.sql.conversion.ConverterService; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import org.jooq.Condition; +import org.jooq.impl.DSL; + +import java.util.List; + +public class FilterConverterService extends ConverterService, Condition> { + + public FilterConverterService(List> converters) { + super(converters); + } + + @Override + public Condition convert(FilterValue filterValue, ConversionContext context) { + Condition condition = super.convert(filterValue, context); + if (!context.isNegation()) { + return condition; + } + return DSL.not(condition); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/MultiSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/MultiSelectConverter.java new file mode 100644 index 0000000000..14239afd0a --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/MultiSelectConverter.java @@ -0,0 +1,19 @@ +package com.bakdata.conquery.sql.conversion.filter; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import org.jooq.Condition; + +public class MultiSelectConverter implements FilterConverter { + + @Override + public Condition convert(FilterValue.CQBigMultiSelectFilter filter, ConversionContext context) { + return context.getSqlDialect().getFunction() + .in(FilterConverter.getColumnName(filter), filter.getValue()); + } + + @Override + public Class getConversionClass() { + return FilterValue.CQBigMultiSelectFilter.class; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/RealRangeConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/RealRangeConverter.java new file mode 100644 index 0000000000..ec77749315 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/RealRangeConverter.java @@ -0,0 +1,30 @@ +package com.bakdata.conquery.sql.conversion.filter; + +import java.util.Optional; +import java.util.stream.Stream; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import org.jooq.Condition; +import org.jooq.Field; +import org.jooq.impl.DSL; + +public class RealRangeConverter implements FilterConverter { + + @Override + public Condition convert(FilterValue.CQRealRangeFilter filter, ConversionContext context) { + Field field = DSL.field(FilterConverter.getColumnName(filter)); + + Optional greaterOrEqualCondition = Optional.ofNullable(filter.getValue().getMin()).map(field::greaterOrEqual); + Optional lessOrEqualCondition = 
Optional.ofNullable(filter.getValue().getMax()).map(field::lessOrEqual); + return Stream.concat(greaterOrEqualCondition.stream(), lessOrEqualCondition.stream()) + .reduce(Condition::and) + .orElseThrow(() -> new IllegalArgumentException("Missing min or max value for real range filter.")); + } + + @Override + public Class getConversionClass() { + return FilterValue.CQRealRangeFilter.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/package-info.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/package-info.java new file mode 100644 index 0000000000..21a2e2c710 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/package-info.java @@ -0,0 +1,4 @@ +/** + * Module implements the conversion of {@link com.bakdata.conquery.apiv1.query.QueryDescription} to a SQL query. + */ +package com.bakdata.conquery.sql.conversion; diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java new file mode 100644 index 0000000000..9dd3367ed8 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java @@ -0,0 +1,44 @@ +package com.bakdata.conquery.sql.conversion.query; + +import java.util.List; + +import com.bakdata.conquery.apiv1.query.ConceptQuery; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.context.step.QueryStepTransformer; +import org.jooq.Record; +import org.jooq.Select; + +public class ConceptQueryConverter implements NodeConverter { + + private final QueryStepTransformer queryStepTransformer; + + public ConceptQueryConverter(QueryStepTransformer queryStepTransformer) { + this.queryStepTransformer = queryStepTransformer; + } + + @Override + public ConversionContext convert(ConceptQuery node, ConversionContext context) { + + ConversionContext contextAfterConversion = context.getNodeConverterService() + .convert(node.getRoot(), context); + + QueryStep preFinalStep = contextAfterConversion.getQuerySteps().iterator().next(); + QueryStep finalStep = QueryStep.builder() + .cteName(null) // the final QueryStep won't be converted to a CTE + .selects(preFinalStep.getQualifiedSelects()) + .fromTable(QueryStep.toTableLike(preFinalStep.getCteName())) + .conditions(preFinalStep.getConditions()) + .predecessors(List.of(preFinalStep)) + .build(); + + Select finalQuery = this.queryStepTransformer.toSelectQuery(finalStep); + return context.withFinalQuery(finalQuery); + } + + @Override + public Class getConversionClass() { + return ConceptQuery.class; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java new file mode 100644 index 0000000000..d70800c39f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java @@ -0,0 +1,66 @@ +package com.bakdata.conquery.sql.conversion.select; + +import java.sql.Date; +import java.time.LocalDate; +import java.time.temporal.ChronoUnit; +import java.util.Map; +import java.util.Objects; + +import com.bakdata.conquery.models.datasets.Column; +import 
com.bakdata.conquery.models.datasets.concepts.select.connector.specific.DateDistanceSelect; +import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; +import org.jooq.DatePart; +import org.jooq.Field; + +public class DateDistanceConverter implements SelectConverter { + + private static final Map DATE_CONVERSION = Map.of( + ChronoUnit.DECADES, DatePart.DECADE, + ChronoUnit.YEARS, DatePart.YEAR, + ChronoUnit.DAYS, DatePart.DAY, + ChronoUnit.MONTHS, DatePart.MONTH, + ChronoUnit.CENTURIES, DatePart.CENTURY + ); + private final DateNowSupplier dateNowSupplier; + + public DateDistanceConverter(DateNowSupplier dateNowSupplier) { + this.dateNowSupplier = dateNowSupplier; + } + + @Override + public Field convert(DateDistanceSelect select, ConversionContext context) { + DatePart timeUnit = DATE_CONVERSION.get(select.getTimeUnit()); + if (timeUnit == null) { + throw new UnsupportedOperationException("Chrono unit %s is not supported".formatted(select.getTimeUnit())); + } + Column startDateColumn = select.getColumn(); + Date endDate = getEndDate(context); + + if (startDateColumn.getType() != MajorTypeId.DATE) { + throw new UnsupportedOperationException("Can't calculate date distance to column of type " + + startDateColumn.getType()); + } + return context.getSqlDialect().getFunction().dateDistance(timeUnit, endDate, startDateColumn) + .as(select.getLabel()); + } + + private Date getEndDate(ConversionContext context) { + LocalDate endDate; + // if a date restriction is set, the max of the date restriction equals the end date of the date distance + if (Objects.nonNull(context.getDateRestrictionRange())) { + endDate = context.getDateRestrictionRange().getMax(); + } + else { + // otherwise the current date is the upper bound + endDate = dateNowSupplier.getLocalDateNow(); + } + return Date.valueOf(endDate); + } + + @Override + public Class getConversionClass() { + return DateDistanceSelect.class; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java new file mode 100644 index 0000000000..50a66ddb03 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java @@ -0,0 +1,19 @@ +package com.bakdata.conquery.sql.conversion.select; + +import com.bakdata.conquery.models.datasets.concepts.select.connector.FirstValueSelect; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import org.jooq.Field; + +public class FirstValueConverter implements SelectConverter { + + public Field convert(FirstValueSelect select, ConversionContext context) { + SqlFunctionProvider fn = context.getSqlDialect().getFunction(); + return fn.first(select.getColumn().getName()); + } + + @Override + public Class getConversionClass() { + return FirstValueSelect.class; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverter.java new file mode 100644 index 0000000000..1ec492c90b --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverter.java @@ -0,0 +1,14 @@ +package com.bakdata.conquery.sql.conversion.select; + +import 
com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.sql.conversion.Converter; +import org.jooq.Field; + +/** + * Converts a {@link com.bakdata.conquery.models.datasets.concepts.select.Select} to a field for a SQL SELECT statement. + * + * @param The type of Select this converter is responsible for. + */ +public interface SelectConverter extends Converter> { + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverterService.java new file mode 100644 index 0000000000..88527eede2 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverterService.java @@ -0,0 +1,14 @@ +package com.bakdata.conquery.sql.conversion.select; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.sql.conversion.ConverterService; +import org.jooq.Field; + +public class SelectConverterService extends ConverterService> { + + public SelectConverterService(List> converters) { + super(converters); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/supplier/DateNowSupplier.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/supplier/DateNowSupplier.java new file mode 100644 index 0000000000..3798ed9d76 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/supplier/DateNowSupplier.java @@ -0,0 +1,9 @@ +package com.bakdata.conquery.sql.conversion.supplier; + +import java.time.LocalDate; + +public interface DateNowSupplier { + + LocalDate getLocalDateNow(); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/supplier/SystemDateNowSupplier.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/supplier/SystemDateNowSupplier.java new file mode 100644 index 0000000000..5b24826dd0 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/supplier/SystemDateNowSupplier.java @@ -0,0 +1,12 @@ +package com.bakdata.conquery.sql.conversion.supplier; + +import java.time.LocalDate; + +public class SystemDateNowSupplier implements DateNowSupplier { + + @Override + public LocalDate getLocalDateNow() { + return LocalDate.now(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlEntityResult.java b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlEntityResult.java new file mode 100644 index 0000000000..9fbe011230 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlEntityResult.java @@ -0,0 +1,55 @@ +package com.bakdata.conquery.sql.execution; + +import java.util.Collections; +import java.util.List; +import java.util.function.UnaryOperator; +import java.util.stream.Stream; + +import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.models.query.results.EntityResult; +import com.fasterxml.jackson.annotation.JsonCreator; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; +import lombok.ToString; + +@ToString +@Getter +@Setter +@AllArgsConstructor(onConstructor_=@JsonCreator) +@CPSType(id="SQL_RESULT", base= EntityResult.class) +public class SqlEntityResult implements EntityResult { + + private final int entityId; + private final String id; + private Object[] values; + + public String getId() { + return id; + } + + @Override + public int getEntityId() { + return entityId; + } + + @Override + public Stream streamValues() { + return 
Stream.ofNullable(values); + } + + @Override + public int columnCount() { + return values.length; + } + + @Override + public void modifyResultLinesInplace(UnaryOperator lineModifier) { + values = lineModifier.apply(values); + } + + @Override + public List listResultLines() { + return Collections.singletonList(values); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionResult.java b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionResult.java new file mode 100644 index 0000000000..d4b828437a --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionResult.java @@ -0,0 +1,22 @@ +package com.bakdata.conquery.sql.execution; + +import java.util.List; + +import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.models.query.results.SinglelineEntityResult; +import lombok.Value; + +@Value +public class SqlExecutionResult { + + List columnNames; + List table; + int rowCount; + + public SqlExecutionResult(List columnNames, List table) { + this.columnNames = columnNames; + this.table = table; + this.rowCount = table.size(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java new file mode 100644 index 0000000000..5bea87bd0d --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java @@ -0,0 +1,90 @@ +package com.bakdata.conquery.sql.execution; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.IntStream; + +import com.bakdata.conquery.models.error.ConqueryError; +import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.models.query.results.SinglelineEntityResult; +import com.bakdata.conquery.sql.conquery.SqlManagedQuery; +import com.google.common.base.Stopwatch; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.jooq.DSLContext; + +@RequiredArgsConstructor +@Slf4j +public class SqlExecutionService { + + private final DSLContext dslContext; + + public SqlExecutionResult execute(SqlManagedQuery sqlQuery) { + log.info("Starting SQL execution[{}]", sqlQuery.getQueryId()); + Stopwatch stopwatch = Stopwatch.createStarted(); + SqlExecutionResult result = dslContext.connectionResult(connection -> this.createStatementAndExecute(sqlQuery, connection)); + log.info("Finished SQL execution[{}] with {} results within {}", sqlQuery.getQueryId(), result.getRowCount(), stopwatch.elapsed()); + return result; + } + + private SqlExecutionResult createStatementAndExecute(SqlManagedQuery sqlQuery, Connection connection) { + + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(sqlQuery.getSqlQuery().getSqlString())) { + int columnCount = resultSet.getMetaData().getColumnCount(); + List columnNames = this.getColumnNames(resultSet, columnCount); + List resultTable = this.createResultTable(resultSet, columnCount); + + return new SqlExecutionResult(columnNames, resultTable); + } + catch (SQLException e) { + throw new ConqueryError.SqlError(e); + } + } + + private List createResultTable(ResultSet resultSet, int columnCount) throws SQLException { + List resultTable = new ArrayList<>(resultSet.getFetchSize()); + while (resultSet.next()) { + Object[] resultRow = 
this.getResultRow(resultSet, columnCount); + resultTable.add(new SqlEntityResult(resultSet.getRow(), resultSet.getObject(1).toString(), resultRow)); + } + return resultTable; + } + + private List getColumnNames(ResultSet resultSet, int columnCount) { + // JDBC ResultSet indices start with 1 + return IntStream.rangeClosed(2, columnCount) + .mapToObj(columnIndex -> this.getColumnName(resultSet, columnIndex)) + .toList(); + } + + private String getColumnName(ResultSet resultSet, int columnIndex) { + try { + return resultSet.getMetaData().getColumnName(columnIndex); + } + catch (SQLException e) { + throw new ConqueryError.SqlError(e); + } + } + + private Object[] getResultRow(ResultSet resultSet, int columnCount) { + // JDBC ResultSet indices start with 1 and we skip the first column because it contains the id + return IntStream.rangeClosed(2, columnCount) + .mapToObj(columnIndex -> this.getValueOfColumn(resultSet, columnIndex)) + .toArray(); + } + + private String getValueOfColumn(ResultSet resultSet, int columnIndex) { + try { + return resultSet.getString(columnIndex); + } + catch (SQLException e) { + throw new ConqueryError.SqlError(e); + } + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/tasks/ReportConsistencyTask.java b/backend/src/main/java/com/bakdata/conquery/tasks/ReportConsistencyTask.java index fe521f008a..533c9fdc8c 100644 --- a/backend/src/main/java/com/bakdata/conquery/tasks/ReportConsistencyTask.java +++ b/backend/src/main/java/com/bakdata/conquery/tasks/ReportConsistencyTask.java @@ -4,21 +4,23 @@ import java.util.List; import java.util.Map; +import com.bakdata.conquery.mode.cluster.ClusterState; import com.bakdata.conquery.models.messages.namespaces.specific.RequestConsistency; -import com.bakdata.conquery.models.worker.DatasetRegistry; import io.dropwizard.servlets.tasks.Task; public class ReportConsistencyTask extends Task { - private final DatasetRegistry datasetRegistry; + private final ClusterState clusterState; - public ReportConsistencyTask(DatasetRegistry datasetRegistry) { + public ReportConsistencyTask(ClusterState clusterState) { super("report-consistency"); - this.datasetRegistry = datasetRegistry; + this.clusterState = clusterState; } @Override public void execute(Map> parameters, PrintWriter output) throws Exception { - datasetRegistry.getWorkers().values().forEach(w -> w.send(new RequestConsistency())); + clusterState.getWorkerHandlers().values().stream() + .flatMap(ns -> ns.getWorkers().stream()) + .forEach(worker -> worker.send(new RequestConsistency())); } } diff --git a/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java b/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java index 914ebf05d2..9778859a0c 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java +++ b/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java @@ -2,16 +2,21 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; +import c10n.C10N; import com.bakdata.conquery.apiv1.query.CQElement; import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; @@ -19,6 +24,7 @@ 
import com.bakdata.conquery.apiv1.query.concept.specific.CQOr; import com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery; import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; +import com.bakdata.conquery.internationalization.CQElementC10n; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.common.CDateSet; @@ -26,14 +32,17 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.NamespacedIdentifiableHolding; +import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.visitor.QueryVisitor; +import com.google.common.base.Strings; import com.google.common.collect.ClassToInstanceMap; import lombok.Getter; import lombok.NonNull; @@ -44,6 +53,7 @@ @UtilityClass public class QueryUtils { + private static final int MAX_CONCEPT_LABEL_CONCAT_LENGTH = 70; /** * Provides a starting operator for consumer chains, that does nothing. */ @@ -213,4 +223,81 @@ public static QueryExecutionContext determineDateAggregatorForContext(QueryExecu } return ctx.withQueryDateAggregator(altValidityDateAggregator.get()); } + + public static String makeQueryLabel(final Visitable query, PrintSettings cfg, ManagedExecutionId id) { + final StringBuilder sb = new StringBuilder(); + + final Map, List> sortedContents = + Visitable.stream(query) + .collect(Collectors.groupingBy(Visitable::getClass)); + + int sbStartSize = sb.length(); + + // Check for CQExternal + List externals = sortedContents.getOrDefault(CQExternal.class, Collections.emptyList()); + if (!externals.isEmpty()) { + if (!sb.isEmpty()) { + sb.append(" "); + } + sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).external()); + } + + // Check for CQReused + if (sortedContents.containsKey(CQReusedQuery.class)) { + if (!sb.isEmpty()) { + sb.append(" "); + } + sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).reused()); + } + + + // Check for CQConcept + if (sortedContents.containsKey(CQConcept.class)) { + if (!sb.isEmpty()) { + sb.append(" "); + } + // Track length of text we are appending for concepts. 
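+ // Labels are concatenated only while the running total stays below
+ // MAX_CONCEPT_LABEL_CONCAT_LENGTH (70 chars): e.g. for three 30-char labels,
+ // the first two fit (total 60), the third pushes the total to 90 and is
+ // dropped, and the "furtherConcepts" hint is appended below instead.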
+ final AtomicInteger length = new AtomicInteger(); + + sortedContents.get(CQConcept.class) + .stream() + .map(CQConcept.class::cast) + + .map(c -> makeLabelWithRootAndChild(c, cfg)) + .filter(Predicate.not(Strings::isNullOrEmpty)) + .distinct() + + .takeWhile(elem -> length.addAndGet(elem.length()) < MAX_CONCEPT_LABEL_CONCAT_LENGTH) + .forEach(label -> sb.append(label).append(" ")); + + // Last entry will output one Space that we don't want + if (!sb.isEmpty()) { + sb.deleteCharAt(sb.length() - 1); + } + + // If not all Concept could be included in the name, point that out + if (length.get() > MAX_CONCEPT_LABEL_CONCAT_LENGTH) { + sb.append(" ").append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).furtherConcepts()); + } + } + + + // Fallback to id if nothing could be extracted from the query description + if (sbStartSize == sb.length()) { + sb.append(id.getExecution()); + } + + return sb.toString(); + } + + + private static String makeLabelWithRootAndChild(CQConcept cqConcept, PrintSettings cfg) { + String label = cqConcept.getUserOrDefaultLabel(cfg.getLocale()); + if (label == null) { + label = cqConcept.getConcept().getLabel(); + } + + // Concat everything with dashes + return label.replace(" ", "-"); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java b/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java index a3c0ec6654..3463601bff 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java @@ -12,9 +12,13 @@ import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.mapping.AutoIncrementingPseudomizer; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; +import com.bakdata.conquery.models.identifiable.mapping.EntityPrintId; import com.bakdata.conquery.models.identifiable.mapping.FullIdPrinter; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; +import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.sql.conquery.SqlManagedQuery; +import com.bakdata.conquery.sql.execution.SqlEntityResult; import lombok.experimental.UtilityClass; @UtilityClass @@ -53,6 +57,10 @@ public static IdPrinter getIdPrinter(Subject owner, ManagedExecution execution, .orElseThrow(); if (owner.isPermitted(execution.getDataset(), Ability.PRESERVE_ID)) { + // todo(tm): The integration of ids in the sql connector needs to be properly managed + if (execution instanceof SqlManagedQuery) { + return entityResult -> EntityPrintId.from(((SqlEntityResult) entityResult).getId()); + } return new FullIdPrinter(namespace.getStorage().getPrimaryDictionary(), namespace.getStorage().getIdMapping(), size, pos); } diff --git a/backend/src/main/resources/com/bakdata/conquery/external/openapi-form-backend.yaml b/backend/src/main/resources/com/bakdata/conquery/external/openapi-form-backend.yaml index b045d29b73..5d999797da 100644 --- a/backend/src/main/resources/com/bakdata/conquery/external/openapi-form-backend.yaml +++ b/backend/src/main/resources/com/bakdata/conquery/external/openapi-form-backend.yaml @@ -132,6 +132,36 @@ paths: application/json: schema: $ref: '#/components/schemas/error' + /task/{id}/cancel: + parameters: + - in: path + name: id + schema: + $ref: "#/components/schemas/taskId" + required: true + description: The task id + post: + summary: Request the cancellation of a task + operationId: 
cancelTask + security: + - ApiKeyAuth: [ ] + - { } + tags: + - Task + responses: + "200": + description: State of the now cancelled task + content: + application/json: + schema: + $ref: "#/components/schemas/taskState" + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/error' + /health: get: summary: Request health State diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/index.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/index.html.ftl index e5c39c8f3b..29168ef3d4 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/index.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/index.html.ftl @@ -2,9 +2,9 @@ <@layout.layout>
- <#list ctx.namespaces.shardNodes as key,shardNode> + <#list ctx.shardNodes as key,shardNode> ${key}
- \ No newline at end of file + diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl index 4a9306ec02..3af578bd0e 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl @@ -70,7 +70,7 @@
- <#list ctx.namespaces.shardNodes as key,shardNode> + <#list ctx.shardNodes as key,shardNode>
@@ -129,4 +129,4 @@ {"factor": 1000000000, "unit" :"G"}, {"factor": 1000000000000, "unit" : "T" } ] /> <#assign siStr=(num / (siMap[thousands].factor))?string("0.# ") + siMap[thousands].unit /> <#return siStr /> - \ No newline at end of file + diff --git a/backend/src/test/java/com/bakdata/conquery/TestTags.java b/backend/src/test/java/com/bakdata/conquery/TestTags.java index 96ddc92423..2884330831 100644 --- a/backend/src/test/java/com/bakdata/conquery/TestTags.java +++ b/backend/src/test/java/com/bakdata/conquery/TestTags.java @@ -5,7 +5,10 @@ public class TestTags { public static final String INTEGRATION_PROGRAMMATIC = "INTEGRATION_PROGRAMMATIC"; public static final String INTEGRATION_JSON = "INTEGRATION_JSON"; + public static final String INTEGRATION_SQL_BACKEND = "INTEGRATION_SQL_BACKEND"; + public static final String TEST_DIRECTORY_ENVIRONMENT_VARIABLE = "CONQUERY_TEST_DIRECTORY"; + public static final String SQL_BACKEND_TEST_DIRECTORY_ENVIRONMENT_VARIABLE = "SQL_TEST_DIRECTORY"; public static final String TEST_PROGRAMMATIC_REGEX_FILTER = "CONQUERY_TEST_PROGRAMMATIC_REGEX_FILTER"; diff --git a/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java b/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java index 15870d993e..3d29b78c03 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java +++ b/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java @@ -48,6 +48,7 @@ import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.google.common.collect.ImmutableList; import lombok.SneakyThrows; @@ -59,7 +60,8 @@ public class StoredQueriesProcessorTest { public static final AuthorizationController AUTHORIZATION_CONTROLLER = new AuthorizationController(STORAGE, new DevelopmentAuthorizationConfig()); public static final ConqueryConfig CONFIG = new ConqueryConfig(); - private static final QueryProcessor processor = new QueryProcessor(new DatasetRegistry(0, CONFIG, null), STORAGE, CONFIG); + private static final DatasetRegistry datasetRegistry = new DatasetRegistry<>(0, CONFIG, null, null); + private static final QueryProcessor processor = new QueryProcessor(datasetRegistry, STORAGE, CONFIG); private static final Dataset DATASET_0 = new Dataset() {{ setName("dataset0"); diff --git a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java index 6a989bb02d..d74b89ac95 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java +++ b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java @@ -46,6 +46,7 @@ import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.IdResolveContext; +import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.fasterxml.jackson.databind.JsonNode; @@ -101,7 +102,7 @@ public void setupTestClass() throws Exception { doAnswer(invocation -> { final DatasetId id = invocation.getArgument(0); - Namespace namespaceMock = Mockito.mock(Namespace.class); 
+ Namespace namespaceMock = Mockito.mock(LocalNamespace.class); if (id.equals(datasetId)) { when(namespaceMock.getDataset()).thenReturn(dataset); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/ConqueryIntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/ConqueryIntegrationTests.java index 7b9b7ebb4a..7899e06272 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/ConqueryIntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/ConqueryIntegrationTests.java @@ -1,30 +1,32 @@ package com.bakdata.conquery.integration; +import java.util.List; +import java.util.stream.Stream; + import com.bakdata.conquery.TestTags; import org.junit.jupiter.api.DynamicNode; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.TestFactory; -import java.util.List; -import java.util.stream.Stream; - public class ConqueryIntegrationTests extends IntegrationTests { - public ConqueryIntegrationTests() { - super("tests/", "com.bakdata.conquery.integration"); - } - - @Override - @TestFactory - @Tag(TestTags.INTEGRATION_JSON) - public List jsonTests() { - return super.jsonTests(); - } - - @Override - @TestFactory - @Tag(TestTags.INTEGRATION_PROGRAMMATIC) - public Stream programmaticTests() { - return super.programmaticTests(); - } + + public ConqueryIntegrationTests() { + super("tests/", "com.bakdata.conquery.integration"); + } + + @Override + @TestFactory + @Tag(TestTags.INTEGRATION_JSON) + public List jsonTests() { + return super.jsonTests(); + } + + @Override + @TestFactory + @Tag(TestTags.INTEGRATION_PROGRAMMATIC) + public Stream programmaticTests() { + return super.programmaticTests(); + } + } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTest.java b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTest.java index c5c10c8a22..72c052b0a1 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTest.java @@ -44,7 +44,7 @@ abstract class Simple implements IntegrationTest { public void execute(String name, TestConquery testConquery) throws Exception { StandaloneSupport conquery = testConquery.getSupport(name); // Because Shiro works with a static Security manager - testConquery.getStandaloneCommand().getManager().getAuthController().registerStaticSecurityManager(); + testConquery.getStandaloneCommand().getManagerNode().getAuthController().registerStaticSecurityManager(); try { execute(conquery); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java index 17e9b2f1e8..bd2fd0f5bc 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java @@ -7,6 +7,7 @@ import java.io.InputStream; import java.net.URI; import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -21,13 +22,17 @@ import com.bakdata.conquery.TestTags; import com.bakdata.conquery.integration.json.JsonIntegrationTest; +import com.bakdata.conquery.integration.sql.SqlIntegrationTest; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.View; 
import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; import com.bakdata.conquery.util.support.ConfigOverride; import com.bakdata.conquery.util.support.TestConquery; +import com.codahale.metrics.SharedMetricRegistries; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.google.common.base.Strings; @@ -127,6 +132,22 @@ public Stream programmaticTests() { .map(this::createDynamicProgrammaticTestNode); } + + @SneakyThrows + public Stream sqlTests(SqlDialect sqlDialect, SqlConnectorConfig sqlConfig) { + SharedMetricRegistries.setDefault("test"); + final Path testRootDir = Path.of(Objects.requireNonNullElse( + System.getenv(TestTags.SQL_BACKEND_TEST_DIRECTORY_ENVIRONMENT_VARIABLE), + SqlIntegrationTest.SQL_TEST_DIR + )); + + Stream paths = Files.walk(testRootDir); + List dynamicTestStream = paths.filter(path -> !Files.isDirectory(path) && path.toString().endsWith(".json")) + .map(path -> SqlIntegrationTest.fromPath(path, sqlDialect, sqlConfig)) + .map(test -> DynamicTest.dynamicTest(test.getTestSpec().getLabel(), test)).toList(); + return dynamicTestStream.stream(); + } + private DynamicTest createDynamicProgrammaticTestNode(ProgrammaticIntegrationTest test) { return DynamicTest.dynamicTest( test.getClass().getSimpleName(), diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java b/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java index 5f34d01a92..d93de07fd4 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java @@ -147,6 +147,13 @@ private static URI getQueryStatusURI(StandaloneSupport conquery, String id) { )); } + private static URI getQueryCancelURI(StandaloneSupport conquery, String id) { + return HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), QueryResource.class, "cancel") + .buildFromMap(Map.of( + "query", id, "dataset", conquery.getDataset().getId() + )); + } + public static FullExecutionStatus getExecutionStatus(StandaloneSupport conquery, ManagedExecutionId executionId, User user, int expectedResponseCode) { final URI queryStatusURI = getQueryStatusURI(conquery, executionId.toString()); @@ -168,4 +175,18 @@ public static FullExecutionStatus getExecutionStatus(StandaloneSupport conquery, return response.readEntity(FullExecutionStatus.class); } + public static Response cancelQuery(StandaloneSupport conquery, ManagedExecutionId executionId, User user) { + final URI cancelQueryURI = getQueryCancelURI(conquery, executionId.toString()); + + final String userToken = conquery.getAuthorizationController() + .getConqueryTokenRealm() + .createTokenForUser(user.getId()); + + return conquery.getClient() + .target(cancelQueryURI) + .request(MediaType.APPLICATION_JSON_TYPE) + .header("Authorization", "Bearer " + userToken) + .post(null); + + } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java index ed1f11342e..a64c65993c 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java @@ -31,7 +31,7 @@ import 
lombok.extern.slf4j.Slf4j; @Slf4j -public abstract class AbstractQueryEngineTest extends ConqueryTestSpec { +public abstract class AbstractQueryEngineTest extends ConqueryTestSpec<StandaloneSupport> { @Override diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java b/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java index 41aef7a599..69fbcb62a9 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java @@ -22,6 +22,7 @@ import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.bakdata.conquery.util.support.StandaloneSupport; +import com.bakdata.conquery.util.support.TestSupport; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JavaType; @@ -36,7 +37,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.CUSTOM, include = JsonTypeInfo.As.PROPERTY, property = "type") @Slf4j @CPSBase -public abstract class ConqueryTestSpec { +public abstract class ConqueryTestSpec<S extends TestSupport> { @Getter @Setter @@ -58,9 +59,9 @@ public ConqueryConfig overrideConfig(ConqueryConfig config) { return config.withStorage(new NonPersistentStoreFactory()); } - public abstract void executeTest(StandaloneSupport support) throws Exception; + public abstract void executeTest(S support) throws Exception; - public abstract void importRequiredData(StandaloneSupport support) throws Exception; + public abstract void importRequiredData(S support) throws Exception; @Override @@ -68,19 +69,19 @@ public String toString() { return label; } - public static <T> T parseSubTree(StandaloneSupport support, JsonNode node, Class<T> expectedClass) throws IOException, JSONException { + public static <T> T parseSubTree(TestSupport support, JsonNode node, Class<T> expectedClass) throws IOException, JSONException { return parseSubTree(support, node, expectedClass, null); } - public static <T> T parseSubTree(StandaloneSupport support, JsonNode node, Class<T> expectedClass, Consumer<T> modifierBeforeValidation) throws IOException, JSONException { + public static <T> T parseSubTree(TestSupport support, JsonNode node, Class<T> expectedClass, Consumer<T> modifierBeforeValidation) throws IOException, JSONException { return parseSubTree(support, node, Jackson.MAPPER.getTypeFactory().constructParametricType(expectedClass, new JavaType[0]), modifierBeforeValidation); } - public static <T> T parseSubTree(StandaloneSupport support, JsonNode node, JavaType expectedType) throws IOException, JSONException { + public static <T> T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType) throws IOException, JSONException { return parseSubTree(support, node, expectedType, null); } - public static <T> T parseSubTree(StandaloneSupport support, JsonNode node, JavaType expectedType, Consumer<T> modifierBeforeValidation) throws IOException, JSONException { + public static <T> T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType, Consumer<T> modifierBeforeValidation) throws IOException, JSONException { final ObjectMapper om = Jackson.MAPPER.copy(); ObjectMapper mapper = support.getDataset().injectIntoNew( new SingletonNamespaceCollection(support.getNamespace().getStorage().getCentralRegistry(), support.getMetaStorage().getCentralRegistry()) @@ -102,7 +103,7 @@ public static <T> T parseSubTree(StandaloneSupport support, JsonNode node, JavaType return result; } -
public static <T> List<T> parseSubTreeList(StandaloneSupport support, ArrayNode node, Class<T> expectedType, Consumer<T> modifierBeforeValidation) throws IOException, JSONException { + public static <T> List<T> parseSubTreeList(TestSupport support, ArrayNode node, Class<T> expectedType, Consumer<T> modifierBeforeValidation) throws IOException, JSONException { final ObjectMapper om = Jackson.MAPPER.copy(); ObjectMapper mapper = support.getDataset().injectInto( new SingletonNamespaceCollection(support.getNamespace().getStorage().getCentralRegistry()).injectIntoNew( @@ -150,7 +151,7 @@ public static <T> List<T> parseSubTreeList(StandaloneSupport support, ArrayNode @RequiredArgsConstructor private static class DatasetPlaceHolderFiller extends DeserializationProblemHandler { - private final StandaloneSupport support; + private final TestSupport support; @Override public Object handleWeirdStringValue(DeserializationContext ctxt, Class<?> targetType, String valueToConvert, String failureMsg) throws IOException { diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java index 12fd1c29ea..01a69b3148 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java @@ -54,7 +54,7 @@ @Getter @Setter @CPSType(id = "FORM_TEST", base = ConqueryTestSpec.class) -public class FormTest extends ConqueryTestSpec { +public class FormTest extends ConqueryTestSpec<StandaloneSupport> { /* * parse form as json first, because it may contain namespaced ids, that can only be resolved after diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/JsonIntegrationTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/JsonIntegrationTest.java index 795a036d49..7dd908dc5e 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/JsonIntegrationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/JsonIntegrationTest.java @@ -48,8 +48,7 @@ public void execute(StandaloneSupport conquery) throws Exception { //ensure the metadata is collected - - conquery.getNamespace().sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts())); + conquery.getNamespace().getWorkerHandler().sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts())); conquery.waitUntilWorkDone(); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java new file mode 100644 index 0000000000..78a3d72366 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java @@ -0,0 +1,170 @@ +package com.bakdata.conquery.integration.sql; + + +import java.io.IOException; +import java.math.BigDecimal; +import java.nio.file.Files; +import java.nio.file.Path; +import java.sql.Connection; +import java.sql.Date; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import com.bakdata.conquery.integration.common.RequiredColumn; +import com.bakdata.conquery.integration.common.RequiredTable; +import com.bakdata.conquery.integration.common.ResourceFile; +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.config.CSVConfig; +import com.bakdata.conquery.models.config.ConqueryConfig; +import
com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.preproc.parser.specific.DateRangeParser; +import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.sql.execution.SqlEntityResult; +import com.google.common.base.Strings; +import com.univocity.parsers.csv.CsvParser; +import lombok.SneakyThrows; +import org.jooq.DSLContext; +import org.jooq.DataType; +import org.jooq.Field; +import org.jooq.Record; +import org.jooq.RowN; +import org.jooq.Table; +import org.jooq.conf.ParamType; +import org.jooq.impl.BuiltInDataType; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.jooq.postgres.extensions.types.DateRange; + +public class CsvTableImporter { + + private final DSLContext dslContext; + private final DateRangeParser dateRangeParser; + private final CsvParser csvReader; + + public CsvTableImporter(DSLContext dslContext) { + this.dslContext = dslContext; + this.dateRangeParser = new DateRangeParser(new ConqueryConfig()); + this.csvReader = new CSVConfig().withSkipHeader(true).createParser(); + } + + /** + * Imports the table into the database that is connected to the {@link org.jooq.DSLContext DSLContext} + * of this {@link com.bakdata.conquery.integration.sql.CsvTableImporter CsvTableImporter}. + */ + public void importTableIntoDatabase(RequiredTable requiredTable) { + + Table<Record> table = DSL.table(requiredTable.getName()); + List<RequiredColumn> allRequiredColumns = this.getAllRequiredColumns(requiredTable); + List<Field<?>> columns = this.createFieldsForColumns(allRequiredColumns); + List<RowN> content = this.getTablesContentFromCSV(requiredTable.getCsv(), allRequiredColumns); + + // because we currently don't shut down the container between test cases, we drop tables upfront if they + // exist, to stay consistent when different test cases reuse the same table names + String dropTableStatement = dslContext.dropTableIfExists(table) + .getSQL(ParamType.INLINED); + + String createTableStatement = dslContext.createTable(table) + .columns(columns) + .getSQL(ParamType.INLINED); + + String insertIntoTableStatement = dslContext.insertInto(table, columns) + .valuesOfRows(content) + .getSQL(ParamType.INLINED); + + // we use JDBC directly because jOOQ can't cope with PostgreSQL custom types + dslContext.connection((Connection connection) -> { + try (Statement statement = connection.createStatement()) { + statement.execute(dropTableStatement); + statement.execute(createTableStatement); + statement.execute(insertIntoTableStatement); + } + }); + } + + public List<EntityResult> readExpectedEntities(Path csv) throws IOException { + List<String[]> rawEntities = this.csvReader.parseAll(Files.newInputStream(csv)); + List<EntityResult> results = new ArrayList<>(rawEntities.size()); + for (int i = 0; i < rawEntities.size(); i++) { + String[] row = rawEntities.get(i); + results.add(new SqlEntityResult(i + 1, row[0], Arrays.copyOfRange(row, 1, row.length))); + } + return results; + } + + + private List<Field<?>> createFieldsForColumns(List<RequiredColumn> requiredColumns) { + return requiredColumns.stream() + .map(this::createField) + .collect(Collectors.toList()); + } + + private List<RequiredColumn> getAllRequiredColumns(RequiredTable table) { + ArrayList<RequiredColumn> requiredColumns = new ArrayList<>(); + requiredColumns.add(table.getPrimaryColumn()); + requiredColumns.addAll(Arrays.stream(table.getColumns()).toList()); + return requiredColumns; + } + + private Field<?> createField(RequiredColumn requiredColumn) { + DataType<?> dataType = switch (requiredColumn.getType()) { + case STRING -> SQLDataType.VARCHAR; + case INTEGER -> SQLDataType.INTEGER;
case BOOLEAN -> SQLDataType.BOOLEAN; + case REAL -> SQLDataType.REAL; + case DECIMAL, MONEY -> SQLDataType.DECIMAL; + case DATE -> SQLDataType.DATE; + case DATE_RANGE -> new BuiltInDataType<>(DateRange.class, "daterange"); + }; + return DSL.field(requiredColumn.getName(), dataType); + } + + @SneakyThrows + private List<RowN> getTablesContentFromCSV(ResourceFile csvFile, List<RequiredColumn> requiredColumns) { + List<String[]> rawContent = this.csvReader.parseAll(csvFile.stream()); + List<List<Object>> castedContent = this.castContent(rawContent, requiredColumns); + return castedContent.stream() + .map(DSL::row) + .toList(); + } + + /** + * Casts all values of each row to the corresponding type of the column the value refers to. + */ + private List<List<Object>> castContent(List<String[]> rawContent, List<RequiredColumn> requiredColumns) { + List<List<Object>> castedContent = new ArrayList<>(rawContent.size()); + for (String[] row : rawContent) { + List<Object> castEntriesOfRow = new ArrayList<>(row.length); + for (int i = 0; i < row.length; i++) { + MajorTypeId type = requiredColumns.get(i).getType(); + castEntriesOfRow.add(this.castEntryAccordingToColumnType(row[i], type)); + } + castedContent.add(castEntriesOfRow); + } + return castedContent; + } + + private Object castEntryAccordingToColumnType(String entry, MajorTypeId type) { + + // if the entry from the CSV is empty, the value in the database should be null + if (Strings.isNullOrEmpty(entry)) { + return null; + } + + return switch (type) { + case STRING -> entry; + case BOOLEAN -> Boolean.valueOf(entry); + case INTEGER -> Integer.valueOf(entry); + case REAL -> Float.valueOf(entry); + case DECIMAL, MONEY -> new BigDecimal(entry); + case DATE -> Date.valueOf(entry); + case DATE_RANGE -> { + CDateRange dateRange = this.dateRangeParser.parse(entry); + yield DateRange.dateRange(Date.valueOf(dateRange.getMin()), Date.valueOf(dateRange.getMax())); + } + }; + } + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/PostgreSqlIntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/PostgreSqlIntegrationTests.java new file mode 100644 index 0000000000..9ee02002ba --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/PostgreSqlIntegrationTests.java @@ -0,0 +1,86 @@ +package com.bakdata.conquery.integration.sql; + +import java.util.stream.Stream; + +import com.bakdata.conquery.TestTags; +import com.bakdata.conquery.apiv1.query.ConceptQuery; +import com.bakdata.conquery.integration.IntegrationTests; +import com.bakdata.conquery.models.config.Dialect; +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.error.ConqueryError; +import com.bakdata.conquery.sql.DslContextFactory; +import com.bakdata.conquery.sql.SqlQuery; +import com.bakdata.conquery.sql.conquery.SqlManagedQuery; +import com.bakdata.conquery.sql.execution.SqlExecutionService; +import lombok.extern.slf4j.Slf4j; +import org.assertj.core.api.Assertions; +import org.jooq.DSLContext; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DynamicTest; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestFactory; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +@Testcontainers +@Slf4j +public class PostgreSqlIntegrationTests extends IntegrationTests { + + private static final DockerImageName postgreSqlImageName =
DockerImageName.parse("postgres:alpine3.17"); + private static final String databaseName = "test"; + private static final String username = "user"; + private static final String password = "pass"; + private static DSLContext dslContext; + private static SqlConnectorConfig sqlConfig; + + public PostgreSqlIntegrationTests() { + super("tests/", "com.bakdata.conquery.integration"); + } + + @Container + private static final PostgreSQLContainer postgresqlContainer = new PostgreSQLContainer<>(postgreSqlImageName) + .withDatabaseName(databaseName) + .withUsername(username) + .withPassword(password); + + + @BeforeAll + static void before() { + postgresqlContainer.start(); + sqlConfig = SqlConnectorConfig.builder() + .dialect(Dialect.POSTGRESQL) + .jdbcConnectionUrl(postgresqlContainer.getJdbcUrl()) + .databaseUsername(username) + .databasePassword(password) + .withPrettyPrinting(true) + .primaryColumn("pid") + .build(); + dslContext = DslContextFactory.create(sqlConfig); + } + + @Test + @Tag(TestTags.INTEGRATION_SQL_BACKEND) + public void shouldThrowException() { + SqlExecutionService executionService = new SqlExecutionService(dslContext); + SqlManagedQuery validQuery = new SqlManagedQuery(new ConceptQuery(), null, null, null, new SqlQuery("SELECT 1")); + Assertions.assertThatNoException().isThrownBy(() -> executionService.execute(validQuery)); + + // executing an empty query should throw an SQL error + SqlManagedQuery emptyQuery = new SqlManagedQuery(new ConceptQuery(), null, null, null, new SqlQuery("")); + Assertions.assertThatThrownBy(() -> executionService.execute(emptyQuery)) + .isInstanceOf(ConqueryError.SqlError.class) + .hasMessageContaining("Something went wrong while querying the database: org.postgresql.util.PSQLException"); + } + + + @TestFactory + @Tag(TestTags.INTEGRATION_SQL_BACKEND) + public Stream sqlBackendTests() { + return super.sqlTests(new TestPostgreSqlDialect(dslContext), sqlConfig); + } + + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTest.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTest.java new file mode 100644 index 0000000000..c64c775951 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTest.java @@ -0,0 +1,31 @@ +package com.bakdata.conquery.integration.sql; + +import java.io.IOException; +import java.nio.file.Path; + +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.junit.jupiter.api.function.Executable; + +@AllArgsConstructor +@Getter +public class SqlIntegrationTest implements Executable { + + public static final String SQL_TEST_DIR = "src/test/resources/tests/sql"; + + private final SqlStandaloneSupport support; + private final SqlIntegrationTestSpec testSpec; + + public void execute() throws IOException, JSONException { + testSpec.importRequiredData(support); + testSpec.executeTest(support); + } + + public static SqlIntegrationTest fromPath(final Path path, final SqlDialect sqlDialect, final SqlConnectorConfig sqlConfig) { + return new SqlIntegrationTest(new SqlStandaloneSupport(sqlDialect, sqlConfig), SqlIntegrationTestSpec.fromJsonSpec(path)); + } + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java 
b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java new file mode 100644 index 0000000000..83bc210090 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java @@ -0,0 +1,124 @@ +package com.bakdata.conquery.integration.sql; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; + +import javax.validation.Valid; +import javax.validation.constraints.NotNull; + +import com.bakdata.conquery.apiv1.query.Query; +import com.bakdata.conquery.integration.common.RequiredData; +import com.bakdata.conquery.integration.common.RequiredTable; +import com.bakdata.conquery.integration.json.ConqueryTestSpec; +import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.models.query.results.SinglelineEntityResult; +import com.bakdata.conquery.sql.conquery.SqlManagedQuery; +import com.bakdata.conquery.sql.execution.SqlEntityResult; +import com.bakdata.conquery.sql.execution.SqlExecutionResult; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; +import lombok.Getter; +import lombok.Setter; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.assertj.core.api.Assertions; + +@Getter +@Setter +@CPSType(id = "SQL_TEST", base = ConqueryTestSpec.class) +@Slf4j +public class SqlIntegrationTestSpec extends ConqueryTestSpec<SqlStandaloneSupport> { + + private static final String EXPECTED_SQL_FILENAME = "expected.sql"; + + @NotNull + @JsonProperty("query") + private JsonNode rawQuery; + + @JsonIgnore + private String description; + + @NotNull + private String expectedCsv; + + @Valid + @NotNull + private RequiredData content; + + @NotNull + @JsonProperty("concepts") + private ArrayNode rawConcepts; + + @JsonIgnore + private Query query; + + @JsonIgnore + private Path specDir; + + + @SneakyThrows + public static SqlIntegrationTestSpec fromJsonSpec(Path path) { + SqlIntegrationTestSpec test = readSpecFromJson(path); + test.setSpecDir(path.getParent()); + return test; + } + + private static SqlIntegrationTestSpec readSpecFromJson(Path path) throws IOException { + final ObjectReader objectReader = Jackson.MAPPER.readerFor(SqlIntegrationTestSpec.class); + return objectReader.readValue(Files.readString(path)); + } + + @Override + public void executeTest(SqlStandaloneSupport support) throws IOException { + for (RequiredTable table : content.getTables()) { + support.getTableImporter().importTableIntoDatabase(table); + } + + SqlManagedQuery managedQuery = support.getExecutionManager() + .runQuery(support.getNamespace(), getQuery(), support.getTestUser(), support.getDataset(), support.getConfig(), false); + log.info("Execute query: \n{}", managedQuery.getSqlQuery().getSqlString()); + + SqlExecutionResult result = managedQuery.getResult(); + List<EntityResult> resultCsv = result.getTable(); + Path expectedCsvFile = this.specDir.resolve(this.expectedCsv); + List<EntityResult> expectedCsv = support.getTableImporter().readExpectedEntities(expectedCsvFile); +
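// compares the entity results field by field recursively rather than relying on equals() of the result type +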
Assertions.assertThat(resultCsv).usingRecursiveFieldByFieldElementComparator().containsExactlyElementsOf(expectedCsv); + } + + @Override + public void importRequiredData(SqlStandaloneSupport support) throws IOException, JSONException { + importTables(support); + importConcepts(support); + Query parsedQuery = ConqueryTestSpec.parseSubTree(support, getRawQuery(), Query.class); + setQuery(parsedQuery); + } + + private void importTables(SqlStandaloneSupport support) { + for (RequiredTable rTable : getContent().getTables()) { + final Table table = rTable.toTable(support.getDataset(), support.getNamespaceStorage().getCentralRegistry()); + support.getNamespaceStorage().addTable(table); + } + } + + private void importConcepts(SqlStandaloneSupport support) throws IOException, JSONException { + List<Concept<?>> concepts = + ConqueryTestSpec.parseSubTreeList(support, getRawConcepts(), Concept.class, concept -> concept.setDataset(support.getDataset())); + + for (Concept<?> concept : concepts) { + support.getNamespaceStorage().updateConcept(concept); + } + } + + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlStandaloneSupport.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlStandaloneSupport.java new file mode 100644 index 0000000000..32fb9525d9 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlStandaloneSupport.java @@ -0,0 +1,93 @@ +package com.bakdata.conquery.integration.sql; + +import javax.validation.Validator; + +import com.bakdata.conquery.integration.IntegrationTests; +import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.mode.local.LocalNamespaceHandler; +import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.LocalNamespace; +import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.sql.SqlContext; +import com.bakdata.conquery.sql.conquery.SqlExecutionManager; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; +import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.bakdata.conquery.util.support.TestSupport; +import io.dropwizard.jersey.validation.Validators; +import lombok.Value; + +@Value +public class SqlStandaloneSupport implements TestSupport { + + private static final Validator VALIDATOR = Validators.newValidator(); + Dataset dataset; + Namespace namespace; + ConqueryConfig config; + MetaStorage metaStorage; + User testUser; + + CsvTableImporter tableImporter; + SqlExecutionManager executionManager; + + public SqlStandaloneSupport(final SqlDialect sqlDialect, final SqlConnectorConfig sqlConfig) { + this.dataset = new Dataset("test"); + NamespaceStorage storage = new NamespaceStorage(new NonPersistentStoreFactory(), "", VALIDATOR) { + }; + storage.openStores(Jackson.MAPPER.copy()); + storage.updateDataset(dataset); + config = IntegrationTests.DEFAULT_CONFIG; + config.setSqlConnectorConfig(sqlConfig); + InternalObjectMapperCreator creator = new InternalObjectMapperCreator(config, getValidator()); + SqlContext context = new SqlContext(sqlConfig, sqlDialect); + LocalNamespaceHandler
localNamespaceHandler = new LocalNamespaceHandler(config, creator, context); + DatasetRegistry<LocalNamespace> registry = new DatasetRegistry<>(0, config, creator, localNamespaceHandler); + + metaStorage = new MetaStorage(new NonPersistentStoreFactory(), registry); + metaStorage.openStores(Jackson.MAPPER.copy()); + registry.setMetaStorage(metaStorage); + creator.init(registry); + + testUser = getConfig().getAuthorizationRealms().getInitialUsers().get(0).createOrOverwriteUser(metaStorage); + metaStorage.updateUser(testUser); + namespace = registry.createNamespace(storage); + tableImporter = new CsvTableImporter(sqlDialect.getDSLContext()); + executionManager = (SqlExecutionManager) namespace.getExecutionManager(); + } + + @Override + public Namespace getNamespace() { + return namespace; + } + + @Override + public Validator getValidator() { + return VALIDATOR; + } + + @Override + public MetaStorage getMetaStorage() { + return metaStorage; + } + + @Override + public NamespaceStorage getNamespaceStorage() { + return namespace.getStorage(); + } + + @Override + public ConqueryConfig getConfig() { + return config; + } + + @Override + public User getTestUser() { + return testUser; + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/TestPostgreSqlDialect.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/TestPostgreSqlDialect.java new file mode 100644 index 0000000000..09ee227eb3 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/TestPostgreSqlDialect.java @@ -0,0 +1,35 @@ +package com.bakdata.conquery.integration.sql; + +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.sql.conversion.select.SelectConverter; +import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter; +import com.bakdata.conquery.sql.conversion.dialect.PostgreSqlDialect; +import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; +import org.jooq.DSLContext; + +import java.time.LocalDate; +import java.util.List; + +public class TestPostgreSqlDialect extends PostgreSqlDialect { + + public TestPostgreSqlDialect(DSLContext dslContext) { + super(dslContext); + } + + @Override + public List<SelectConverter<? extends Select>> getSelectConverters() { + return this.customizeSelectConverters(List.of( + new DateDistanceConverter(new MockDateNowSupplier()) + )); + } + + private class MockDateNowSupplier implements DateNowSupplier { + + @Override + public LocalDate getLocalDateNow() { + return LocalDate.parse("2023-03-28"); + } + + } + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminEndpointTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminEndpointTest.java index 852dc02217..17718ea918 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminEndpointTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminEndpointTest.java @@ -20,7 +20,7 @@ public class AdminEndpointTest implements ProgrammaticIntegrationTest { public void execute(String name, TestConquery testConquery) throws Exception { List<EndPoint> expectedEndpoints = READER.readValue(In.resource("/tests/endpoints/adminEndpointInfo.json").asStream()); - DropwizardResourceConfig jerseyConfig = testConquery.getStandaloneCommand().getManager().getAdmin().getJerseyConfig(); + DropwizardResourceConfig jerseyConfig = testConquery.getStandaloneCommand().getManagerNode().getAdmin().getJerseyConfig(); List<EndPoint> resources = EndpointTestHelper.collectEndpoints(jerseyConfig); diff --git
a/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminUIEndpointTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminUIEndpointTest.java index 2a8ad28d11..0fa0c1d05a 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminUIEndpointTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/AdminUIEndpointTest.java @@ -20,7 +20,7 @@ public class AdminUIEndpointTest implements ProgrammaticIntegrationTest { public void execute(String name, TestConquery testConquery) throws Exception { List<EndPoint> expectedEndpoints = READER.readValue(In.resource("/tests/endpoints/adminUIEndpointInfo.json").asStream()); - DropwizardResourceConfig jerseyConfig = testConquery.getStandaloneCommand().getManager().getAdmin().getJerseyConfigUI(); + DropwizardResourceConfig jerseyConfig = testConquery.getStandaloneCommand().getManagerNode().getAdmin().getJerseyConfigUI(); List<EndPoint> resources = EndpointTestHelper.collectEndpoints(jerseyConfig); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java index a6e99141d4..603b94445f 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java @@ -2,7 +2,6 @@ import static com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds; -import com.bakdata.conquery.apiv1.QueryProcessor; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.integration.common.IntegrationUtils; @@ -31,8 +30,7 @@ public void execute(StandaloneSupport conquery) throws Exception { final MetaStorage storage = conquery.getMetaStorage(); final Dataset dataset = conquery.getDataset(); final String testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset.getId(), testJson); - final QueryProcessor processor = new QueryProcessor(conquery.getDatasetRegistry(), storage, conquery.getConfig()); + final QueryTest test = JsonIntegrationTest.readJson(dataset.getId(), testJson); final User user = new User("testUser", "testUserLabel", storage); // Manually import data, so we can do our own work.
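Every programmatic test in this patch makes the same mechanical swap from getManager() to getManagerNode(): getManager() now hands out the mode-specific Manager, while the Dropwizard lifecycle object the tests actually want lives in ManagerNode. A minimal sketch of the new accessor chain in a hypothetical test (the class name and assertion are illustrative, not part of this patch):

public class ExampleManagerNodeAccessTest implements ProgrammaticIntegrationTest {

	@Override
	public void execute(String name, TestConquery testConquery) throws Exception {
		// the ManagerNode (storage, validator, admin servlets) is now reached via getManagerNode()
		ManagerNode managerNode = testConquery.getStandaloneCommand().getManagerNode();
		assertThat(managerNode.getStorage()).isNotNull();
	}
}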
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java index 3931d12eeb..93617d472f 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java @@ -52,7 +52,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Test health"); assertThat(testConquery.getStandaloneCommand() - .getManager() + .getManagerNode() .getEnvironment() .healthChecks() .runHealthCheck(FORM_BACKEND_ID) @@ -60,7 +60,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { .describedAs("Checking health of form backend").isTrue(); log.info("Get external form configs"); - final FormScanner formScanner = testConquery.getStandaloneCommand().getManager().getFormScanner(); + final FormScanner formScanner = testConquery.getStandaloneCommand().getManagerNode().getFormScanner(); formScanner.execute(Collections.emptyMap(), null); final String externalFormId = FormBackendConfig.createSubTypedId("SOME_EXTERNAL_FORM"); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java index 1b81dc4f59..6605a6e6f5 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java @@ -23,19 +23,19 @@ public class MetadataCollectionTest extends IntegrationTest.Simple implements ProgrammaticIntegrationTest { public void execute(StandaloneSupport conquery) throws Exception { //read test specification String testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); - + DatasetId dataset = conquery.getDataset().getId(); - + ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson); ValidatorHelper.failOnError(log, conquery.getValidator().validate(test)); - + test.importRequiredData(conquery); - + //ensure the metadata is collected - conquery.getNamespace().sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts())); + conquery.getNamespace().getWorkerHandler().sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts())); conquery.waitUntilWorkDone(); - + TreeConcept concept = (TreeConcept) conquery.getNamespace().getStorage().getAllConcepts().iterator().next(); //check the number of matched events diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java index 1cc0b95608..0a0af0aa8e 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java @@ -46,7 +46,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { Validator validator = Validators.newValidator(); EntityIdMap entityIdMap = IdMapSerialisationTest.createTestPersistentMap(); - ManagerNode manager = testConquery.getStandaloneCommand().getManager(); + ManagerNode manager = testConquery.getStandaloneCommand().getManagerNode(); AdminDatasetProcessor adminDatasetProcessor = manager.getAdmin().getAdminDatasetProcessor();
AdminProcessor adminProcessor = manager.getAdmin().getAdminProcessor(); @@ -179,7 +179,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(entityIdMapAfterRestart).isEqualTo(entityIdMap); // We need to reassign the dataset processor because the instance prior to the restart became invalid - adminDatasetProcessor = testConquery.getStandaloneCommand().getManager().getAdmin().getAdminDatasetProcessor(); + adminDatasetProcessor = testConquery.getStandaloneCommand().getManagerNode().getAdmin().getAdminDatasetProcessor(); // Cleanup adminDatasetProcessor.deleteDataset(dataset1); adminDatasetProcessor.deleteDataset(dataset2); @@ -189,4 +189,3 @@ public void execute(String name, TestConquery testConquery) throws Exception { adminDatasetProcessor.deleteDataset(dataset6); } } - diff --git a/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java b/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java index 379f1f07d6..48cb1582cb 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java @@ -10,8 +10,12 @@ import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.mode.InternalObjectMapperCreator; +import com.bakdata.conquery.mode.cluster.ClusterNamespaceHandler; +import com.bakdata.conquery.mode.cluster.ClusterState; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.fasterxml.jackson.databind.ObjectMapper; import io.dropwizard.jersey.validation.Validators; @@ -23,7 +27,7 @@ public abstract class AbstractSerializationTest { private final Validator validator = Validators.newValidator(); private final ConqueryConfig config = new ConqueryConfig(); - private DatasetRegistry datasetRegistry; + private DatasetRegistry<DistributedNamespace> datasetRegistry; private MetaStorage metaStorage; private ObjectMapper managerInternalMapper; @@ -33,15 +37,19 @@ public abstract class AbstractSerializationTest { @BeforeEach public void before() { - datasetRegistry = new DatasetRegistry(0, config, null); + InternalObjectMapperCreator creator = new InternalObjectMapperCreator(config, validator); + datasetRegistry = new DatasetRegistry<>(0, config, null, new ClusterNamespaceHandler(new ClusterState(), config, creator)); metaStorage = new MetaStorage(new NonPersistentStoreFactory(), datasetRegistry); + datasetRegistry.setMetaStorage(metaStorage); + creator.init(datasetRegistry); // Prepare manager node internal mapper final ManagerNode managerNode = mock(ManagerNode.class); when(managerNode.getConfig()).thenReturn(config); when(managerNode.getValidator()).thenReturn(validator); - when(managerNode.getDatasetRegistry()).thenReturn(datasetRegistry); + doReturn(datasetRegistry).when(managerNode).getDatasetRegistry(); when(managerNode.getStorage()).thenReturn(metaStorage); + when(managerNode.getInternalObjectMapperCreator()).thenReturn(creator); when(managerNode.createInternalObjectMapper(any())).thenCallRealMethod(); managerInternalMapper = managerNode.createInternalObjectMapper(View.Persistence.Manager.class); diff --git a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java
b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java index 67a53f5b1c..efb6619344 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java @@ -14,6 +14,7 @@ import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.fasterxml.jackson.annotation.JsonCreator; @@ -38,7 +39,7 @@ public void testListReferences() throws IOException { registry.register(dataset); registry.register(table); - final DatasetRegistry datasetRegistry = new DatasetRegistry(0, null, null); + final DatasetRegistry<DistributedNamespace> datasetRegistry = new DatasetRegistry<>(0, null, null, null); final MetaStorage metaStorage = new MetaStorage(new NonPersistentStoreFactory(), datasetRegistry); diff --git a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java index 249fc6d654..3d68b80131 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java @@ -74,6 +74,7 @@ import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; +import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.IdMapSerialisationTest; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; @@ -465,7 +466,10 @@ public void cqConcept() throws JSONException, IOException { @Test public void executionCreationPlanError() throws JSONException, IOException { - ConqueryError error = new ConqueryError.ExecutionCreationPlanError(); + + I18n.init(); + + ConqueryError error = new ConqueryError.ExecutionProcessingError(); SerializationTestUtil .forType(ConqueryError.class) diff --git a/backend/src/test/java/com/bakdata/conquery/models/error/ConqueryErrorTest.java b/backend/src/test/java/com/bakdata/conquery/models/error/ConqueryErrorTest.java index 6215055697..c160e77359 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/error/ConqueryErrorTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/error/ConqueryErrorTest.java @@ -2,34 +2,31 @@ import static org.assertj.core.api.Assertions.assertThat; -import java.util.Map; import java.util.UUID; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.models.error.ConqueryError.ExternalResolveFormatError; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonMappingException; import org.junit.jupiter.api.Test; public class ConqueryErrorTest { @Test public void errorConvertion() { - ExternalResolveFormatError error = new ExternalResolveFormatError(5, 6); - assertThat(error.asPlain()).isEqualTo(new PlainError(error.getId(), "CQ_EXECUTION_CREATION_RESOLVE_EXTERNAL_FORMAT", error.getMessage(), error.getContext())); + final ExternalResolveFormatError error = new ExternalResolveFormatError(5, 6); + assertThat(error.asPlain()).isEqualTo(new
SimpleErrorInfo(error.getId(), "CQ_EXECUTION_CREATION_RESOLVE_EXTERNAL_FORMAT", error.getMessage())); } - + @Test - public void errorDeserialization() throws JsonMappingException, JsonProcessingException { - PlainError error = Jackson.MAPPER.readerFor(PlainError.class).readValue("{\r\n" + - " \"code\": \"TEST_ERROR\",\r\n" + - " \"context\": {\r\n" + - " \"group\": \"group\"\r\n" + - " },\r\n" + - " \"id\": \"c8be5f10-1ea8-11eb-8fb8-26885ec43e14\",\r\n" + - " \"message\": \"group was empty.\"\r\n" + - " }"); - - assertThat(error).isEqualTo(new PlainError(UUID.fromString("c8be5f10-1ea8-11eb-8fb8-26885ec43e14"), "TEST_ERROR", "group was empty.", Map.of("group", "group"))); + public void errorDeserialization() throws JsonProcessingException { + final SimpleErrorInfo error = Jackson.MAPPER.readerFor(SimpleErrorInfo.class).readValue( + """ + {\r + "code": "TEST_ERROR",\r + "id": "c8be5f10-1ea8-11eb-8fb8-26885ec43e14",\r + "message": "group was empty."\r + }"""); + + assertThat(error).isEqualTo(new SimpleErrorInfo(UUID.fromString("c8be5f10-1ea8-11eb-8fb8-26885ec43e14"), "TEST_ERROR", "group was empty.")); } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java b/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java index 74c3145abd..f2b1bece25 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java @@ -24,6 +24,7 @@ import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; import org.jetbrains.annotations.NotNull; @@ -36,7 +37,7 @@ public class DefaultLabelTest { private final static MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); - private static final Namespace NAMESPACE = Mockito.mock(Namespace.class); + private static final Namespace NAMESPACE = Mockito.mock(LocalNamespace.class); private static final Dataset DATASET = new Dataset("dataset"); private static final User user = new User("user","user", STORAGE); diff --git a/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java b/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java index 06056941ab..8043860107 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java @@ -33,6 +33,7 @@ import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; +import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; import io.dropwizard.jersey.validation.Validators; import lombok.SneakyThrows; @@ -43,7 +44,7 @@ @Slf4j public class DefaultColumnNameTest { - private static final Namespace NAMESPACE = mock(Namespace.class); + private static final Namespace NAMESPACE = mock(LocalNamespace.class); private static final PrintSettings SETTINGS = new PrintSettings(false, Locale.ENGLISH, NAMESPACE, new ConqueryConfig(), null); private static final Validator VALIDATOR = Validators.newValidator(); diff 
--git a/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java b/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java index 160774811d..89f5a7a925 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java @@ -23,7 +23,7 @@ import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.resources.admin.rest.AdminDatasetProcessor; import com.bakdata.conquery.resources.admin.rest.AdminProcessor; import com.google.common.util.concurrent.MoreExecutors; @@ -35,11 +35,11 @@ @Slf4j @RequiredArgsConstructor -public class StandaloneSupport { +public class StandaloneSupport implements TestSupport { private final TestConquery testConquery; @Getter - private final Namespace namespace; + private final DistributedNamespace namespace; @Getter private final Dataset dataset; @Getter @@ -54,7 +54,7 @@ public class StandaloneSupport { private final User testUser; public AuthorizationController getAuthorizationController() { - return testConquery.getStandaloneCommand().getManager().getAuthController(); + return testConquery.getStandaloneCommand().getManagerNode().getAuthController(); } public void waitUntilWorkDone() { @@ -84,19 +84,19 @@ public void run(Environment environment, net.sourceforge.argparse4j.inf.Namespace public Validator getValidator() { - return testConquery.getStandaloneCommand().getManager().getValidator(); + return testConquery.getStandaloneCommand().getManagerNode().getValidator(); } public MetaStorage getMetaStorage() { - return testConquery.getStandaloneCommand().getManager().getStorage(); + return testConquery.getStandaloneCommand().getManagerNode().getStorage(); } public NamespaceStorage getNamespaceStorage() { - return testConquery.getStandaloneCommand().getManager().getDatasetRegistry().get(dataset.getId()).getStorage(); + return testConquery.getStandaloneCommand().getManagerNode().getDatasetRegistry().get(dataset.getId()).getStorage(); } public DatasetRegistry getDatasetRegistry() { - return testConquery.getStandaloneCommand().getManager().getDatasetRegistry(); + return testConquery.getStandaloneCommand().getManagerNode().getDatasetRegistry(); } public List<ShardNode> getShardNodes() { diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java b/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java index 1ea114f87f..b67083bd99 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java @@ -21,6 +21,7 @@ import com.bakdata.conquery.commands.StandaloneCommand; import com.bakdata.conquery.integration.IntegrationTests; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.mode.cluster.ClusterState; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.XodusStoreFactory; @@ -29,6 +30,7 @@ import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.worker.DatasetRegistry; +import
com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.Wait; import com.bakdata.conquery.util.io.Cloner; @@ -123,10 +125,11 @@ public synchronized StandaloneSupport openDataset(DatasetId datasetId) { } private synchronized StandaloneSupport createSupport(DatasetId datasetId, String name) { - DatasetRegistry datasets = standaloneCommand.getManager().getDatasetRegistry(); - Namespace ns = datasets.get(datasetId); + DatasetRegistry<DistributedNamespace> datasets = standaloneCommand.getManager().getDatasetRegistry(); + DistributedNamespace ns = datasets.get(datasetId); - assertThat(datasets.getShardNodes()).hasSize(2); + ClusterState clusterState = standaloneCommand.getManager().getConnectionManager().getClusterState(); + assertThat(clusterState.getShardNodes()).hasSize(2); // make tmp subdir and change cfg accordingly File localTmpDir = new File(tmpDir, "tmp_" + name); @@ -139,7 +142,7 @@ private synchronized StandaloneSupport createSupport(DatasetId datasetId, String log.info("Reusing existing folder {} for Support", localTmpDir.getPath()); } - ConqueryConfig localCfg = Cloner.clone(config, Map.of(Validator.class, standaloneCommand.getManager().getEnvironment().getValidator()), IntegrationTests.MAPPER); + ConqueryConfig localCfg = Cloner.clone(config, Map.of(Validator.class, standaloneCommand.getManagerNode().getEnvironment().getValidator()), IntegrationTests.MAPPER); StandaloneSupport support = new StandaloneSupport( @@ -148,8 +151,8 @@ private synchronized StandaloneSupport createSupport(DatasetId datasetId, String ns.getStorage().getDataset(), localTmpDir, localCfg, - standaloneCommand.getManager().getAdmin().getAdminProcessor(), - standaloneCommand.getManager().getAdmin().getAdminDatasetProcessor(), + standaloneCommand.getManagerNode().getAdmin().getAdminProcessor(), + standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor(), // Getting the User from AuthorizationConfig testUser ); @@ -158,7 +161,7 @@ private synchronized StandaloneSupport createSupport(DatasetId datasetId, String .total(Duration.ofSeconds(5)) .stepTime(Duration.ofMillis(5)) .build() - .until(() -> ns.getWorkers().size() == datasets.getShardNodes().size()); + .until(() -> clusterState.getWorkerHandlers().get(datasetId).getWorkers().size() == clusterState.getShardNodes().size()); support.waitUntilWorkDone(); openSupports.add(support); @@ -173,7 +176,7 @@ public synchronized StandaloneSupport getSupport(String name) { name += "[" + count + "]"; } Dataset dataset = new Dataset(name); - standaloneCommand.getManager().getAdmin().getAdminDatasetProcessor().addDataset(dataset); + standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor().addDataset(dataset); return createSupport(dataset.getId(), name); } catch (Exception e) { @@ -223,13 +226,13 @@ public void afterEach() throws Exception { } openSupports.clear(); } - this.getStandaloneCommand().getManager().getStorage().clear(); + this.getStandaloneCommand().getManagerNode().getStorage().clear(); waitUntilWorkDone(); } @SneakyThrows public void removeSupportDataset(StandaloneSupport support) { - standaloneCommand.getManager().getDatasetRegistry().removeNamespace(support.getDataset().getId()); + standaloneCommand.getManagerNode().getDatasetRegistry().removeNamespace(support.getDataset().getId()); } public void removeSupport(StandaloneSupport support) { @@ -265,15 +268,15 @@ public void waitUntilWorkDone() { private boolean isBusy() { boolean busy; - busy =
standaloneCommand.getManager().getJobManager().isSlowWorkerBusy(); - busy |= standaloneCommand.getManager() + busy = standaloneCommand.getManagerNode().getJobManager().isSlowWorkerBusy(); + busy |= standaloneCommand.getManagerNode() .getStorage() .getAllExecutions() .stream() .map(ManagedExecution::getState) .anyMatch(ExecutionState.RUNNING::equals); - for (Namespace namespace : standaloneCommand.getManager().getDatasetRegistry().getDatasets()) { + for (Namespace namespace : standaloneCommand.getManagerNode().getDatasetRegistry().getDatasets()) { busy |= namespace.getJobManager().isSlowWorkerBusy(); } @@ -284,8 +287,8 @@ private boolean isBusy() { } public void beforeEach() { - final MetaStorage storage = standaloneCommand.getManager().getStorage(); - testUser = standaloneCommand.getManager().getConfig().getAuthorizationRealms().getInitialUsers().get(0).createOrOverwriteUser(storage); + final MetaStorage storage = standaloneCommand.getManagerNode().getStorage(); + testUser = standaloneCommand.getManagerNode().getConfig().getAuthorizationRealms().getInitialUsers().get(0).createOrOverwriteUser(storage); storage.updateUser(testUser); } } diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/TestSupport.java b/backend/src/test/java/com/bakdata/conquery/util/support/TestSupport.java new file mode 100644 index 0000000000..e7f9d6a7f6 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/support/TestSupport.java @@ -0,0 +1,28 @@ +package com.bakdata.conquery.util.support; + +import javax.validation.Validator; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.worker.Namespace; + +public interface TestSupport { + + Dataset getDataset(); + + Namespace getNamespace(); + + Validator getValidator(); + + MetaStorage getMetaStorage(); + + NamespaceStorage getNamespaceStorage(); + + ConqueryConfig getConfig(); + + User getTestUser(); + +} diff --git a/backend/src/test/resources/tests/sql/and/different_concept/and.json b/backend/src/test/resources/tests/sql/and/different_concept/and.json new file mode 100644 index 0000000000..9e855976df --- /dev/null +++ b/backend/src/test/resources/tests/sql/and/different_concept/and.json @@ -0,0 +1,207 @@ +{ + "label": "Simple AND query for 3 different concepts", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0, + "max": 1 + } + } + ], + "selects": [ + "number.number_connector.value" + ] + } + ] + }, + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.geschlecht" + ] + } + ] + }, + { + "ids": [ + "language_select" + ], + "type": "CONCEPT", + "label": "Language SELECT", + "tables": [ + { + "id": "language_select.language_connector", + "filters": [ + { + 
"filter": "language_select.language_connector.language", + "type": "BIG_MULTI_SELECT", + "value": [ + "de" + ] + } + ], + "selects": [ + "language_select.language_connector.language" + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + }, + "selects": { + "name": "value", + "column": "table1.value", + "type": "FIRST" + } + } + ] + }, + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table2", + "validityDates": { + "label": "datum", + "column": "table2.datum" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table2.geschlecht", + "type": "SELECT" + }, + "selects": { + "name": "geschlecht", + "column": "table2.geschlecht", + "type": "FIRST" + } + } + ] + }, + { + "label": "language_select", + "type": "TREE", + "connectors": [ + { + "label": "language_connector", + "table": "table2", + "validityDates": { + "label": "datum", + "column": "table2.datum" + }, + "filters": { + "label": "language", + "description": "Sprache", + "column": "table2.language", + "type": "SELECT" + }, + "selects": { + "name": "language", + "column": "table2.language", + "type": "FIRST" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/and/different_concept/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum", + "type": "DATE_RANGE" + } + ] + }, + { + "csv": "tests/sql/and/different_concept/content_2.csv", + "name": "table2", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + }, + { + "name": "language", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv b/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv new file mode 100644 index 0000000000..1851eed139 --- /dev/null +++ b/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv @@ -0,0 +1,13 @@ +pid,value,datum +1,1,"2014-06-30/2015-06-30" +2,1.01,"2014-06-30/2015-06-30" +1,1,"2015-02-03/2015-06-30" +1,0.5,"2014-06-30/2015-06-30" +3,0.5,"2014-04-30/2014-06-30" +4,1,"2014-06-30/2015-06-30" +5,0.5,"2014-04-30/2014-06-30" +5,1,"2014-06-30/2015-06-30" +6,1,"2014-04-30/2014-06-30" +7,1,"2014-02-05/2014-02-20" +8,1,"2014-04-30/2014-06-30" +7,-1,"2014-06-30/2015-06-30" diff --git a/backend/src/test/resources/tests/sql/and/different_concept/content_2.csv b/backend/src/test/resources/tests/sql/and/different_concept/content_2.csv new file mode 100644 index 0000000000..dc012de238 --- /dev/null +++ b/backend/src/test/resources/tests/sql/and/different_concept/content_2.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht,language +1,2012-01-01,"f","de" +2,2010-07-15,"m","fr" +3,2013-11-10,"f","en" +4,2012-11-11,"m","" +5,2007-11-11,"","" +6,2012-11-11,"","de" +7,2012-11-11,"mf","de" +8,2012-11-11,"fm","fr" diff --git a/backend/src/test/resources/tests/sql/and/different_concept/expected.csv b/backend/src/test/resources/tests/sql/and/different_concept/expected.csv new file mode 100644 
index 0000000000..beeae56d79 --- /dev/null +++ b/backend/src/test/resources/tests/sql/and/different_concept/expected.csv @@ -0,0 +1,4 @@ +pid,datum,value,geschlecht,language +1,"[2014-06-30,2015-06-30)",1,f,de +1,"[2015-02-03,2015-06-30)",1,f,de +1,"[2014-06-30,2015-06-30)",0.5,f,de diff --git a/backend/src/test/resources/tests/sql/and/same_concept/and.json b/backend/src/test/resources/tests/sql/and/same_concept/and.json new file mode 100644 index 0000000000..81226f3d8f --- /dev/null +++ b/backend/src/test/resources/tests/sql/and/same_concept/and.json @@ -0,0 +1,93 @@ +{ + "label": "Simple AND query for same concept", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0.5, + "max": 1 + } + } + ] + } + ] + }, + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 1, + "max": 2 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/and/same_concept/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/and/same_concept/content_1.csv b/backend/src/test/resources/tests/sql/and/same_concept/content_1.csv new file mode 100644 index 0000000000..5ceffe16ca --- /dev/null +++ b/backend/src/test/resources/tests/sql/and/same_concept/content_1.csv @@ -0,0 +1,13 @@ +pid,value +1,1 +2,1.01 +1,1 +1,0.5 +3,0.5 +4,1 +5,0.5 +5,1 +6,1 +7,1 +8,1 +7,-1 diff --git a/backend/src/test/resources/tests/sql/and/same_concept/expected.csv b/backend/src/test/resources/tests/sql/and/same_concept/expected.csv new file mode 100644 index 0000000000..4d99aa75d0 --- /dev/null +++ b/backend/src/test/resources/tests/sql/and/same_concept/expected.csv @@ -0,0 +1,13 @@ +pid +1 +1 +1 +1 +1 +1 +4 +5 +5 +6 +7 +8 diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv new file mode 100644 index 0000000000..6f280c47ef --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv @@ -0,0 +1,9 @@ +pid,datum,datum_alt,geschlecht +1,"2012-06-30/2015-06-30",2012-01-01,"f" +2,"2012-06-30/2015-06-30",2010-07-15,"m" +3,"2012-02-03/2012-06-30",2012-11-10,"f" +4,"2010-06-30/2015-06-30",2012-11-11,"m" +5,"2011-04-30/2014-06-30",2007-11-11,"" +6,"2015-06-30/2016-06-30",2012-11-11,"" +7,"2014-04-30/2015-06-30",2012-11-11,"mf" +8,"2012-04-30/2014-06-30",2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json 
b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json new file mode 100644 index 0000000000..cecd7086e0 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json @@ -0,0 +1,98 @@ +{ + "label": "Date restriction with multiple validity dates and dateColumn", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2012-01-01", + "max": "2012-12-31" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ], + "dateColumn" : { + "value" : "geschlecht_select.geschlecht_connector.datum_alt" + } + } + ] + } + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": [ + { + "label": "datum", + "column": "table1.datum" + }, + { + "label": "datum_alt", + "column": "table1.datum_alt" + } + ], + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/date_restriction/date_restriction_date_column/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE_RANGE" + }, + { + "name": "datum_alt", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/expected.csv new file mode 100644 index 0000000000..2b81453f9f --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/expected.csv @@ -0,0 +1,3 @@ +pid,datum_alt +1,"[2012-01-01,2012-01-02)" +3,"[2012-11-10,2012-11-11)" diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/content.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/content.csv new file mode 100644 index 0000000000..212025dec4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2012-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11,"" +6,2012-11-11,"" +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/date_restriction_no_validity_date.json b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/date_restriction_no_validity_date.json new file mode 100644 index 0000000000..bd3ee6c472 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/date_restriction_no_validity_date.json @@ -0,0 +1,82 @@ +{ + "label": "Date restriction query without validity date", + "description": "If a date restriction is active, but 
there is no validity date defined to apply the date restriction on, the date restriction filter should not be applied.", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2022-01-01", + "max": "2022-12-31" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/date_restriction/date_restriction_no_validity_date/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/expected.csv new file mode 100644 index 0000000000..b6a85aedc6 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/expected.csv @@ -0,0 +1,3 @@ +pid +1 +3 diff --git a/backend/src/test/resources/tests/sql/date_restriction/daterange/content.csv b/backend/src/test/resources/tests/sql/date_restriction/daterange/content.csv new file mode 100644 index 0000000000..f3da646264 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/daterange/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,"2012-06-30/2015-06-30","f" +2,"2012-06-30/2015-06-30","m" +3,"2012-02-03/2012-06-30","f" +4,"2010-06-30/2015-06-30","m" +5,"2011-04-30/2014-06-30","" +6,"2015-06-30/2016-06-30","" +7,"2014-04-30/2015-06-30","mf" +8,"2012-04-30/2014-06-30","fm" diff --git a/backend/src/test/resources/tests/sql/date_restriction/daterange/date_restriction_date_range.json b/backend/src/test/resources/tests/sql/date_restriction/daterange/date_restriction_date_range.json new file mode 100644 index 0000000000..4942aacbfa --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/daterange/date_restriction_date_range.json @@ -0,0 +1,85 @@ +{ + "label": "Date restriction query with daterange validity date", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2012-01-01", + "max": "2012-12-31" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", 
+ "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/date_restriction/daterange/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE_RANGE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/date_restriction/daterange/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/daterange/expected.csv new file mode 100644 index 0000000000..a8bff966b3 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/daterange/expected.csv @@ -0,0 +1,3 @@ +pid,datum +1,"[2012-06-30,2015-06-30)" +3,"[2012-02-03,2012-06-30)" diff --git a/backend/src/test/resources/tests/sql/date_restriction/simple_date/content.csv b/backend/src/test/resources/tests/sql/date_restriction/simple_date/content.csv new file mode 100644 index 0000000000..212025dec4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/simple_date/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2012-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11,"" +6,2012-11-11,"" +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/date_restriction/simple_date/date_restriction_simple_date.json b/backend/src/test/resources/tests/sql/date_restriction/simple_date/date_restriction_simple_date.json new file mode 100644 index 0000000000..4749faac8d --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/simple_date/date_restriction_simple_date.json @@ -0,0 +1,85 @@ +{ + "label": "Date restriction query with simple date validity date", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2012-01-01", + "max": "2012-12-31" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/date_restriction/simple_date/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/date_restriction/simple_date/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/simple_date/expected.csv new file mode 100644 index 0000000000..c34539ab81 --- /dev/null +++ 
b/backend/src/test/resources/tests/sql/date_restriction/simple_date/expected.csv @@ -0,0 +1,3 @@ +pid,datum +1,"[2012-01-01,2012-01-02)" +3,"[2012-11-10,2012-11-11)" diff --git a/backend/src/test/resources/tests/sql/filter/number/content.csv b/backend/src/test/resources/tests/sql/filter/number/content.csv new file mode 100644 index 0000000000..1851eed139 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number/content.csv @@ -0,0 +1,13 @@ +pid,value,datum +1,1,"2014-06-30/2015-06-30" +2,1.01,"2014-06-30/2015-06-30" +1,1,"2015-02-03/2015-06-30" +1,0.5,"2014-06-30/2015-06-30" +3,0.5,"2014-04-30/2014-06-30" +4,1,"2014-06-30/2015-06-30" +5,0.5,"2014-04-30/2014-06-30" +5,1,"2014-06-30/2015-06-30" +6,1,"2014-04-30/2014-06-30" +7,1,"2014-02-05/2014-02-20" +8,1,"2014-04-30/2014-06-30" +7,-1,"2014-06-30/2015-06-30" diff --git a/backend/src/test/resources/tests/sql/filter/number/expected.csv b/backend/src/test/resources/tests/sql/filter/number/expected.csv new file mode 100644 index 0000000000..ce715976a6 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number/expected.csv @@ -0,0 +1,11 @@ +pid +1 +1 +1 +3 +4 +5 +5 +6 +7 +8 diff --git a/backend/src/test/resources/tests/sql/filter/number/number.spec.json b/backend/src/test/resources/tests/sql/filter/number/number.spec.json new file mode 100644 index 0000000000..46dd0f5425 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number/number.spec.json @@ -0,0 +1,75 @@ +{ + "label": "Single Number-Real-Range Filter Query", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0.5, + "max": 1 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/number/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum", + "type": "DATE_RANGE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/number_only_max/content.csv b/backend/src/test/resources/tests/sql/filter/number_only_max/content.csv new file mode 100644 index 0000000000..1851eed139 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number_only_max/content.csv @@ -0,0 +1,13 @@ +pid,value,datum +1,1,"2014-06-30/2015-06-30" +2,1.01,"2014-06-30/2015-06-30" +1,1,"2015-02-03/2015-06-30" +1,0.5,"2014-06-30/2015-06-30" +3,0.5,"2014-04-30/2014-06-30" +4,1,"2014-06-30/2015-06-30" +5,0.5,"2014-04-30/2014-06-30" +5,1,"2014-06-30/2015-06-30" +6,1,"2014-04-30/2014-06-30" +7,1,"2014-02-05/2014-02-20" +8,1,"2014-04-30/2014-06-30" +7,-1,"2014-06-30/2015-06-30" diff --git a/backend/src/test/resources/tests/sql/filter/number_only_max/expected.csv b/backend/src/test/resources/tests/sql/filter/number_only_max/expected.csv new file mode 100644 index 0000000000..ce668fc0b4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number_only_max/expected.csv @@ -0,0 +1,5 @@ +pid +1 +3 +5 +7 diff --git 
a/backend/src/test/resources/tests/sql/filter/number_only_max/number_only_max.spec.json b/backend/src/test/resources/tests/sql/filter/number_only_max/number_only_max.spec.json new file mode 100644 index 0000000000..1b49d2b5aa --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number_only_max/number_only_max.spec.json @@ -0,0 +1,74 @@ +{ + "label": "Single Number-Real-Range Filter Query (only max val)", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "max": 0.5 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/number_only_max/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum", + "type": "DATE_RANGE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv b/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv new file mode 100644 index 0000000000..1851eed139 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv @@ -0,0 +1,13 @@ +pid,value,datum +1,1,"2014-06-30/2015-06-30" +2,1.01,"2014-06-30/2015-06-30" +1,1,"2015-02-03/2015-06-30" +1,0.5,"2014-06-30/2015-06-30" +3,0.5,"2014-04-30/2014-06-30" +4,1,"2014-06-30/2015-06-30" +5,0.5,"2014-04-30/2014-06-30" +5,1,"2014-06-30/2015-06-30" +6,1,"2014-04-30/2014-06-30" +7,1,"2014-02-05/2014-02-20" +8,1,"2014-04-30/2014-06-30" +7,-1,"2014-06-30/2015-06-30" diff --git a/backend/src/test/resources/tests/sql/filter/number_only_min/expected.csv b/backend/src/test/resources/tests/sql/filter/number_only_min/expected.csv new file mode 100644 index 0000000000..9b1503f7a8 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number_only_min/expected.csv @@ -0,0 +1,12 @@ +pid +1 +2 +1 +1 +3 +4 +5 +5 +6 +7 +8 diff --git a/backend/src/test/resources/tests/sql/filter/number_only_min/number_only_min.spec.json b/backend/src/test/resources/tests/sql/filter/number_only_min/number_only_min.spec.json new file mode 100644 index 0000000000..918c2d521d --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/number_only_min/number_only_min.spec.json @@ -0,0 +1,74 @@ +{ + "label": "Single Number-Real-Range Filter Query (only min val)", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0.5 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + } + } + ] + } + ], + 
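The number, number_only_max and number_only_min specs around this point cover the three shapes a REAL_RANGE value can take. Assuming the backend maps REAL_RANGE directly onto comparisons against the filter's column, the dispatch could reduce to this sketch:

```java
import java.math.BigDecimal;

import org.jooq.Condition;
import org.jooq.Field;
import org.jooq.impl.DSL;

public class RealRangeSketch {

    /** Builds a WHERE condition for a REAL_RANGE value with optional min/max bounds. */
    static Condition realRange(Field<BigDecimal> column, BigDecimal min, BigDecimal max) {
        if (min != null && max != null) {
            return column.between(min, max);   // {"min": 0.5, "max": 1}
        }
        if (min != null) {
            return column.greaterOrEqual(min); // {"min": 0.5}
        }
        if (max != null) {
            return column.lessOrEqual(max);    // {"max": 0.5}
        }
        return DSL.noCondition();
    }

    public static void main(String[] args) {
        Field<BigDecimal> value = DSL.field("table1.value", BigDecimal.class);
        System.out.println(realRange(value, new BigDecimal("0.5"), BigDecimal.ONE));
    }
}
```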
"content": { + "tables": [ + { + "csv": "tests/sql/filter/number_only_min/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum", + "type": "DATE_RANGE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/select/content.csv b/backend/src/test/resources/tests/sql/filter/select/content.csv new file mode 100644 index 0000000000..db93b08bd4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/select/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2013-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11,"" +6,2012-11-11,"" +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/filter/select/expected.csv b/backend/src/test/resources/tests/sql/filter/select/expected.csv new file mode 100644 index 0000000000..b5b5670cd1 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/select/expected.csv @@ -0,0 +1,3 @@ +pid +1 +3 \ No newline at end of file diff --git a/backend/src/test/resources/tests/sql/filter/select/select.spec.json b/backend/src/test/resources/tests/sql/filter/select/select.spec.json new file mode 100644 index 0000000000..11ae8586f4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/select/select.spec.json @@ -0,0 +1,74 @@ +{ + "label": "Single Big-Multi-Select Filter Query", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ] + } + ] + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + } + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/filter/select/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/not/content.csv b/backend/src/test/resources/tests/sql/not/content.csv new file mode 100644 index 0000000000..8dab40f969 --- /dev/null +++ b/backend/src/test/resources/tests/sql/not/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2013-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11, +6,2012-11-11, +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/not/expected.csv b/backend/src/test/resources/tests/sql/not/expected.csv new file mode 100644 index 0000000000..416640214f --- /dev/null +++ b/backend/src/test/resources/tests/sql/not/expected.csv @@ -0,0 +1,5 @@ +pid +2 +4 +7 +8 diff --git a/backend/src/test/resources/tests/sql/not/not.spec.json b/backend/src/test/resources/tests/sql/not/not.spec.json new file mode 100644 index 0000000000..bf1611ece6 --- /dev/null +++ b/backend/src/test/resources/tests/sql/not/not.spec.json @@ -0,0 +1,72 @@ +{ + "label": "Simple Negation Query", + "type": "SQL_TEST", + "expectedCsv": 
"expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "NEGATION", + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/not/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv b/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv new file mode 100644 index 0000000000..1851eed139 --- /dev/null +++ b/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv @@ -0,0 +1,13 @@ +pid,value,datum +1,1,"2014-06-30/2015-06-30" +2,1.01,"2014-06-30/2015-06-30" +1,1,"2015-02-03/2015-06-30" +1,0.5,"2014-06-30/2015-06-30" +3,0.5,"2014-04-30/2014-06-30" +4,1,"2014-06-30/2015-06-30" +5,0.5,"2014-04-30/2014-06-30" +5,1,"2014-06-30/2015-06-30" +6,1,"2014-04-30/2014-06-30" +7,1,"2014-02-05/2014-02-20" +8,1,"2014-04-30/2014-06-30" +7,-1,"2014-06-30/2015-06-30" diff --git a/backend/src/test/resources/tests/sql/or/different_concept/content_2.csv b/backend/src/test/resources/tests/sql/or/different_concept/content_2.csv new file mode 100644 index 0000000000..dc012de238 --- /dev/null +++ b/backend/src/test/resources/tests/sql/or/different_concept/content_2.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht,language +1,2012-01-01,"f","de" +2,2010-07-15,"m","fr" +3,2013-11-10,"f","en" +4,2012-11-11,"m","" +5,2007-11-11,"","" +6,2012-11-11,"","de" +7,2012-11-11,"mf","de" +8,2012-11-11,"fm","fr" diff --git a/backend/src/test/resources/tests/sql/or/different_concept/expected.csv b/backend/src/test/resources/tests/sql/or/different_concept/expected.csv new file mode 100644 index 0000000000..dea40ae400 --- /dev/null +++ b/backend/src/test/resources/tests/sql/or/different_concept/expected.csv @@ -0,0 +1,4 @@ +pid,validity_date_1,,value,geschlecht,language +7,"[2014-06-30,2015-06-30)",-1,mf, +8,,,,fr +2,,,,fr diff --git a/backend/src/test/resources/tests/sql/or/different_concept/or.spec.json b/backend/src/test/resources/tests/sql/or/different_concept/or.spec.json new file mode 100644 index 0000000000..4fd7f736e4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/or/different_concept/or.spec.json @@ -0,0 +1,207 @@ +{ + "label": "Simple OR query for 3 different concepts", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "OR", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": -1, + "max": 0 + } + } + ], + "selects": [ + "number.number_connector.value" + ] + } + ] + }, + { + "ids": [ + "geschlecht_select" + ], 
+ "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "mf" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.geschlecht" + ] + } + ] + }, + { + "ids": [ + "language_select" + ], + "type": "CONCEPT", + "label": "Language SELECT", + "tables": [ + { + "id": "language_select.language_connector", + "filters": [ + { + "filter": "language_select.language_connector.language", + "type": "BIG_MULTI_SELECT", + "value": [ + "fr" + ] + } + ], + "selects": [ + "language_select.language_connector.language" + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + }, + "selects": { + "name": "value", + "column": "table1.value", + "type": "FIRST" + } + } + ] + }, + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table2", + "validityDates": { + "label": "datum", + "column": "table2.datum" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table2.geschlecht", + "type": "SELECT" + }, + "selects": { + "name": "geschlecht", + "column": "table2.geschlecht", + "type": "FIRST" + } + } + ] + }, + { + "label": "language_select", + "type": "TREE", + "connectors": [ + { + "label": "language_connector", + "table": "table2", + "validityDates": { + "label": "datum", + "column": "table2.datum" + }, + "filters": { + "label": "language", + "description": "Sprache", + "column": "table2.language", + "type": "SELECT" + }, + "selects": { + "name": "language", + "column": "table2.language", + "type": "FIRST" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/or/different_concept/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum", + "type": "DATE_RANGE" + } + ] + }, + { + "csv": "tests/sql/or/different_concept/content_2.csv", + "name": "table2", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + }, + { + "name": "language", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/or/same_concept/content_1.csv b/backend/src/test/resources/tests/sql/or/same_concept/content_1.csv new file mode 100644 index 0000000000..5ceffe16ca --- /dev/null +++ b/backend/src/test/resources/tests/sql/or/same_concept/content_1.csv @@ -0,0 +1,13 @@ +pid,value +1,1 +2,1.01 +1,1 +1,0.5 +3,0.5 +4,1 +5,0.5 +5,1 +6,1 +7,1 +8,1 +7,-1 diff --git a/backend/src/test/resources/tests/sql/or/same_concept/expected.csv b/backend/src/test/resources/tests/sql/or/same_concept/expected.csv new file mode 100644 index 0000000000..4869420d12 --- /dev/null +++ b/backend/src/test/resources/tests/sql/or/same_concept/expected.csv @@ -0,0 +1,3 @@ +pid +7 +2 diff --git a/backend/src/test/resources/tests/sql/or/same_concept/or.spec.json b/backend/src/test/resources/tests/sql/or/same_concept/or.spec.json new file mode 100644 index 
0000000000..8a8dec1b9e --- /dev/null +++ b/backend/src/test/resources/tests/sql/or/same_concept/or.spec.json @@ -0,0 +1,93 @@ +{ + "label": "Simple OR query for same concept", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "OR", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": -1, + "max": 0 + } + } + ] + } + ] + }, + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 1.0001, + "max": 1.5 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/or/same_concept/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/centuries.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/centuries.spec.json new file mode 100644 index 0000000000..97e6b8f6e1 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/centuries.spec.json @@ -0,0 +1,87 @@ +{ + "label": "DATE_DISTANCE select query with timeUnit CENTURIES", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.date_distance_centuries" + ] + } + ] + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + }, + "selects": [ + { + "column": "table1.datum", + "default": true, + "label": "date_distance_centuries", + "name": "date_distance_centuries", + "timeUnit": "CENTURIES", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/selects/date_distance/centuries/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/content.csv new file mode 100644 index 0000000000..5058be62da --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht 
+1,1920-01-01,"f" +2,2010-07-15,"m" +3,2010-11-10,"f" +4,2013-11-11,"m" +5,2007-11-11,"" +6,2014-11-11,"" +7,2015-11-11,"mf" +8,2011-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/expected.csv new file mode 100644 index 0000000000..c0043272fe --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/expected.csv @@ -0,0 +1,3 @@ +pid,date_distance_centuries +1,1 +3,0 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/content.csv new file mode 100644 index 0000000000..77a2fec9a1 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2012-11-10,"f" +4,2013-11-11,"m" +5,2007-11-11,"" +6,2014-11-11,"" +7,2015-11-11,"mf" +8,2011-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json new file mode 100644 index 0000000000..f18f1934b2 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json @@ -0,0 +1,98 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE select query with timeUnit DAYS and date restriction set", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2012-01-01", + "max": "2012-12-31" + }, + "child": { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.date_distance_days" + ] + } + ] + } + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + }, + "selects": [ + { + "column": "table1.datum", + "default": true, + "label": "date_distance_days", + "name": "date_distance_days", + "timeUnit": "DAYS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/selects/date_distance/days_with_date_restriction/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/expected.csv new file mode 100644 index 0000000000..f434fa729b --- /dev/null +++ 
b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/expected.csv @@ -0,0 +1,3 @@ +pid,datum,date_distance_days +1,"[2012-01-01,2012-01-02)",365 +3,"[2012-11-10,2012-11-11)",51 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/content.csv new file mode 100644 index 0000000000..5195678965 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2013-11-10,"f" +4,2013-11-11,"m" +5,2007-11-11,"" +6,2014-11-11,"" +7,2015-11-11,"mf" +8,2011-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/days_without_date_restriction.json b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/days_without_date_restriction.json new file mode 100644 index 0000000000..4c54123139 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/days_without_date_restriction.json @@ -0,0 +1,87 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE select query with timeUnit DAYS and without date restriction set", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.date_distance_days" + ] + } + ] + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + }, + "selects": [ + { + "column": "table1.datum", + "default": true, + "label": "date_distance_days", + "name": "date_distance_days", + "timeUnit": "DAYS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/selects/date_distance/days_without_date_restriction/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/expected.csv new file mode 100644 index 0000000000..f6fe682a1e --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/expected.csv @@ -0,0 +1,3 @@ +pid,date_distance_days +1,4104 +3,3425 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/decades/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/decades/content.csv new file mode 100644 index 0000000000..a27a9a8243 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/decades/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" 
+2,2010-07-15,"m" +3,2020-11-10,"f" +4,2013-11-11,"m" +5,2007-11-11,"" +6,2014-11-11,"" +7,2015-11-11,"mf" +8,2011-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/decades/decades.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/decades/decades.spec.json new file mode 100644 index 0000000000..586cc74a15 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/decades/decades.spec.json @@ -0,0 +1,87 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE select query with timeUnit DECADES", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.date_distance_decades" + ] + } + ] + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + }, + "selects": [ + { + "column": "table1.datum", + "default": true, + "label": "date_distance_decades", + "name": "date_distance_decades", + "timeUnit": "DECADES", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/selects/date_distance/decades/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/decades/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/decades/expected.csv new file mode 100644 index 0000000000..171337e385 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/decades/expected.csv @@ -0,0 +1,3 @@ +pid,date_distance_decades +1,1 +3,0 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv new file mode 100644 index 0000000000..c2d4f04aef --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2010-11-10,"f" +4,2013-11-11,"m" +5,2007-11-11,"" +6,2014-11-11,"" +7,2015-11-11,"mf" +8,2011-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv new file mode 100644 index 0000000000..d0212b39b1 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv @@ -0,0 +1,3 @@ +pid,date_distance_months +1,134 +3,148 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/months.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/months/months.spec.json new file mode 100644 index 0000000000..467c8aa561 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/months.spec.json @@ -0,0 +1,87 @@ +{ + "type": "SQL_TEST", + "label": 
"DATE_DISTANCE select query with timeUnit MONTHS", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.date_distance_months" + ] + } + ] + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + }, + "selects": [ + { + "column": "table1.datum", + "default": true, + "label": "date_distance_months", + "name": "date_distance_months", + "timeUnit": "MONTHS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/selects/date_distance/months/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/years/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/years/content.csv new file mode 100644 index 0000000000..c2d4f04aef --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/years/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2010-11-10,"f" +4,2013-11-11,"m" +5,2007-11-11,"" +6,2014-11-11,"" +7,2015-11-11,"mf" +8,2011-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv new file mode 100644 index 0000000000..c0ec9df19d --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv @@ -0,0 +1,3 @@ +pid,date_distance_years +1,11 +3,13 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/years/years.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/years/years.spec.json new file mode 100644 index 0000000000..fba8c65151 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/date_distance/years/years.spec.json @@ -0,0 +1,87 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE select query with timeUnit YEARS", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ], + "selects": [ + "geschlecht_select.geschlecht_connector.date_distance_years" + ] + } + ] + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + }, + "selects": [ + { + "column": "table1.datum", 
+ "default": true, + "label": "date_distance_years", + "name": "date_distance_years", + "timeUnit": "YEARS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/selects/date_distance/years/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/default/content.csv b/backend/src/test/resources/tests/sql/selects/validity_date/default/content.csv new file mode 100644 index 0000000000..db93b08bd4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/default/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2013-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11,"" +6,2012-11-11,"" +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/default/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/default/expected.csv new file mode 100644 index 0000000000..a09d9ed8b4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/default/expected.csv @@ -0,0 +1,3 @@ +pid,datum +1,"[2012-01-01,2012-01-02)" +3,"[2013-11-10,2013-11-11)" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/default/validity_date_default.json b/backend/src/test/resources/tests/sql/selects/validity_date/default/validity_date_default.json new file mode 100644 index 0000000000..705aeb28f5 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/default/validity_date_default.json @@ -0,0 +1,79 @@ +{ + "type": "SQL_TEST", + "label": "Validity date as default select without date restriction set", + "description": "If validity dates exist and the concept is not excluded from time aggregation, validity dates should be part of the final selects - regardless if a date restriction is set.", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids":[ + "geschlecht_select" + ], + "type":"CONCEPT", + "label":"Geschlecht SELECT", + "tables":[ + { + "id":"geschlecht_select.geschlecht_connector", + "filters":[ + { + "filter":"geschlecht_select.geschlecht_connector.geschlecht", + "type":"BIG_MULTI_SELECT", + "value":[ + "f" + ] + } + ] + } + ] + } + ] + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "validityDates":{ + "label":"datum", + "column":"table1.datum" + }, + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + } + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv new file mode 100644 index 0000000000..db93b08bd4 --- /dev/null +++ 
b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2013-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11,"" +6,2012-11-11,"" +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/expected.csv new file mode 100644 index 0000000000..b6a85aedc6 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/expected.csv @@ -0,0 +1,3 @@ +pid +1 +3 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/validity_date_excluded.json b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/validity_date_excluded.json new file mode 100644 index 0000000000..152f6f6c7d --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/validity_date_excluded.json @@ -0,0 +1,80 @@ +{ + "type": "SQL_TEST", + "label": "Validity date excluded from time aggregation", + "description": "If a concept is excluded from time aggregation, validity dates should not be part of the final select.", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "excludeFromTimeAggregation": true, + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/frontend/mock-api/mockApi.ts b/frontend/mock-api/mockApi.ts index ee7a96e03e..590bb65514 100644 --- a/frontend/mock-api/mockApi.ts +++ b/frontend/mock-api/mockApi.ts @@ -150,10 +150,7 @@ export default function mockApi(app: Application) { id: 1, status: "FAILED", error: { - code: "EXAMPLE_ERROR_INTERPOLATED", - context: { - adjective: "easy", - }, + message: "This is an example message" }, }), ); @@ -181,7 +178,7 @@ export default function mockApi(app: Application) { ], columnDescriptions: [ { - label: "Money Range", + label: "Money Range", selectId: null, type: "MONEY", }, diff --git a/frontend/src/index.ts b/frontend/src/index.ts index 90ef21ae98..959cf9b29a 100644 --- a/frontend/src/index.ts +++ b/frontend/src/index.ts @@ -1,6 +1,6 @@ import { theme } from "./app-theme"; import conquery from "./js"; -import { language, CustomEnvironment } from "./js/environment"; +import { language } from "./js/environment"; import i18next from "./js/localization/i18next";
import translationsDe from "./localization/de.json"; import translationsEn from "./localization/en.json"; @@ -9,6 +9,4 @@ i18next.addResourceBundle("de", "translation", translationsDe, true, true); i18next.addResourceBundle("en", "translation", translationsEn, true, true); i18next.changeLanguage(language); -const customEnvironment: CustomEnvironment = {}; - -conquery({ theme, customEnvironment }); +conquery({ theme }); diff --git a/frontend/src/js/api/types.ts b/frontend/src/js/api/types.ts index 2820fcc75e..a9f45f45f8 100644 --- a/frontend/src/js/api/types.ts +++ b/frontend/src/js/api/types.ts @@ -428,9 +428,8 @@ export interface GetQueryErrorResponseT { } export interface ErrorResponseT { - code: string; // To translate to localized messages - message?: string; // For developers / debugging only - context?: Record; // More information to maybe display in translated messages + message?: string; // Localized error message (based on Accept-Language header) to show to users + code: string; // Previously used to translate to localized messages, now unused } export type GetQueryResponseStatusT = diff --git a/frontend/src/js/editor-v2/EditorV2.tsx b/frontend/src/js/editor-v2/EditorV2.tsx index 4eff775dd8..bf1f574a13 100644 --- a/frontend/src/js/editor-v2/EditorV2.tsx +++ b/frontend/src/js/editor-v2/EditorV2.tsx @@ -10,12 +10,13 @@ import { faTrash, } from "@fortawesome/free-solid-svg-icons"; import { createId } from "@paralleldrive/cuid2"; -import { useCallback, useMemo, useState } from "react"; +import { useCallback, useEffect, useMemo, useState } from "react"; import { useHotkeys } from "react-hotkeys-hook"; import { useTranslation } from "react-i18next"; import IconButton from "../button/IconButton"; -import { nodeIsConceptQueryNode } from "../model/node"; +import { useDatasetId } from "../dataset/selectors"; +import { nodeIsConceptQueryNode, useActiveState } from "../model/node"; import { EmptyQueryEditorDropzone } from "../standard-query-editor/EmptyQueryEditorDropzone"; import { DragItemConceptTreeNode, @@ -92,6 +93,8 @@ const useEditorState = () => { return findNodeById(tree, selectedNodeId); }, [tree, selectedNodeId]); + const { active: selectedNodeActive } = useActiveState(selectedNode?.data); + const onReset = useCallback(() => { setTree(undefined); }, []); @@ -114,10 +117,18 @@ const useEditorState = () => { updateTreeNode, onReset, selectedNode, + selectedNodeActive, setSelectedNodeId, }; }; +const useResetOnDatasetChange = (onReset: () => void) => { + const datasetId = useDatasetId(); + useEffect(() => { + onReset(); + }, [datasetId, onReset]); +}; + export function EditorV2({ featureDates, featureNegate, @@ -142,9 +153,12 @@ export function EditorV2({ updateTreeNode, onReset, selectedNode, + selectedNodeActive, setSelectedNodeId, } = useEditorState(); + useResetOnDatasetChange(onReset); + const onFlip = useCallback(() => { if (!selectedNode || !selectedNode.children) return; @@ -311,7 +325,7 @@ export function EditorV2({ { e.stopPropagation(); onOpenQueryNodeEditor(); diff --git a/frontend/src/js/editor-v2/time-connection/TimeConnection.tsx b/frontend/src/js/editor-v2/time-connection/TimeConnection.tsx index c57748319c..4519726816 100644 --- a/frontend/src/js/editor-v2/time-connection/TimeConnection.tsx +++ b/frontend/src/js/editor-v2/time-connection/TimeConnection.tsx @@ -38,7 +38,7 @@ const Interval = styled("span")` `; const Operator = styled("span")` font-weight: bold; - color: ${({ theme }) => theme.col.palette[2]}; + color: ${({ theme }) => theme.col.palette.at(-2)}; `; 
diff --git a/frontend/src/js/entity-history/TabbableTimeStratifiedInfos.tsx b/frontend/src/js/entity-history/TabbableTimeStratifiedInfos.tsx
index 90cbcb4798..45033bf9db 100644
--- a/frontend/src/js/entity-history/TabbableTimeStratifiedInfos.tsx
+++ b/frontend/src/js/entity-history/TabbableTimeStratifiedInfos.tsx
@@ -9,9 +9,11 @@ import { TimeStratifiedConceptChart } from "./TimeStratifiedConceptChart";
 import { isConceptColumn, isMoneyColumn } from "./timeline/util";
 
 const Container = styled("div")`
+  align-self: flex-start;
   display: flex;
   flex-direction: column;
   align-items: flex-end;
+  overflow-x: hidden;
 `;
 
 export const TabbableTimeStratifiedInfos = ({
diff --git a/frontend/src/js/entity-history/TimeStratifiedConceptChart.tsx b/frontend/src/js/entity-history/TimeStratifiedConceptChart.tsx
index 8fb7a71cd8..8445992e75 100644
--- a/frontend/src/js/entity-history/TimeStratifiedConceptChart.tsx
+++ b/frontend/src/js/entity-history/TimeStratifiedConceptChart.tsx
@@ -1,10 +1,13 @@
 import styled from "@emotion/styled";
+import { faBan } from "@fortawesome/free-solid-svg-icons";
+import { useTranslation } from "react-i18next";
 
 import {
   ColumnDescriptionSemanticConceptColumn,
   TimeStratifiedInfo,
 } from "../api/types";
 import { getConceptById } from "../concept-trees/globalTreeStoreHelper";
+import FaIcon from "../icon/FaIcon";
 import WithTooltip from "../tooltip/WithTooltip";
 
 import { ConceptBubble } from "./ConceptBubble";
@@ -12,20 +15,29 @@ import { ConceptBubble } from "./ConceptBubble";
 
 const Container = styled("div")`
   display: grid;
   place-items: center;
+  max-width: 100%;
+  overflow-x: auto;
   gap: 0 3px;
   padding: 10px;
 `;
 
+const EmptyMsg = styled("p")`
+  font-size: ${({ theme }) => theme.font.md};
+  color: ${({ theme }) => theme.col.gray};
+  margin: 40px 0;
+  display: flex;
+  align-items: center;
+  gap: 10px;
+`;
+
 const BubbleYes = styled("div")`
-  width: 10px;
-  height: 10px;
-  border-radius: ${({ theme }) => theme.borderRadius};
+  width: 14px;
+  height: 14px;
   background-color: ${({ theme }) => theme.col.blueGray};
 `;
 
 const BubbleNo = styled("div")`
-  width: 10px;
-  height: 10px;
-  border-radius: ${({ theme }) => theme.borderRadius};
+  width: 14px;
+  height: 14px;
   background-color: ${({ theme }) => theme.col.grayLight};
 `;
@@ -38,6 +50,7 @@ export const TimeStratifiedConceptChart = ({
 }: {
   timeStratifiedInfo: TimeStratifiedInfo;
 }) => {
+  const { t } = useTranslation();
   const conceptColumn = timeStratifiedInfo.columns.at(-1);
 
   if (!conceptColumn) return null;
@@ -49,8 +62,12 @@ export const TimeStratifiedConceptChart = ({
 
   if (!conceptSemantic) return null;
 
-  const years = timeStratifiedInfo.years.map((y) => y.year);
-  const valuesPerYear = timeStratifiedInfo.years.map((y) =>
+  const descYearInfos = [...timeStratifiedInfo.years].sort(
+    (a, b) => b.year - a.year,
+  );
+
+  const years = descYearInfos.map((y) => y.year);
+  const valuesPerYear = descYearInfos.map((y) =>
     ((y.values[Object.keys(y.values)[0]] as string[]) || []).map(
       (conceptId) => getConceptById(conceptId, conceptSemantic?.concept)!,
     ),
@@ -69,6 +86,16 @@ export const TimeStratifiedConceptChart = ({
     ),
   ];
 
+  if (allValues.length === 0)
+    return (
+      <EmptyMsg>
+        <FaIcon icon={faBan} />
+        {t("history.noData")}
+      </EmptyMsg>
+    );
+
   return (
     …
           {val.label}
         ))}
-        {years.map((y, i) => (
+        {years.map((year, i) => (
           <>
-            {y}
+            {year}
             {allValues.map((val) =>
               valuesPerYear[i].includes(val) ? <BubbleYes /> : <BubbleNo />,
             )}
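The chart now renders years newest-first and derives, per year, the concept values of the first value column. A toy version of that data munging, with simplified types and made-up data:

```ts
type YearInfo = { year: number; values: Record<string, string[]> };

const infos: YearInfo[] = [
  { year: 2020, values: { concept: ["a", "b"] } },
  { year: 2022, values: { concept: ["b"] } },
];

// Sort descending without mutating the source array, as in the diff
const desc = [...infos].sort((a, b) => b.year - a.year);

const years = desc.map((y) => y.year); // [2022, 2020]
const valuesPerYear = desc.map(
  (y) => y.values[Object.keys(y.values)[0]] ?? [],
); // [["b"], ["a", "b"]]
```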
diff --git a/frontend/src/js/entity-history/Timeline.tsx b/frontend/src/js/entity-history/Timeline.tsx
index e8eb61319d..c42fc3fea7 100644
--- a/frontend/src/js/entity-history/Timeline.tsx
+++ b/frontend/src/js/entity-history/Timeline.tsx
@@ -20,10 +20,12 @@ import { TimelineEmptyPlaceholder } from "./timeline/TimelineEmptyPlaceholder";
 import Year from "./timeline/Year";
 import {
   isConceptColumn,
+  isDateColumn,
   isGroupableColumn,
   isIdColumn,
   isMoneyColumn,
   isSecondaryIdColumn,
+  isSourceColumn,
   isVisibleColumn,
 } from "./timeline/util";
 
@@ -83,14 +85,24 @@ const Timeline = ({
     (state) => state.startup.config.currency,
   );
 
-  const { columns, columnBuckets, rootConceptIdsByColumn } =
-    useColumnInformation();
+  const {
+    columns,
+    dateColumn,
+    sourceColumn,
+    columnBuckets,
+    rootConceptIdsByColumn,
+  } = useColumnInformation();
 
   const { eventsByQuarterWithGroups } = useTimeBucketedSortedData(data, {
+    sourceColumn,
+    dateColumn,
     sources,
     secondaryIds: columnBuckets.secondaryIds,
   });
 
+  const isEmpty =
+    eventsByQuarterWithGroups.length === 0 || !dateColumn || !sourceColumn;
+
   return (
-      {eventsByQuarterWithGroups.length === 0 && <TimelineEmptyPlaceholder />}
-      {eventsByQuarterWithGroups.map(({ year, quarterwiseData }, i) => (
-        …
-        {i < eventsByQuarterWithGroups.length - 1 && …}
-        …
-      ))}
+      {isEmpty && <TimelineEmptyPlaceholder />}
+      {dateColumn &&
+        sourceColumn &&
+        eventsByQuarterWithGroups.map(({ year, quarterwiseData }, i) => (
+          …
+          {i < eventsByQuarterWithGroups.length - 1 && …}
+          …
+        ))}
   );
 };
@@ -149,7 +165,11 @@ const diffObjects = (objects: Object[]): string[] => {
 };
 
 const findGroupsWithinQuarter =
-  (secondaryIds: ColumnDescription[]) =>
+  (
+    secondaryIds: ColumnDescription[],
+    dateColumn: ColumnDescription,
+    sourceColumn: ColumnDescription,
+  ) =>
   ({ quarter, events }: { quarter: number; events: EntityEvent[] }) => {
     if (events.length < 2) {
       return { quarter, groupedEvents: [events], differences: [[]] };
@@ -168,7 +188,7 @@
       }
 
       const groupKey =
-        evt.source +
+        evt[sourceColumn.label] +
         secondaryIds
           .filter(isGroupableColumn)
           .map(({ label }) => evt[label])
@@ -186,9 +206,9 @@
       return [
         {
           ...events[0],
-          dates: {
-            from: events[0].dates.from,
-            to: events[events.length - 1].dates.to,
+          [dateColumn.label]: {
+            from: events[0][dateColumn.label].from,
+            to: events[events.length - 1][dateColumn.label].to,
           },
         },
         ...events.slice(1),
@@ -208,6 +228,8 @@
 const findGroups = (
   eventsPerYears: EventsPerYear[],
   secondaryIds: ColumnDescription[],
+  dateColumn: ColumnDescription,
+  sourceColumn: ColumnDescription,
 ) => {
   const findGroupsWithinYear = ({
     year,
@@ -216,7 +238,7 @@
     return {
       year,
       quarterwiseData: quarterwiseData.map(
-        findGroupsWithinQuarter(secondaryIds),
+        findGroupsWithinQuarter(secondaryIds, dateColumn, sourceColumn),
       ),
     };
   };
@@ -247,19 +269,25 @@ const useTimeBucketedSortedData = (
   {
     sources,
     secondaryIds,
+    sourceColumn,
+    dateColumn,
   }: {
     sources: Set<string>;
     secondaryIds: ColumnDescription[];
+    sourceColumn?: ColumnDescription;
+    dateColumn?: ColumnDescription;
   },
 ) => {
   const groupByQuarter = (
     entityData: EntityHistoryStateT["currentEntityData"],
     sources: Set<string>,
+    dateColumn: ColumnDescription,
+    sourceColumn: ColumnDescription,
   ) => {
     const result: { [year: string]: { [quarter: number]: EntityEvent[] } } = {};
 
     for (const row of entityData) {
-      const [year, month] = row.dates.from.split("-");
+      const [year, month] = row[dateColumn.label].from.split("-");
       const quarter = Math.floor((parseInt(month) - 1) / 3) + 1;
 
       if (!result[year]) {
@@ -268,7 +296,7 @@
         result[year][quarter] = [];
       }
 
-      if (sources.has(row.source)) {
+      if (sources.has(row[sourceColumn.label])) {
        result[year][quarter].push(row);
       }
     }
@@ -311,13 +339,29 @@
   };
 
   return useMemo(() => {
-    const eventsByQuarter = groupByQuarter(data, sources);
-    const eventsByQuarterWithGroups = findGroups(eventsByQuarter, secondaryIds);
+    if (!data || !dateColumn || !sourceColumn) {
+      return {
+        eventsByQuarterWithGroups: [],
+      };
+    }
+
+    const eventsByQuarter = groupByQuarter(
+      data,
+      sources,
+      dateColumn,
+      sourceColumn,
+    );
+    const eventsByQuarterWithGroups = findGroups(
+      eventsByQuarter,
+      secondaryIds,
+      dateColumn,
+      sourceColumn,
+    );
 
     return {
       eventsByQuarterWithGroups,
     };
-  }, [data, sources, secondaryIds]);
+  }, [data, sources, secondaryIds, dateColumn, sourceColumn]);
 };
 
 export interface ColumnBuckets {
@@ -337,6 +381,16 @@ const useColumnInformation = () => {
     (state) => state.entityHistory.columns,
   );
 
+  const dateColumn = useMemo(
+    () => Object.values(columns).find(isDateColumn),
+    [columns],
+  );
+
+  const sourceColumn = useMemo(
+    () => Object.values(columns).find(isSourceColumn),
+    [columns],
+  );
+
   const columnBuckets: ColumnBuckets = useMemo(() => {
     const visibleColumnDescriptions =
       columnDescriptions.filter(isVisibleColumn);
@@ -376,6 +430,8 @@
   return {
     columns,
     columnBuckets,
+    dateColumn,
+    sourceColumn,
     rootConceptIdsByColumn,
   };
 };
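`groupByQuarter` buckets events by splitting the ISO date and mapping the month to a quarter with plain integer arithmetic. A standalone check of the formula used above:

```ts
const quarterOf = (isoDate: string): number => {
  const [, month] = isoDate.split("-");
  return Math.floor((parseInt(month) - 1) / 3) + 1;
};

quarterOf("2021-01-15"); // 1
quarterOf("2021-07-02"); // 3 — months 7, 8, 9 fall into Q3
quarterOf("2021-12-31"); // 4
```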
diff --git a/frontend/src/js/entity-history/actions.ts b/frontend/src/js/entity-history/actions.ts
index 1ccb5c82e5..09cf99c09d 100644
--- a/frontend/src/js/entity-history/actions.ts
+++ b/frontend/src/js/entity-history/actions.ts
@@ -32,6 +32,7 @@ import { setMessage } from "../snack-message/actions";
 import { SnackMessageType } from "../snack-message/reducer";
 
 import { EntityEvent, EntityId } from "./reducer";
+import { isDateColumn, isSourceColumn } from "./timeline/util";
 
 export type EntityHistoryActions = ActionType<
   | typeof openHistory
@@ -235,15 +236,28 @@ export function useUpdateHistorySession() {
       }
 
       const authorizedCSVUrl = getAuthorizedUrl(csvUrl.url);
-      const csv = await loadCSV(authorizedCSVUrl, { english: true });
+      const csv = await loadCSV(authorizedCSVUrl);
       const currentEntityData = await parseCSVWithHeaderToObj(
         csv.data.map((r) => r.join(";")).join("\n"),
       );
 
+      const dateColumn = columnDescriptions.find(isDateColumn);
+      if (!dateColumn) {
+        throw new Error("No date column found");
+      }
+      const sourceColumn = columnDescriptions.find(isSourceColumn);
+      if (!sourceColumn) {
+        throw new Error("No sources column found");
+      }
+
+      const currentEntityDataProcessed = transformEntityData(
+        currentEntityData,
+        { dateColumn },
+      );
 
-      const currentEntityDataProcessed =
-        transformEntityData(currentEntityData);
       const uniqueSources = [
-        ...new Set(currentEntityDataProcessed.map((row) => row.source)),
+        ...new Set(
+          currentEntityDataProcessed.map((row) => row[sourceColumn.label]),
+        ),
       ];
 
       const csvHeader = csv.data[0];
@@ -305,15 +319,24 @@ export function useUpdateHistorySession() {
   };
 }
 
-const transformEntityData = (data: { [key: string]: any }[]): EntityEvent[] => {
+const transformEntityData = (
+  data: { [key: string]: any }[],
+  {
+    dateColumn,
+  }: {
+    dateColumn: ColumnDescription;
+  },
+): EntityEvent[] => {
+  const dateKey = dateColumn.label;
+
   return data
     .map((row) => {
-      const { first, last } = getFirstAndLastDateOfRange(row["dates"]);
+      const { first, last } =
+        getFirstAndLastDateOfRange(row[dateKey]);
 
       return first && last
         ? {
             ...row,
-            dates: {
+            [dateKey]: {
              from: first,
               to: last,
             },
@@ -321,16 +344,15 @@
           }
         : row;
     })
     .sort((a, b) => {
-      return a.dates.from - b.dates.from > 0 ? -1 : 1;
+      return a[dateKey].from - b[dateKey].from > 0 ? -1 : 1;
     })
     .map((row) => {
-      const { dates, ...rest } = row;
       return {
-        dates: {
-          from: formatStdDate(row.dates?.from),
-          to: formatStdDate(row.dates?.to),
+        ...row,
+        [dateKey]: {
+          from: formatStdDate(row[dateKey]?.from),
+          to: formatStdDate(row[dateKey]?.to),
         },
-        ...rest,
       };
     });
 };
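`transformEntityData` no longer assumes a literal `dates` key; it reads and writes the range under whatever label the date column carries, using computed property names. A shape-only sketch (the label and values are invented; the real parsing and formatting are done by `getFirstAndLastDateOfRange` and `formatStdDate` in the codebase):

```ts
// Hypothetical label of the EVENT_DATE column
const dateKey = "behandlungszeitraum";

const row = {
  [dateKey]: { from: "2021-01-03", to: "2021-02-10" },
  quelle: "KH",
};

// Computed keys ([dateKey]) keep the transform label-agnostic:
const clamped = {
  ...row,
  [dateKey]: { from: row[dateKey].from, to: "2021-03-31" },
};
```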
diff --git a/frontend/src/js/entity-history/reducer.ts b/frontend/src/js/entity-history/reducer.ts
index f6d59e45d5..2fa9cc5e35 100644
--- a/frontend/src/js/entity-history/reducer.ts
+++ b/frontend/src/js/entity-history/reducer.ts
@@ -21,10 +21,6 @@ import {
 
 // TODO: This is quite inaccurate
 export type EntityEvent = {
-  dates: {
-    from: string; // e.g. 2022-01-31
-    to: string; // e.g. 2022-01-31
-  };
   [key: string]: any;
 };
diff --git a/frontend/src/js/entity-history/timeline/EventCard.tsx b/frontend/src/js/entity-history/timeline/EventCard.tsx
index 11bdb01202..01d9565c82 100644
--- a/frontend/src/js/entity-history/timeline/EventCard.tsx
+++ b/frontend/src/js/entity-history/timeline/EventCard.tsx
@@ -23,6 +23,7 @@ import type { EntityEvent } from "../reducer";
 
 import GroupedContent from "./GroupedContent";
 import { RawDataBadge } from "./RawDataBadge";
 import { TinyLabel } from "./TinyLabel";
+import { isDateColumn, isSourceColumn } from "./util";
 
 const Card = styled("div")`
   display: grid;
@@ -95,6 +96,8 @@ const Bullet = styled("div")`
 const EventCard = ({
   row,
   columns,
+  dateColumn,
+  sourceColumn,
   columnBuckets,
   currencyConfig,
   contentFilter,
@@ -104,6 +107,8 @@ const EventCard = ({
 }: {
   row: EntityEvent;
   columns: Record<string, ColumnDescription>;
+  dateColumn: ColumnDescription;
+  sourceColumn: ColumnDescription;
   columnBuckets: ColumnBuckets;
   contentFilter: ContentFilterValue;
   currencyConfig: CurrencyConfigT;
@@ -116,8 +121,8 @@
   const applicableGroupableIds = columnBuckets.groupableIds.filter(
     (column) =>
       exists(row[column.label]) &&
-      column.label !== "dates" && // Because they're already displayed somewhere else
-      column.label !== "source", // Because they're already displayed somewhere else
+      !isDateColumn(column) && // Because they're already displayed somewhere else
+      !isSourceColumn(column), // Because they're already displayed somewhere else
   );
 
   const groupableIdsTooltip = t("history.content.fingerprint");
@@ -134,8 +139,8 @@
   return (
-      …
-      …
+      …
+      …
       {contentFilter.money && applicableMoney.length > 0 && (
diff --git a/frontend/src/js/entity-history/timeline/GroupedContent.tsx b/frontend/src/js/entity-history/timeline/GroupedContent.tsx
index 79f78c8e4e..3d55258278 100644
--- a/frontend/src/js/entity-history/timeline/GroupedContent.tsx
+++ b/frontend/src/js/entity-history/timeline/GroupedContent.tsx
@@ -1,7 +1,6 @@
 import { css, Theme } from "@emotion/react";
 import styled from "@emotion/styled";
 import { memo, useMemo } from "react";
-import { useTranslation } from "react-i18next";
 import { NumericFormat } from "react-number-format";
 
 import {
@@ -70,12 +69,11 @@ const GroupedContent = ({
   rootConceptIdsByColumn,
   contentFilter,
 }: Props) => {
-  const { t } = useTranslation();
   const differencesKeys = useMemo(
     () =>
       groupedRowsKeysWithDifferentValues
         .filter((key) => {
-          if (key === "dates") return true;
+          if (isDateColumn(columns[key])) return true;
 
           if (!isVisibleColumn(columns[key])) {
             return false;
           }
@@ -104,9 +102,7 @@
         }}
       >
         {differencesKeys.map((key) => (
-          …
-            {key === "dates" ? t("history.dates") : columns[key].defaultLabel}
-          …
+          …{columns[key].defaultLabel}…
         ))}
         {groupedRows.map((groupedRow) =>
           differencesKeys.map((key) => (
diff --git a/frontend/src/js/entity-history/timeline/Quarter.tsx b/frontend/src/js/entity-history/timeline/Quarter.tsx
index 178897552c..0834f9e364 100644
--- a/frontend/src/js/entity-history/timeline/Quarter.tsx
+++ b/frontend/src/js/entity-history/timeline/Quarter.tsx
@@ -85,6 +85,8 @@ const Quarter = ({
   toggleOpenQuarter,
   differences,
   columns,
+  dateColumn,
+  sourceColumn,
   columnBuckets,
   currencyConfig,
   rootConceptIdsByColumn,
@@ -99,6 +101,8 @@
   toggleOpenQuarter: (year: number, quarter: number) => void;
   differences: string[][];
   columns: Record<string, ColumnDescription>;
+  dateColumn: ColumnDescription;
+  sourceColumn: ColumnDescription;
   columnBuckets: ColumnBuckets;
   contentFilter: ContentFilterValue;
   currencyConfig: CurrencyConfigT;
@@ -147,6 +151,8 @@
       .filter((key) => {
-        if (key === "dates") {
+        if (key === dateColumn.label) {
           return true; // always show dates, despite it being part of groupDifferences
         }
@@ -169,6 +175,8 @@
           <EventCard
             …
+            dateColumn={dateColumn}
+            sourceColumn={sourceColumn}
diff --git a/frontend/src/js/entity-history/timeline/RawDataBadge.tsx b/frontend/src/js/entity-history/timeline/RawDataBadge.tsx
--- a/frontend/src/js/entity-history/timeline/RawDataBadge.tsx
+++ b/frontend/src/js/entity-history/timeline/RawDataBadge.tsx
-export const RawDataBadge = ({ className, event }: Props) => {
+export const RawDataBadge = ({ className, event, sourceColumn }: Props) => {
   return (
     …
       {
       }
     }}
     >
-      {event.source}
+      {event[sourceColumn.label]}
     …
   );
 };
diff --git a/frontend/src/js/entity-history/timeline/Year.tsx b/frontend/src/js/entity-history/timeline/Year.tsx
index 4ed30bbc67..7458a0042d 100644
--- a/frontend/src/js/entity-history/timeline/Year.tsx
+++ b/frontend/src/js/entity-history/timeline/Year.tsx
@@ -29,6 +29,8 @@ const Year = ({
   detailLevel,
   contentFilter,
   columns,
+  dateColumn,
+  sourceColumn,
   columnBuckets,
   currencyConfig,
   rootConceptIdsByColumn,
@@ -45,6 +47,8 @@
   currencyConfig: CurrencyConfigT;
   columnBuckets: ColumnBuckets;
   columns: Record<string, ColumnDescription>;
+  dateColumn: ColumnDescription;
+  sourceColumn: ColumnDescription;
   timeStratifiedInfos: TimeStratifiedInfo[];
 }) => {
   const isYearOpen = getIsOpen(year);
@@ -84,6 +88,8 @@
           differences={differences}
           contentFilter={contentFilter}
           columns={columns}
+          dateColumn={dateColumn}
+          sourceColumn={sourceColumn}
           columnBuckets={columnBuckets}
           currencyConfig={currencyConfig}
           rootConceptIdsByColumn={rootConceptIdsByColumn}
diff --git a/frontend/src/js/entity-history/timeline/YearHead.tsx b/frontend/src/js/entity-history/timeline/YearHead.tsx
index a46c326e50..70a55f7c81 100644
--- a/frontend/src/js/entity-history/timeline/YearHead.tsx
+++ b/frontend/src/js/entity-history/timeline/YearHead.tsx
@@ -44,7 +44,7 @@ const Col = styled("div")`
 
 const Grid = styled("div")`
   display: grid;
-  grid-template-columns: auto 45px;
+  grid-template-columns: auto minmax(min-content, 25px);
   gap: 0px 10px;
 `;
diff --git a/frontend/src/js/entity-history/timeline/util.ts b/frontend/src/js/entity-history/timeline/util.ts
index b3f7db5013..83a5c1b94f 100644
--- a/frontend/src/js/entity-history/timeline/util.ts
+++ b/frontend/src/js/entity-history/timeline/util.ts
@@ -6,6 +6,9 @@ export const isIdColumn = (columnDescription: ColumnDescription) =>
 export const isDateColumn = (columnDescription: ColumnDescription) =>
   columnDescription.semantics.some((s) => s.type === "EVENT_DATE");
 
+export const isSourceColumn = (columnDescription: ColumnDescription) =>
+  columnDescription.semantics.some((s) => s.type === "SOURCES");
+
 export const isGroupableColumn = (columnDescription: ColumnDescription) =>
   columnDescription.semantics.some((s) => s.type === "GROUP");
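With `isSourceColumn` in place, column roles are resolved from backend-provided semantics instead of magic labels like `"dates"` or `"source"`. A small usage sketch with invented labels, following the `find(isDateColumn)` pattern from `entity-history/actions.ts` above:

```ts
type Semantic = { type: "EVENT_DATE" | "SOURCES" | "GROUP" };
type ColumnDescription = { label: string; semantics: Semantic[] };

const isDateColumn = (c: ColumnDescription) =>
  c.semantics.some((s) => s.type === "EVENT_DATE");
const isSourceColumn = (c: ColumnDescription) =>
  c.semantics.some((s) => s.type === "SOURCES");

const columns: ColumnDescription[] = [
  { label: "behandlungszeitraum", semantics: [{ type: "EVENT_DATE" }] },
  { label: "quelle", semantics: [{ type: "SOURCES" }] },
];

const dateColumn = columns.find(isDateColumn); // label "behandlungszeitraum"
const sourceColumn = columns.find(isSourceColumn); // label "quelle"
```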
diff --git a/frontend/src/js/environment/index.ts b/frontend/src/js/environment/index.ts
index 9328fbbe10..795e5a291d 100644
--- a/frontend/src/js/environment/index.ts
+++ b/frontend/src/js/environment/index.ts
@@ -1,5 +1,4 @@
 // See index.html for an inject marker, that we use to inject env vars
-import { TFunction } from "i18next";
 
 // at container runtime
 function runtimeVar(variable: string): string | null {
@@ -38,26 +37,3 @@
 export const basename = basenameEnv || "";
 export const idpUrl = idpUrlEnv || "";
 export const idpRealm = idpRealmEnv || "";
 export const idpClientId = idpClientIdEnv || "";
-
-export interface CustomEnvironment {
-  getExternalSupportedErrorMessage?: (
-    t: TFunction,
-    code: string,
-    context?: Record<string, string>,
-  ) => string | undefined;
-}
-
-let customEnvironment: CustomEnvironment | null = null;
-
-export const initializeEnvironment = (env: CustomEnvironment) => {
-  customEnvironment = env;
-};
-
-export const getExternalSupportedErrorMessage = (
-  t: TFunction,
-  code: string,
-  context?: Record<string, string>,
-) =>
-  customEnvironment && customEnvironment.getExternalSupportedErrorMessage
-    ? customEnvironment.getExternalSupportedErrorMessage(t, code, context)
-    : undefined;
diff --git a/frontend/src/js/file/csv.ts b/frontend/src/js/file/csv.ts
index 02b645fc2f..41c00bf75d 100644
--- a/frontend/src/js/file/csv.ts
+++ b/frontend/src/js/file/csv.ts
@@ -11,22 +11,9 @@ export function parseCSV(file: File, delimiter?: string) {
   });
 }
 
-export function loadCSV(
-  url: string,
-  { english }: { english?: boolean } = {},
-): Promise<ParseResult<string[]>> {
+export function loadCSV(url: string): Promise<ParseResult<string[]>> {
   return new Promise((resolve, reject) => {
-    const downloadRequestHeaders = english
-      ? {
-          downloadRequestHeaders: {
-            // Because we support different csv header versions depending on language
-            "Accept-Language": "en-US,en",
-          },
-        }
-      : {};
-
     Papa.parse(url, {
-      ...downloadRequestHeaders,
       download: true,
       delimiter: ";",
       skipEmptyLines: true,
diff --git a/frontend/src/js/index.tsx b/frontend/src/js/index.tsx
index b2d4b1f390..266af48f8c 100644
--- a/frontend/src/js/index.tsx
+++ b/frontend/src/js/index.tsx
@@ -7,7 +6,6 @@ import "../fonts.css";
 
 import AppRoot from "./AppRoot";
 import GlobalStyles from "./GlobalStyles";
 import type { StateT } from "./app/reducers";
-import { initializeEnvironment, CustomEnvironment } from "./environment";
 import { makeStore } from "./store";
 
 // TODO: OG image required?
@@ -18,7 +17,6 @@
 let store: Store;
 
 const initialState = {};
 
-// Render the App including Hot Module Replacement
 const renderRoot = (theme: Theme) => {
   store = store || makeStore(initialState);
 
   …
   );
 };
 
-export default function conquery({
-  theme,
-  customEnvironment,
-}: {
-  theme: Theme; // React-Emotion theme, will at some point completely replace sass
-  customEnvironment: CustomEnvironment;
-}) {
-  initializeEnvironment(customEnvironment);
-  renderRoot(theme);
+export default function conquery({ theme }: { theme: Theme }) {
+  return renderRoot(theme);
 }
diff --git a/frontend/src/js/model/query.ts b/frontend/src/js/model/query.ts
index 2370abf0e4..a94bdefc18 100644
--- a/frontend/src/js/model/query.ts
+++ b/frontend/src/js/model/query.ts
@@ -2,7 +2,6 @@ import { exists } from "../common/helpers/exists";
 import type { StandardQueryStateT } from "../standard-query-editor/queryReducer";
 import type {
   PreviousQueryQueryNodeType,
-  QueryGroupType,
   StandardQueryNodeT,
 } from "../standard-query-editor/types";
 import { TIMEBASED_OPERATOR_TYPES } from "../timebased-query-editor/reducer";
@@ -43,15 +42,3 @@ export function isQueryExpandable(node: StandardQueryNodeT) {
 export function validateQueryLength(query: StandardQueryStateT) {
   return query.length > 0;
 }
-
-function elementHasValidDates(element: StandardQueryNodeT) {
-  return !element.excludeTimestamps;
-}
-
-function groupHasValidDates(group: QueryGroupType) {
-  return !group.exclude && group.elements.some(elementHasValidDates);
-}
-
-export function validateQueryDates(query: StandardQueryStateT) {
-  return !query || query.length === 0 || query.some(groupHasValidDates);
-}
diff --git a/frontend/src/js/query-runner/actions.ts b/frontend/src/js/query-runner/actions.ts
index 1c713af6a1..f37ef44cb4 100644
--- a/frontend/src/js/query-runner/actions.ts
+++ b/frontend/src/js/query-runner/actions.ts
@@ -24,7 +24,6 @@ import {
   successPayload,
 } from "../common/actions/genericActions";
 import { EditorV2Query } from "../editor-v2/types";
-import { getExternalSupportedErrorMessage } from "../environment";
 import {
   useLoadFormConfigs,
   useLoadQueries,
@@ -179,12 +178,7 @@
     return t("queryRunner.queryCanceled");
   }
 
-  return (
-    (error &&
-      error.code &&
-      getExternalSupportedErrorMessage(t, error.code, error.context)) ||
-    t("queryRunner.queryFailed")
-  );
+  return error?.message || t("queryRunner.queryFailed");
 };
 
 export const queryResultErrorAction = createAction(
diff --git a/frontend/src/js/standard-query-editor/StandardQueryRunner.tsx b/frontend/src/js/standard-query-editor/StandardQueryRunner.tsx
index 8ebc3a42bb..9a2c0ea5e3 100644
--- a/frontend/src/js/standard-query-editor/StandardQueryRunner.tsx
+++ b/frontend/src/js/standard-query-editor/StandardQueryRunner.tsx
@@ -1,9 +1,8 @@
-import { useTranslation } from "react-i18next";
 import { useSelector } from "react-redux";
 
 import type { DatasetT } from "../api/types";
 import type { StateT } from "../app/reducers";
-import { validateQueryLength, validateQueryDates } from "../model/query";
+import { validateQueryLength } from "../model/query";
 import QueryRunner from "../query-runner/QueryRunner";
 import { useStartQuery, useStopQuery } from "../query-runner/actions";
 import type { QueryRunnerStateT } from "../query-runner/reducer";
@@ -18,18 +17,6 @@ function validateDataset(datasetId: DatasetT["id"] | null) {
   return datasetId !== null;
 }
 
-function useButtonTooltip(hasQueryValidDates: boolean) {
-  const { t } = useTranslation();
-
-  if (!hasQueryValidDates) {
-    return t("queryRunner.errorDates");
-  }
-
-  // Potentially add further validation and more detailed messages
-
-  return undefined;
-}
-
 const StandardQueryRunner = () => {
   const datasetId = useSelector(
     (state) => state.datasets.selectedDatasetId,
   );
@@ -47,12 +34,9 @@ const StandardQueryRunner = () => {
   const queryId = queryRunner.runningQuery;
 
   const isDatasetValid = validateDataset(datasetId);
-  const hasQueryValidDates = validateQueryDates(query);
-  const isQueryValid = validateQueryLength(query) && hasQueryValidDates;
+  const isQueryValid = validateQueryLength(query);
   const queryStartStopReady = validateQueryStartStop(queryRunner);
 
-  const buttonTooltip = useButtonTooltip(hasQueryValidDates);
-
   const startStandardQuery = useStartQuery("standard");
   const stopStandardQuery = useStopQuery("standard");
@@ -72,7 +56,6 @@ const StandardQueryRunner = () => {
   return (
     <QueryRunner
-      buttonTooltip={buttonTooltip}
…
diff --git a/frontend/src/js/ui-components/InputDate.tsx b/frontend/src/js/ui-components/InputDate.tsx
…
 … & {
   onCalendarSelect?: (val: string) => void;
 };
 
+// TODO: Remove this once we have solved
+// - that the date picker overlays other fields in forms
+const TEMPORARILY_DISABLED_DATE_PICKER = true;
+
 const InputDate = forwardRef(
   (
     {
@@ -126,6 +130,7 @@ const InputDate = forwardRef(
       customInput={createElement(HiddenInput)}
       calendarContainer={StyledCalendar}
       calendarStartDay={1}
+      disabled={TEMPORARILY_DISABLED_DATE_PICKER}
     />
   );
diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json
index 4d429dc251..82e79e2581 100644
--- a/frontend/src/localization/de.json
+++ b/frontend/src/localization/de.json
@@ -9,10 +9,6 @@
     "reload": "Seite vollständig neu laden",
     "reloadDescription": "Bitte hinterlasse uns eine Nachricht, damit wir dieses Problem beheben können."
   },
-  "errorCodes": {
-    "EXAMPLE_ERROR": "Dies ist eine Beispiel-Fehlermeldung",
-    "EXAMPLE_ERROR_INTERPOLATED": "Dies ist eine {{adjective}} Beispiel-Fehlermeldung"
-  },
   "leftPane": {
     "conceptTrees": "Konzepte",
     "previousQueries": "Anfragen und Formulare"
@@ -38,7 +34,6 @@
     "showingMismatches": "Zeige vollständig"
   },
   "queryRunner": {
-    "errorDates": "Ungültig: Alle Datumsbereiche sind von Zeitberechnung ausgeschlossen",
     "start": "Anfrage starten",
     "stop": "Anfrage stoppen",
     "stopSuccess": "Anfrage gestoppt.",
@@ -67,7 +62,7 @@
     "hasSecondaryId": "Analyse-Ebene aktiv",
     "removeNode": "Knoten entfernen",
     "removeColumn": "Spalte entfernen",
-    "clear": "Editor-Oberfläche zurücksetzen",
+    "clear": "Editor zurücksetzen",
     "clearConfirm": "Jetzt zurücksetzen",
     "hasDefaultSettings": "Standardeinstellungen",
     "hasNonDefaultSettings": "Eigene Einstellungen"
@@ -318,8 +313,8 @@
       "copyFrom": "Kopieren von ...",
       "copying": "Kopieren"
     },
-    "clear": "Formular leeren",
-    "clearConfirm": "Formular jetzt leeren"
+    "clear": "Formular zurücksetzen",
+    "clearConfirm": "Jetzt zurücksetzen"
   }
 },
 "uploadQueryResultsModal": {
@@ -459,6 +454,7 @@
     "queryNodeDetails": "Detail-Einstellungen bearbeiten"
   },
   "history": {
+    "noData": "Keine Daten verfügbar",
     "blurred": "Daten-Sichtbarkeit",
     "emptyTimeline": {
       "headline": "Historie",
@@ -479,7 +475,6 @@
     "differencesTooltip": "Unterschiede aus den einzelnen Einträgen",
     "closeAll": "Alle schließen",
     "openAll": "Alle aufklappen",
-    "dates": "Datumswerte",
     "searchEntitiesButton": "Historie starten",
     "error": "Konnte Historie für diese ID nicht laden.",
     "options": {
@@ -524,7 +519,7 @@
     "time": "ZEIT",
     "and": "UND",
     "or": "ODER",
-    "clear": "Editor vollständig zurücksetzen",
+    "clear": "Editor zurücksetzen",
     "clearConfirm": "Jetzt zurücksetzen",
     "flip": "Drehen",
     "dates": "Datum",
diff --git a/frontend/src/localization/en.json b/frontend/src/localization/en.json
index 69aed6ea75..0c207597fe 100644
--- a/frontend/src/localization/en.json
+++ b/frontend/src/localization/en.json
@@ -9,10 +9,6 @@
     "reload": "Refresh page",
     "reloadDescription": "If this happens again, please leave us a message. That way, we can fix this issue sooner."
   },
-  "errorCodes": {
-    "EXAMPLE_ERROR": "This is an example error",
-    "EXAMPLE_ERROR_INTERPOLATED": "This is a {{adjective}} example error"
-  },
   "leftPane": {
     "conceptTrees": "Concepts",
     "previousQueries": "Queries and Forms"
@@ -37,7 +33,6 @@
     "showingMismatches": "Showing full trees"
   },
   "queryRunner": {
-    "errorDates": "Invalid: all date ranges excluded from time calculation",
     "start": "Start Query",
     "stop": "Stop Query",
     "stopSuccess": "Query stopped",
@@ -66,7 +61,7 @@
     "hasSecondaryId": "Analysis layer active",
     "removeNode": "Remove node",
     "removeColumn": "Remove column",
-    "clear": "Clear the Editor",
+    "clear": "Clear settings",
     "clearConfirm": "Clear now",
     "hasDefaultSettings": "Uses default settings",
     "hasNonDefaultSettings": "Has changed settings"
@@ -459,6 +454,7 @@
     "queryNodeDetails": "Detail-Einstellungen bearbeiten"
   },
   "history": {
+    "noData": "No data available",
     "blurred": "Data visibility",
     "emptyTimeline": {
       "headline": "History",
@@ -479,7 +475,6 @@
     "differencesTooltip": "Differences from the individual events",
     "closeAll": "Close all",
     "openAll": "Open all",
-    "dates": "Dates",
     "searchEntitiesButton": "Start History",
     "error": "Couldn't load history for this ID.",
     "options": {
@@ -524,7 +519,7 @@
     "time": "TIME",
     "and": "AND",
     "or": "OR",
-    "clear": "Reset editor completely",
+    "clear": "Reset editor",
     "clearConfirm": "Reset now",
     "flip": "Flip",
     "dates": "Dates",