diff --git a/.github/workflows/lint_openapi.yml b/.github/workflows/lint_openapi.yml new file mode 100644 index 0000000000..260b951f1d --- /dev/null +++ b/.github/workflows/lint_openapi.yml @@ -0,0 +1,15 @@ +name: "OpenAPI linter" +on: + pull_request: + paths: + - "openapi.yaml" + - ".github/workflows/lint_openapi.yml" +jobs: + lint: + runs-on: ubuntu-latest + container: dshanley/vacuum + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: "Lint OpenApi spec" + run: vacuum lint --details ./openapi.yaml diff --git a/.github/workflows/test_cypress.yml b/.github/workflows/test_cypress.yml index 664ebbcaee..947d1b9c9e 100644 --- a/.github/workflows/test_cypress.yml +++ b/.github/workflows/test_cypress.yml @@ -18,10 +18,15 @@ jobs: restore-keys: | ${{ runner.os }}-maven- - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true + - name: Setup Node + uses: actions/setup-node@v3 + with: + node-version: 18 + - name: Set up JDK uses: actions/setup-java@v1 with: diff --git a/.github/workflows/test_frontend.yml b/.github/workflows/test_frontend.yml index c432314177..b23cbb734f 100644 --- a/.github/workflows/test_frontend.yml +++ b/.github/workflows/test_frontend.yml @@ -9,15 +9,12 @@ jobs: format-lint-unittest: runs-on: ubuntu-latest timeout-minutes: 6 - strategy: - matrix: - node-version: [16.x] steps: - - uses: actions/checkout@v2 - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - uses: actions/checkout@v3 + - name: Setup Node + uses: actions/setup-node@v3 with: - node-version: ${{ matrix.node-version }} + node-version: 18 - name: install dependencies working-directory: ./frontend run: yarn --ignore-platform diff --git a/README.md b/README.md index 19b1d5878c..2390fade0b 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Conquery supplies a powerful interface to group event types in a hierarchical *c ## Requirements - Maven 3 (optional for building) - Java JDK 11 -- Node.js 16 + Yarn +- Node.js 18 
+ Yarn - curl (to import the test data) diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java index e6394d3543..5d704a8e76 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java @@ -1,18 +1,18 @@ package com.bakdata.conquery.apiv1; -import static com.bakdata.conquery.models.auth.AuthorizationHelper.buildDatasetAbilityMap; - import java.net.URL; import java.time.LocalDate; +import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.Stream; import javax.inject.Inject; @@ -28,7 +28,11 @@ import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.apiv1.query.SecondaryIdQuery; +import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.apiv1.query.concept.specific.CQOr; import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; import com.bakdata.conquery.io.result.ResultRender.ResultRendererProvider; import com.bakdata.conquery.io.storage.MetaStorage; @@ -39,6 +43,7 @@ import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ColumnConfig; import 
com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; @@ -46,19 +51,22 @@ import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.messages.namespaces.specific.CancelQuery; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ManagedQuery; +import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.query.preview.EntityPreviewExecution; import com.bakdata.conquery.models.query.preview.EntityPreviewForm; +import com.bakdata.conquery.models.query.queryplan.DateAggregationAction; import com.bakdata.conquery.models.query.visitor.QueryVisitor; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.QueryUtils; import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector; +import com.bakdata.conquery.util.io.IdColumnUtil; import com.google.common.collect.ClassToInstanceMap; import com.google.common.collect.MutableClassToInstanceMap; import lombok.AllArgsConstructor; @@ -81,7 +89,7 @@ public class QueryProcessor { * Creates a query for all datasets, then submits it for execution on the * intended dataset. 
*/ - public ManagedExecution postQuery(Dataset dataset, QueryDescription query, Subject subject, boolean system) { + public ManagedExecution postQuery(Dataset dataset, QueryDescription query, Subject subject, boolean system) { log.info("Query posted on Dataset[{}] by User[{{}].", dataset.getId(), subject.getId()); @@ -125,9 +133,9 @@ public ManagedExecution postQuery(Dataset dataset, QueryDescription query, Su { final Optional executionId = visitors.getInstance(QueryUtils.OnlyReusingChecker.class).getOnlyReused(); - final Optional> + final Optional execution = - executionId.map(id -> tryReuse(query, id, datasetRegistry, config, executionManager, subject.getUser())); + executionId.map(id -> tryReuse(query, id, namespace, config, executionManager, subject.getUser())); if (execution.isPresent()) { return execution.get(); @@ -135,15 +143,15 @@ public ManagedExecution postQuery(Dataset dataset, QueryDescription query, Su } // Execute the query - return executionManager.runQuery(datasetRegistry, query, subject.getUser(), dataset, config, system); + return executionManager.runQuery(namespace, query, subject.getUser(), dataset, config, system); } /** * Determine if the submitted query does reuse ONLY another query and restart that instead of creating another one. 
*/ - private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId executionId, DatasetRegistry datasetRegistry, ConqueryConfig config, ExecutionManager executionManager, User user) { + private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId executionId, Namespace namespace, ConqueryConfig config, ExecutionManager executionManager, User user) { - ManagedExecution execution = datasetRegistry.getMetaRegistry().resolve(executionId); + ManagedExecution execution = storage.getExecution(executionId); if (execution == null) { return null; @@ -167,9 +175,9 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId // If the user is not the owner of the execution, we definitely create a new Execution, so the owner can cancel it if (!user.isOwner(execution)) { - final ManagedExecution + final ManagedExecution newExecution = - executionManager.createExecution(datasetRegistry, execution.getSubmitted(), user, execution.getDataset(), false); + executionManager.createExecution(namespace, execution.getSubmitted(), user, execution.getDataset(), false); newExecution.setLabel(execution.getLabel()); newExecution.setTags(execution.getTags().clone()); storage.updateExecution(newExecution); @@ -184,7 +192,7 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId log.trace("Re-executing Query {}", execution); - executionManager.execute(datasetRegistry, execution, config); + executionManager.execute(namespace, execution, config); return execution; @@ -192,13 +200,12 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId public Stream getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) { - Collection> allQueries = storage.getAllExecutions(); + Collection allQueries = storage.getAllExecutions(); return getQueriesFiltered(dataset, RequestAwareUriBuilder.fromRequest(req), subject, allQueries, allProviders); } - public Stream 
getQueriesFiltered(Dataset datasetId, UriBuilder uriBuilder, Subject subject, Collection> allQueries, boolean allProviders) { - Map> datasetAbilities = buildDatasetAbilityMap(subject, datasetRegistry); + public Stream getQueriesFiltered(Dataset datasetId, UriBuilder uriBuilder, Subject subject, Collection allQueries, boolean allProviders) { return allQueries.stream() // The following only checks the dataset, under which the query was submitted, but a query can target more that @@ -211,7 +218,7 @@ public Stream getQueriesFiltered(Dataset datasetId, UriBuilder .filter(q -> subject.isPermitted(q, Ability.READ)) .map(mq -> { OverviewExecutionStatus status = mq.buildStatusOverview(uriBuilder.clone(), subject); - if (mq.isReadyToDownload(datasetAbilities)) { + if (mq.isReadyToDownload()) { status.setResultUrls(getDownloadUrls(config.getResultProviders(), mq, uriBuilder, allProviders)); } return status; @@ -229,7 +236,7 @@ public Stream getQueriesFiltered(Dataset datasetId, UriBuilder * @param allProviders If true, forces {@link ResultRendererProvider} to return an URL if possible. * @return The modified status */ - public static List getDownloadUrls(List renderer, ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { + public static List getDownloadUrls(List renderer, ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { return renderer.stream() .map(r -> r.generateResultURLs(exec, uriBuilder.clone(), allProviders)) @@ -242,7 +249,7 @@ public static List getDownloadUrls(List renderer, M /** * Test if the query is structured in a way the Frontend can render it. */ - private static boolean canFrontendRender(ManagedExecution q) { + private static boolean canFrontendRender(ManagedExecution q) { //TODO FK: should this be used to fill into canExpand instead of hiding the Executions? 
if (!(q instanceof ManagedQuery)) { return false; @@ -273,7 +280,7 @@ public static boolean isFrontendStructure(CQElement root) { /** * Cancel a running query: Sending cancellation to shards, which will cause them to stop executing them, results are not sent back, and incoming results will be discarded. */ - public void cancel(Subject subject, Dataset dataset, ManagedExecution query) { + public void cancel(Subject subject, Dataset dataset, ManagedExecution query) { // Does not make sense to cancel a query that isn't running. if (!query.getState().equals(ExecutionState.RUNNING)) { @@ -289,7 +296,7 @@ public void cancel(Subject subject, Dataset dataset, ManagedExecution query) namespace.sendToAll(new CancelQuery(query.getId())); } - public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatch patch) { + public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatch patch) { log.info("Patching {} ({}) with patch: {}", execution.getClass().getSimpleName(), execution, patch); @@ -303,7 +310,7 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataP for (Dataset dataset : remainingDatasets) { ManagedExecutionId id = new ManagedExecutionId(dataset.getId(), execution.getQueryId()); - final ManagedExecution otherExecution = storage.getExecution(id); + final ManagedExecution otherExecution = storage.getExecution(id); if (otherExecution == null) { continue; } @@ -313,16 +320,18 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataP } } - public void reexecute(Subject subject, ManagedExecution query) { + public void reexecute(Subject subject, ManagedExecution query) { log.info("User[{}] reexecuted Query[{}]", subject.getId(), query); if (!query.getState().equals(ExecutionState.RUNNING)) { - datasetRegistry.get(query.getDataset().getId()).getExecutionManager().execute(datasetRegistry, query, config); + final Namespace namespace = query.getNamespace(); + + 
namespace.getExecutionManager().execute(namespace, query, config); } } - public void deleteQuery(Subject subject, ManagedExecution execution) { + public void deleteQuery(Subject subject, ManagedExecution execution) { log.info("User[{}] deleted Query[{}]", subject.getId(), execution.getId()); datasetRegistry.get(execution.getDataset().getId()) @@ -332,14 +341,14 @@ public void deleteQuery(Subject subject, ManagedExecution execution) { storage.removeExecution(execution.getId()); } - public FullExecutionStatus getQueryFullStatus(ManagedExecution query, Subject subject, UriBuilder url, Boolean allProviders) { + public FullExecutionStatus getQueryFullStatus(ManagedExecution query, Subject subject, UriBuilder url, Boolean allProviders) { + final Namespace namespace = datasetRegistry.get(query.getDataset().getId()); - query.initExecutable(datasetRegistry, config); + query.initExecutable(namespace, config); - Map> datasetAbilities = buildDatasetAbilityMap(subject, datasetRegistry); - final FullExecutionStatus status = query.buildStatusFull(storage, subject, datasetRegistry, config); + final FullExecutionStatus status = query.buildStatusFull(subject); - if (query.isReadyToDownload(datasetAbilities)) { + if (query.isReadyToDownload() && subject.isPermitted(query.getDataset(), Ability.DOWNLOAD)) { status.setResultUrls(getDownloadUrls(config.getResultProviders(), query, url, allProviders)); } return status; @@ -350,12 +359,13 @@ public FullExecutionStatus getQueryFullStatus(ManagedExecution query, Subject */ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, ExternalUpload upload) { + final Namespace namespace = datasetRegistry.get(dataset.getId()); final CQExternal.ResolveStatistic statistic = - CQExternal.resolveEntities(upload.getValues(), upload.getFormat(), datasetRegistry.get(dataset.getId()) - .getStorage() - .getIdMapping(), config.getIdColumns(), config.getLocale() - .getDateReader(), upload.isOneRowPerEntity() + 
CQExternal.resolveEntities(upload.getValues(), upload.getFormat(), namespace + .getStorage() + .getIdMapping(), config.getIdColumns(), config.getLocale() + .getDateReader(), upload.isOneRowPerEntity() ); @@ -371,9 +381,9 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext // We only create the Query, really no need to execute it as it's only useful for composition. final ManagedQuery execution = - ((ManagedQuery) datasetRegistry.get(dataset.getId()) - .getExecutionManager() - .createExecution(datasetRegistry, query, subject.getUser(), dataset, false)); + ((ManagedQuery) namespace + .getExecutionManager() + .createExecution(namespace, query, subject.getUser(), dataset, false)); execution.setLastResultCount((long) statistic.getResolved().size()); @@ -381,7 +391,7 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext execution.setLabel(upload.getLabel()); } - execution.initExecutable(datasetRegistry, config); + execution.initExecutable(namespace, config); return new ExternalUploadResult(execution.getId(), statistic.getResolved().size(), statistic.getUnresolvedId(), statistic.getUnreadableDate()); } @@ -391,8 +401,9 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext */ public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List sources, Dataset dataset, Range dateRange) { + final Namespace namespace = datasetRegistry.get(dataset.getId()); EntityPreviewForm form = - EntityPreviewForm.create(entity, idKind, dateRange, sources, datasetRegistry.get(dataset.getId()).getPreviewConfig().getSelects()); + EntityPreviewForm.create(entity, idKind, dateRange, sources, namespace.getPreviewConfig().getSelects()); // TODO make sure that subqueries are also system // TODO do not persist system queries @@ -410,10 +421,90 @@ public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uri } - FullExecutionStatus 
status = execution.buildStatusFull(storage, subject, datasetRegistry, config); + FullExecutionStatus status = execution.buildStatusFull(subject); status.setResultUrls(getDownloadUrls(config.getResultProviders(), execution, uriBuilder, false)); return status; } + /** + * Execute a basic query on a single concept and return only the included entities Id's. + */ + public Stream> resolveEntities(Subject subject, List> filters, Dataset dataset) { + if(filters.stream().map(fv -> fv.getFilter().getConnector()).distinct().count() != 1){ + throw new BadRequestException("Query exactly one connector at once."); + } + + final Namespace namespace = datasetRegistry.get(dataset.getId()); + + final List queries = new ArrayList<>(filters.size()); + + for (FilterValue filter : filters) { + final CQConcept cqConcept = new CQConcept(); + cqConcept.setElements(List.of(filter.getFilter().getConnector().getConcept())); + + final CQTable cqTable = new CQTable(); + + cqTable.setFilters(List.of(filter)); + cqTable.setConnector(filter.getFilter().getConnector()); + cqTable.setConcept(cqConcept); + + cqConcept.setTables(List.of(cqTable)); + + queries.add(cqConcept); + } + + + final QueryDescription query = new ConceptQuery(new CQOr(queries, Optional.of(false), DateAggregationAction.BLOCK)); + + final ManagedExecution execution = postQuery(dataset, query, subject, true); + + if (execution.awaitDone(10, TimeUnit.SECONDS) == ExecutionState.RUNNING) { + log.warn("Still waiting for {} after 10 Seconds.", execution.getId()); + throw new ConqueryError.ExecutionProcessingTimeoutError(); + } + + if (execution.getState() == ExecutionState.FAILED) { + throw ConqueryError.ContextError.fromErrorInfo(execution.getError()); + } + + final SingleTableResult result = (SingleTableResult) execution; + + + final List ids = config.getIdColumns() + .getIds().stream() + // We're only interested in returning printable AND resolvable ids + .filter(ColumnConfig::isPrint) + .filter(ColumnConfig::isResolvable) + 
.collect(Collectors.toList()); + + + final Map id2index = IntStream.range(0, ids.size()) + .boxed() + .collect(Collectors.toMap( + idx -> ids.get(idx).getName(), + idx -> idx + )); + + final IdPrinter printer = IdColumnUtil.getIdPrinter(subject, execution, namespace, ids); + + // For each included entity emit a Map of { Id-Name -> Id-Value } + return result.streamResults() + .map(printer::createId) + .map(entityPrintId -> { + final Map out = new HashMap<>(); + + for (Map.Entry entry : id2index.entrySet()) { + // Not all ExternalIds are expected to be set. + if (entityPrintId.getExternalId()[entry.getValue()] == null) { + continue; + } + + out.put(entry.getKey(), entityPrintId.getExternalId()[entry.getValue()]); + } + + return out; + }) + .filter(Predicate.not(Map::isEmpty)); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/Form.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/Form.java index 4c28b5ff5e..783015148c 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/Form.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/Form.java @@ -1,22 +1,16 @@ package com.bakdata.conquery.apiv1.forms; -import java.util.List; -import java.util.Map; - import javax.annotation.Nullable; import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.forms.frontendconfiguration.FormScanner; import com.bakdata.conquery.models.forms.managed.ManagedForm; -import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.visitor.QueryVisitor; -import com.bakdata.conquery.models.worker.DatasetRegistry; import 
com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ClassToInstanceMap; @@ -43,7 +37,6 @@ public String getFormType() { return this.getClass().getAnnotation(CPSType.class).id(); } - public abstract Map> createSubQueries(DatasetRegistry datasets, User user, Dataset submittedDataset); @Override diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/InternalForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/InternalForm.java new file mode 100644 index 0000000000..a325248550 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/InternalForm.java @@ -0,0 +1,17 @@ +package com.bakdata.conquery.apiv1.forms; + +import java.util.Map; + +import com.bakdata.conquery.apiv1.query.Query; + +/** + * Interface for {@link Form}s that are executed as a {@link com.bakdata.conquery.models.forms.managed.ManagedInternalForm}. + * + * {@link com.bakdata.conquery.models.forms.managed.ManagedInternalForm}s can consist of multiple sub queries which are generated from the api object. 
+ */ +public interface InternalForm { + + + Map createSubQueries(); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/AbsoluteMode.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/AbsoluteMode.java index 62fb8aa645..1b7e00d556 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/AbsoluteMode.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/AbsoluteMode.java @@ -11,15 +11,12 @@ import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.forms.managed.AbsoluteFormQuery; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.query.DateAggregationMode; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.models.worker.DatasetRegistry; import com.fasterxml.jackson.annotation.JsonView; import lombok.Getter; import lombok.Setter; @@ -41,7 +38,7 @@ public class AbsoluteMode extends Mode { private ArrayConceptQuery resolvedFeatures; @Override - public Query createSpecializedQuery(DatasetRegistry datasets, User user, Dataset submittedDataset) { + public Query createSpecializedQuery() { List resolutionsAndAlignments = ExportForm.getResolutionAlignmentMap(getForm().getResolvedResolutions(), getAlignmentHint()); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/EntityDateMode.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/EntityDateMode.java index 489cea9435..619c5668db 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/EntityDateMode.java +++ 
b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/EntityDateMode.java @@ -1,32 +1,24 @@ package com.bakdata.conquery.apiv1.forms.export_form; import java.time.LocalDate; -import java.util.List; import java.util.function.Consumer; import javax.annotation.CheckForNull; import javax.validation.Valid; -import javax.validation.constraints.NotEmpty; import javax.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.query.ArrayConceptQuery; -import com.bakdata.conquery.apiv1.query.CQElement; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.common.daterange.CDateRange; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.forms.managed.EntityDateQuery; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.query.DateAggregationMode; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonView; -import com.google.common.collect.ImmutableList; import lombok.Getter; import lombok.Setter; @@ -57,18 +49,18 @@ public void visit(Consumer visitor) { public void resolve(QueryResolveContext context) { resolvedFeatures = ArrayConceptQuery.createFromFeatures(getForm().getFeatures()); resolvedFeatures.resolve(context); - } + } @Override - public Query createSpecializedQuery(DatasetRegistry datasets, User user, Dataset submittedDataset) { + public Query createSpecializedQuery() { CDateRange dateRestriction = dateRange == null ? 
CDateRange.all() : CDateRange.of(dateRange); - return new EntityDateQuery( + return new EntityDateQuery( getForm().getPrerequisite(), resolvedFeatures, ExportForm.getResolutionAlignmentMap(getForm().getResolvedResolutions(), getAlignmentHint()), dateRestriction, dateAggregationMode ); - } + } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java index a63598138d..7021a7d74e 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java @@ -1,6 +1,5 @@ package com.bakdata.conquery.apiv1.forms.export_form; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -15,16 +14,15 @@ import c10n.C10N; import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.apiv1.forms.Form; -import com.bakdata.conquery.apiv1.query.ArrayConceptQuery; +import com.bakdata.conquery.apiv1.forms.InternalForm; import com.bakdata.conquery.apiv1.query.CQElement; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.internationalization.ExportFormC10n; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.View; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.ManagedForm; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; import com.bakdata.conquery.models.forms.util.Alignment; @@ -32,25 +30,23 @@ import com.bakdata.conquery.models.forms.util.ResolutionShortNames; import com.bakdata.conquery.models.i18n.I18n; import 
com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; -import com.bakdata.conquery.models.query.DateAggregationMode; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.models.worker.DatasetRegistry; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonView; import com.google.common.collect.ImmutableList; import lombok.AccessLevel; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Setter; -@Getter @Setter -@CPSType(id="EXPORT_FORM", base=QueryDescription.class) -public class ExportForm extends Form { +@Getter +@Setter +@CPSType(id = "EXPORT_FORM", base = QueryDescription.class) +public class ExportForm extends Form implements InternalForm { @NotNull @JsonProperty("queryGroup") @@ -77,7 +73,6 @@ public class ExportForm extends Form { private Query prerequisite; @JsonIgnore private List resolvedResolutions; - @Override public void visit(Consumer visitor) { visitor.accept(this); @@ -87,12 +82,11 @@ public void visit(Consumer visitor) { @Override - public Map> createSubQueries(DatasetRegistry datasets, User user, Dataset submittedDataset) { + public Map createSubQueries() { return Map.of( - ConqueryConstants.SINGLE_RESULT_TABLE_NAME, - List.of( - timeMode.createSpecializedQuery(datasets, user, submittedDataset) - .toManagedExecution(user, submittedDataset))); + ConqueryConstants.SINGLE_RESULT_TABLE_NAME, + timeMode.createSpecializedQuery() + ); } @Override @@ -102,7 +96,7 @@ public Set collectRequiredQueries() { @Override public void resolve(QueryResolveContext context) { - queryGroup = (ManagedQuery) context.getDatasetRegistry().getMetaRegistry().resolve(queryGroupId); + 
queryGroup = (ManagedQuery) context.getStorage().getExecution(queryGroupId); // Apply defaults to user concept @@ -193,7 +187,7 @@ public static void enable(List features) { @Override - public ManagedForm toManagedExecution(User user, Dataset submittedDataset) { - return new ManagedInternalForm(this, user, submittedDataset); + public ManagedForm toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage) { + return new ManagedInternalForm(this, user, submittedDataset, storage); } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java index fca2c7b65b..3bfc39a382 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.apiv1.forms.export_form; import java.time.LocalDate; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -15,23 +14,23 @@ import c10n.C10N; import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.apiv1.forms.Form; +import com.bakdata.conquery.apiv1.forms.InternalForm; +import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.apiv1.query.TableExportQuery; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.internationalization.ExportFormC10n; import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.forms.managed.ManagedForm; import 
com.bakdata.conquery.models.forms.managed.ManagedInternalForm; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.models.worker.DatasetRegistry; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; @@ -41,7 +40,7 @@ @Getter @Setter @CPSType(id = "FULL_EXPORT_FORM", base = QueryDescription.class) -public class FullExportForm extends Form { +public class FullExportForm extends Form implements InternalForm { @NotNull @JsonProperty("queryGroup") @@ -65,7 +64,7 @@ public void visit(Consumer visitor) { @Override - public Map> createSubQueries(DatasetRegistry datasets, User user, Dataset submittedDataset) { + public Map createSubQueries() { // Forms are sent as an array of standard queries containing AND/OR of CQConcepts, we ignore everything and just convert the CQConcepts into CQUnfiltered for export. 
@@ -74,12 +73,10 @@ public Map> createSubQueries(DatasetRegistry datasets exportQuery.setTables(tables); - final ManagedQuery managedQuery = new ManagedQuery(exportQuery, user, submittedDataset); - return Map.of( ConqueryConstants.SINGLE_RESULT_TABLE_NAME, - List.of(managedQuery) + exportQuery ); } @@ -91,7 +88,7 @@ public Set collectRequiredQueries() { @Override public void resolve(QueryResolveContext context) { - queryGroup = (ManagedQuery) context.getDatasetRegistry().getMetaRegistry().resolve(queryGroupId); + queryGroup = (ManagedQuery) context.getStorage().getExecution(queryGroupId); } @Override @@ -101,7 +98,7 @@ public String getLocalizedTypeLabel() { @Override - public ManagedForm toManagedExecution(User user, Dataset submittedDataset) { - return new ManagedInternalForm(this, user, submittedDataset); + public ManagedInternalForm toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage) { + return new ManagedInternalForm(this, user, submittedDataset, storage); } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/Mode.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/Mode.java index a0af0fbc34..c44b6d525a 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/Mode.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/Mode.java @@ -1,17 +1,11 @@ package com.bakdata.conquery.apiv1.forms.export_form; -import com.bakdata.conquery.apiv1.query.ArrayConceptQuery; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.io.cps.CPSBase; -import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.models.worker.DatasetRegistry; import com.fasterxml.jackson.annotation.JsonBackReference; import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonView; import lombok.Getter; import lombok.Setter; @@ -25,6 +19,6 @@ public abstract class Mode implements Visitable { private ExportForm form; public abstract void resolve(QueryResolveContext context); - - public abstract Query createSpecializedQuery(DatasetRegistry datasets, User user, Dataset submittedDataset); + + public abstract Query createSpecializedQuery(); } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/RelativeMode.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/RelativeMode.java index e124463320..9c4d0f24e6 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/RelativeMode.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/RelativeMode.java @@ -1,28 +1,21 @@ package com.bakdata.conquery.apiv1.forms.export_form; -import java.util.List; import java.util.function.Consumer; import javax.validation.constraints.Min; -import javax.validation.constraints.NotEmpty; import javax.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.forms.IndexPlacement; import com.bakdata.conquery.apiv1.query.ArrayConceptQuery; -import com.bakdata.conquery.apiv1.query.CQElement; import com.bakdata.conquery.apiv1.query.concept.specific.temporal.TemporalSamplerFactory; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.forms.export.RelExportGenerator; import com.bakdata.conquery.models.forms.managed.RelativeFormQuery; import com.bakdata.conquery.models.forms.util.CalendarUnit; import com.bakdata.conquery.models.query.DateAggregationMode; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; -import 
com.bakdata.conquery.models.worker.DatasetRegistry; -import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonView; import lombok.Getter; import lombok.Setter; @@ -51,7 +44,7 @@ public void visit(Consumer visitor) { } @Override - public RelativeFormQuery createSpecializedQuery(DatasetRegistry datasets, User user, Dataset submittedDataset) { + public RelativeFormQuery createSpecializedQuery() { return RelExportGenerator.generate(this); } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java index 56cb3d1e0f..9769140747 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java @@ -1,9 +1,12 @@ package com.bakdata.conquery.apiv1.frontend; import java.util.Collection; +import java.util.List; import com.bakdata.conquery.io.jackson.serializer.NsIdRef; +import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Data; @@ -19,6 +22,12 @@ public static class Labelled { @JsonProperty("default") private final Collection defaultConnectors; + @NsIdRefCollection + private final List> searchFilters; + + /** + * Search concept needs to be parent of searchFilters, so frontend can resolve the filters. 
+ */ @NsIdRef private final Concept searchConcept; } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java index 7b4ca37727..a55793d4cc 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java @@ -13,7 +13,6 @@ import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.DateAggregationMode; import com.bakdata.conquery.models.query.QueryPlanContext; @@ -39,10 +38,11 @@ @Setter @CPSType(id = "ARRAY_CONCEPT_QUERY", base = QueryDescription.class) @Slf4j -@NoArgsConstructor(access = AccessLevel.PRIVATE, onConstructor = @__(@JsonCreator)) +@NoArgsConstructor(access = AccessLevel.PRIVATE, onConstructor_ = {@JsonCreator}) public class ArrayConceptQuery extends Query { - @NotEmpty @Valid + @NotEmpty + @Valid private List childQueries = new ArrayList<>(); @NotNull @@ -60,21 +60,21 @@ public static ArrayConceptQuery createFromFeatures(List features) { } public ArrayConceptQuery(@NonNull List queries, @NonNull DateAggregationMode dateAggregationMode) { - if(queries == null) { + if (queries == null) { throw new IllegalArgumentException("No sub query list provided."); } this.childQueries = queries; this.dateAggregationMode = dateAggregationMode; } - public ArrayConceptQuery( List queries) { + public ArrayConceptQuery(List queries) { this(queries, DateAggregationMode.NONE); } @Override public void resolve(QueryResolveContext context) { resolvedDateAggregationMode = dateAggregationMode; - if(context.getDateAggregationMode() != null) { + if (context.getDateAggregationMode() != null) { 
log.trace("Overriding date aggregation mode ({}) with mode from context ({})", dateAggregationMode, context.getDateAggregationMode()); resolvedDateAggregationMode = context.getDateAggregationMode(); } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java index 741270abd5..da4d655d29 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java @@ -20,10 +20,8 @@ import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.query.queryplan.ConceptQueryPlan; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; -import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonView; import com.google.common.base.Preconditions; -import lombok.AccessLevel; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @@ -32,8 +30,8 @@ @Getter @Setter @CPSType(id = "CONCEPT_QUERY", base = QueryDescription.class) +@NoArgsConstructor @Slf4j -@NoArgsConstructor(access = AccessLevel.PRIVATE, onConstructor = @__(@JsonCreator)) public class ConceptQuery extends Query { @Valid @@ -47,13 +45,14 @@ public class ConceptQuery extends Query { @JsonView(View.InternalCommunication.class) protected DateAggregationMode resolvedDateAggregationMode; - public ConceptQuery(CQElement root, DateAggregationMode dateAggregationMode) { - this(root); - this.dateAggregationMode = dateAggregationMode; - } public ConceptQuery(CQElement root) { + this(root, DateAggregationMode.MERGE); + } + + public ConceptQuery(CQElement root, DateAggregationMode dateAggregationMode) { this.root = root; + this.dateAggregationMode = dateAggregationMode; } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java index 
321ec2441c..cde81b3cc1 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java @@ -9,6 +9,7 @@ import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; +import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryPlanContext; @@ -39,8 +40,8 @@ public Set collectRequiredQueries() { public abstract List getResultInfos(); @Override - public ManagedQuery toManagedExecution(User user, Dataset submittedDataset) { - return new ManagedQuery(this, user, submittedDataset); + public ManagedQuery toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage) { + return new ManagedQuery(this, user, submittedDataset, storage); } /** @@ -54,9 +55,9 @@ public CQElement getReusableComponents() { /** * Implement Query-type aware counting of results. Standard method is counting unique entities. * - * @see ManagedQuery#finish(MetaStorage, ExecutionState) for how it's used. - * @return the number of results in the result List. * @param results + * @return the number of results in the result List. + * @see ManagedExecution#finish(ExecutionState) for how it's used. 
*/ public long countResults(Stream results) { return results.map(EntityResult::listResultLines) diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java index 6e0be4b805..6f551e15c8 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java @@ -10,12 +10,10 @@ import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Ability; -import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.forms.managed.ManagedForm; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.QueryExecutionContext; @@ -23,7 +21,6 @@ import com.bakdata.conquery.models.query.RequiredEntities; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.query.visitor.QueryVisitor; -import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.util.QueryUtils; import com.bakdata.conquery.util.QueryUtils.ExternalIdChecker; import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector; @@ -40,14 +37,15 @@ public interface QueryDescription extends Visitable { * Transforms the submitted query to an {@link ManagedExecution}. * In this step some external dependencies are resolve (such as {@link CQExternal}). 
* However, steps that require add or manipulates queries programmatically based on the submitted query - * should be done in an extra init procedure (see {@link ManagedForm#doInitExecutable(DatasetRegistry, ConqueryConfig)}. + * should be done in an extra init procedure (see {@link ManagedExecution#doInitExecutable()}. * These steps are executed right before the execution of the query and not necessary in this creation phase. * * @param user * @param submittedDataset + * @param storage * @return */ - ManagedExecution toManagedExecution(User user, Dataset submittedDataset); + ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage); Set collectRequiredQueries(); @@ -100,10 +98,10 @@ public static void authorizeQuery(QueryDescription queryDescription, Subject sub subject.authorize(concepts, Ability.READ); // Check reused query permissions - final Set> collectedExecutions = queryDescription.collectRequiredQueries().stream() - .map(storage::getExecution) - .filter(Objects::nonNull) - .collect(Collectors.toSet()); + final Set collectedExecutions = queryDescription.collectRequiredQueries().stream() + .map(storage::getExecution) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); subject.authorize(collectedExecutions, Ability.READ); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java index e4f2e31cd4..31495a2cf2 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java @@ -63,6 +63,7 @@ public class SecondaryIdQuery extends Query { @JsonView(View.InternalCommunication.class) private Set withoutSecondaryId; + @Override public SecondaryIdQueryPlan createQueryPlan(QueryPlanContext context) { diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java 
b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java index 932c2c72ba..b49088bb03 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java @@ -75,13 +75,13 @@ @Getter @Setter @CPSType(id = "TABLE_EXPORT", base = QueryDescription.class) -@RequiredArgsConstructor(onConstructor = @__({@JsonCreator})) +@RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) public class TableExportQuery extends Query { @Valid @NotNull @NonNull - protected Query query; + protected final Query query; @NotNull private Range dateRange = Range.all(); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java index df13c7738d..2dc2b2ee50 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java @@ -73,7 +73,7 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { @Override public void resolve(QueryResolveContext context) { - query = ((ManagedQuery) context.getDatasetRegistry().getMetaRegistry().resolve(queryId)); + query = ((ManagedQuery) context.getStorage().getExecution(queryId)); resolvedQuery = query.getQuery(); // Yey recursion, because the query might consist of another CQReusedQuery or CQExternal diff --git a/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java b/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java index ef42bff122..cddf44f8fa 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java @@ -5,8 +5,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import 
java.util.Queue; -import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -41,7 +39,6 @@ import com.bakdata.conquery.models.messages.network.MessageToManagerNode; import com.bakdata.conquery.models.messages.network.NetworkMessageContext; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.models.worker.Worker; import com.bakdata.conquery.resources.ResourcesProvider; import com.bakdata.conquery.resources.admin.AdminServlet; @@ -121,6 +118,7 @@ public void run(ConqueryConfig config, Environment environment) throws Interrupt // The validator is already injected at this point see Conquery.java datasetRegistry = new DatasetRegistry(config.getCluster().getEntityBucketSize(), config, this::createInternalObjectMapper); storage = new MetaStorage(config.getStorage(), datasetRegistry); + datasetRegistry.setMetaStorage(storage); final ObjectMapper objectMapper = environment.getObjectMapper(); @@ -291,35 +289,28 @@ private void loadMetaStorage() { log.info("Loading MetaStorage"); storage.loadData(); log.info("MetaStorage loaded {}", storage); - - datasetRegistry.setMetaStorage(storage); } @SneakyThrows(InterruptedException.class) public void loadNamespaces() { - Queue namespacesDone = new ConcurrentLinkedQueue<>(); ExecutorService loaders = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); // Namespaces load their storage themselves, so they can inject Namespace relevant objects into stored objects final Collection namespaceStorages = config.getStorage().discoverNamespaceStorages(); for (NamespaceStorage namespaceStorage : namespaceStorages) { loaders.submit(() -> { - namespacesDone.add(Namespace.createAndRegister( - getDatasetRegistry(), - namespaceStorage, - getConfig(), - this::createInternalObjectMapper - )); + 
datasetRegistry.createNamespace(namespaceStorage); }); } loaders.shutdown(); while (!loaders.awaitTermination(1, TimeUnit.MINUTES)) { - log.debug("Waiting for Worker namespaces to load. {} are already finished. {} pending.", namespacesDone.size(), namespaceStorages.size() - - namespacesDone.size()); + final int countLoaded = datasetRegistry.getDatasets().size(); + log.debug("Waiting for Worker namespaces to load. {} are already finished. {} pending.", countLoaded, namespaceStorages.size() + - countLoaded); } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/ResultRender/ResultRendererProvider.java b/backend/src/main/java/com/bakdata/conquery/io/result/ResultRender/ResultRendererProvider.java index df1714e6f5..08a169afe6 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/ResultRender/ResultRendererProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/ResultRender/ResultRendererProvider.java @@ -24,7 +24,7 @@ public interface ResultRendererProvider { * @param allProviders A flag that should override internal "hide-this-url" flags. * @return An Optional with the url or an empty optional. 
*/ - Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders); + Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders); void registerResultResource(DropwizardResourceConfig environment, ManagerNode manager); } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java b/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java index 47c80e2ba9..091f7c6345 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java @@ -70,14 +70,15 @@ public static Charset determineCharset(String userAgent, String queryCharset) { * * @param exec the execution to test */ - public static void checkSingleTableResult(ManagedExecution exec) { + public static void checkSingleTableResult(ManagedExecution exec) { if (!(exec instanceof SingleTableResult)) { throw new BadRequestException("Execution cannot be rendered as the requested format"); } } - public static void authorizeExecutable(Subject subject, ManagedExecution exec, Dataset dataset) { + public static void authorizeExecutable(Subject subject, ManagedExecution exec) { + final Dataset dataset = exec.getDataset(); subject.authorize(dataset, Ability.READ); subject.authorize(dataset, Ability.DOWNLOAD); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java index 61a32303d0..7fb594f00b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java @@ -22,10 +22,8 @@ import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; -import 
com.bakdata.conquery.models.forms.managed.ManagedForm; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; -import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; @@ -40,7 +38,6 @@ import org.apache.arrow.vector.ipc.ArrowFileWriter; import org.apache.arrow.vector.ipc.ArrowStreamWriter; import org.apache.arrow.vector.ipc.ArrowWriter; -import org.apache.http.HttpStatus; @Slf4j @RequiredArgsConstructor(onConstructor_ = {@Inject}) @@ -56,11 +53,11 @@ public class ResultArrowProcessor { private final ArrowConfig arrowConfig; - public Response createResultFile(Subject subject, ManagedExecution exec, boolean pretty) { + public Response createResultFile(Subject subject, ManagedExecution exec, boolean pretty) { return getArrowResult( (output) -> (root) -> new ArrowFileWriter(root, new DictionaryProvider.MapDictionaryProvider(), Channels.newChannel(output)), subject, - (ManagedExecution & SingleTableResult) exec, + (ManagedExecution & SingleTableResult) exec, datasetRegistry, pretty, FILE_EXTENTION_ARROW_FILE, @@ -70,11 +67,11 @@ public Response createResultFile(Subject subject, ManagedExecution exec, bool ); } - public Response createResultStream(Subject subject, ManagedExecution exec, boolean pretty) { + public Response createResultStream(Subject subject, ManagedExecution exec, boolean pretty) { return getArrowResult( (output) -> (root) -> new ArrowStreamWriter(root, new DictionaryProvider.MapDictionaryProvider(), output), subject, - ((ManagedExecution & SingleTableResult) exec), + ((ManagedExecution & SingleTableResult) exec), datasetRegistry, pretty, FILE_EXTENTION_ARROW_STREAM, @@ -84,7 +81,7 @@ public Response createResultStream(Subject subject, ManagedExecution exec, bo ); } - public static & SingleTableResult> Response 
getArrowResult( + public static Response getArrowResult( Function> writerProducer, Subject subject, E exec, @@ -101,11 +98,7 @@ public static & SingleTableResult> Response getAr log.info("Downloading results for {} on dataset {}", exec, dataset); - ResultUtil.authorizeExecutable(subject, exec, dataset); - - if (!(exec instanceof ManagedQuery || (exec instanceof ManagedForm && ((ManagedForm) exec).getSubQueries().size() == 1))) { - return Response.status(HttpStatus.SC_UNPROCESSABLE_ENTITY, "Execution result is not a single Table").build(); - } + ResultUtil.authorizeExecutable(subject, exec); // Get the locale extracted by the LocaleFilter @@ -116,7 +109,7 @@ public static & SingleTableResult> Response getAr PrintSettings settings = new PrintSettings( pretty, locale, - datasetRegistry, + namespace, config, idPrinter::createId ); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java index 0c6b7c2375..1add7b84fa 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java @@ -38,7 +38,7 @@ public class ResultCsvProcessor { private final ConqueryConfig config; private final DatasetRegistry datasetRegistry; - public & SingleTableResult> Response createResult(Subject subject, E exec, boolean pretty, Charset charset) { + public Response createResult(Subject subject, E exec, boolean pretty, Charset charset) { final Dataset dataset = exec.getDataset(); @@ -47,7 +47,7 @@ public & SingleTableResult> Response createResult ConqueryMDC.setLocation(subject.getName()); log.info("Downloading results for {} on dataset {}", exec, dataset); - ResultUtil.authorizeExecutable(subject, exec, dataset); + ResultUtil.authorizeExecutable(subject, exec); // Check if subject is permitted to download on all datasets that were referenced by the query 
authorizeDownloadDatasets(subject, exec); @@ -56,7 +56,7 @@ public & SingleTableResult> Response createResult // Get the locale extracted by the LocaleFilter final Locale locale = I18n.LOCALE.get(); - final PrintSettings settings = new PrintSettings(pretty, locale, datasetRegistry, config, idPrinter::createId); + final PrintSettings settings = new PrintSettings(pretty, locale, namespace, config, idPrinter::createId); final StreamingOutput out = os -> { try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, charset))) { diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java index 23c57a1490..2708a04796 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java @@ -68,7 +68,7 @@ private interface TypeWriter { void writeCell(ResultInfo info, PrintSettings settings, Cell cell, Object value, Map styles); } - public & SingleTableResult> void renderToStream( + public void renderToStream( List idHeaders, E exec, OutputStream outputStream) throws IOException { @@ -100,7 +100,7 @@ public & SingleTableResult> void renderToStream( /** * Include meta data in the xlsx such as the title, owner/author, tag and the name of this instance. */ - private & SingleTableResult> void setMetaData(E exec) { + private void setMetaData(E exec) { final POIXMLProperties.CoreProperties coreProperties = workbook.getXSSFWorkbook().getProperties().getCoreProperties(); coreProperties.setTitle(exec.getLabelWithoutAutoLabelSuffix()); @@ -137,7 +137,7 @@ private void postProcessTable(SXSSFSheet sheet, XSSFTable table, int writtenLine * Create a table environment, which improves mainly the visuals of the produced table. 
*/ @NotNull - private XSSFTable createTableEnvironment(ManagedExecution exec, SXSSFSheet sheet) { + private XSSFTable createTableEnvironment(ManagedExecution exec, SXSSFSheet sheet) { XSSFTable table = sheet.getWorkbook().getXSSFWorkbook().getSheet(sheet.getSheetName()).createTable(null); CTTable cttable = table.getCTTable(); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java index f96e906683..af6c438ba2 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java @@ -11,7 +11,6 @@ import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.ExcelConfig; import com.bakdata.conquery.models.datasets.Dataset; @@ -38,23 +37,21 @@ public class ResultExcelProcessor { private final ExcelConfig excelConfig; + public Response createResult(Subject subject, E exec, boolean pretty) { - public & SingleTableResult> Response createResult(Subject subject, E exec, boolean pretty) { ConqueryMDC.setLocation(subject.getName()); final Dataset dataset = exec.getDataset(); log.info("Downloading results for {} on dataset {}", exec, dataset); - ResultUtil.authorizeExecutable(subject, exec, dataset); - ResultUtil.checkSingleTableResult(exec); - subject.authorize(dataset, Ability.DOWNLOAD); + ResultUtil.authorizeExecutable(subject, exec); final Namespace namespace = datasetRegistry.get(dataset.getId()); final IdPrinter idPrinter = IdColumnUtil.getIdPrinter(subject, exec, namespace, conqueryConfig.getIdColumns().getIds()); final Locale locale = I18n.LOCALE.get(); - final PrintSettings settings = new PrintSettings(pretty, locale, 
datasetRegistry, conqueryConfig, idPrinter::createId); + final PrintSettings settings = new PrintSettings(pretty, locale, namespace, conqueryConfig, idPrinter::createId); final ExcelRenderer excelRenderer = new ExcelRenderer(excelConfig, settings); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java index bee9a72e10..d76a851d5b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java @@ -35,7 +35,7 @@ public class ResultParquetProcessor { private final DatasetRegistry datasetRegistry; private final ConqueryConfig config; - public Response createResultFile(Subject subject, ManagedExecution exec, boolean pretty) { + public Response createResultFile(Subject subject, ManagedExecution exec, boolean pretty) { ConqueryMDC.setLocation(subject.getName()); @@ -43,7 +43,7 @@ public Response createResultFile(Subject subject, ManagedExecution exec, bool log.info("Downloading results for {} on dataset {}", exec, dataset); - ResultUtil.authorizeExecutable(subject, exec, dataset); + ResultUtil.authorizeExecutable(subject, exec); ResultUtil.checkSingleTableResult(exec); @@ -55,7 +55,7 @@ public Response createResultFile(Subject subject, ManagedExecution exec, bool PrintSettings settings = new PrintSettings( pretty, locale, - datasetRegistry, + namespace, config, idPrinter::createId ); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java index fa30ba2d3e..b07a579087 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java @@ -32,7 +32,7 @@ public class MetaStorage extends ConqueryStorage implements 
Injectable { private final StoreFactory storageFactory; - private IdentifiableStore> executions; + private IdentifiableStore executions; private IdentifiableStore formConfigs; private IdentifiableStore authUser; @@ -78,19 +78,19 @@ public void clear() { centralRegistry.clear(); } - public void addExecution(ManagedExecution query) { + public void addExecution(ManagedExecution query) { executions.add(query); } - public ManagedExecution getExecution(ManagedExecutionId id) { + public ManagedExecution getExecution(ManagedExecutionId id) { return executions.get(id); } - public Collection> getAllExecutions() { + public Collection getAllExecutions() { return executions.getAll(); } - public void updateExecution(ManagedExecution query) { + public void updateExecution(ManagedExecution query) { executions.update(query); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java index 5fde6c2ba5..2d34ddadc8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java @@ -46,7 +46,7 @@ public Set getEffectivePermissions() { } @Override - protected void updateStorage() { + public void updateStorage() { storage.updateGroup(this); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java index 1226750768..f8c8094900 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java @@ -97,7 +97,7 @@ public Set getRoles() { } @Override - protected void updateStorage() { + public void updateStorage() { storage.updateUser(this); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java 
b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java index a535e69693..c18ffc1d7f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java @@ -3,46 +3,58 @@ import java.io.IOException; import java.net.URI; import java.util.List; -import java.util.Objects; +import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import java.util.stream.Stream; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.ConqueryAuthenticationInfo; import com.bakdata.conquery.models.auth.ConqueryAuthenticationRealm; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.auth.oidc.keycloak.GroupUtil; +import com.bakdata.conquery.models.auth.oidc.keycloak.KeycloakApi; +import com.bakdata.conquery.models.auth.oidc.keycloak.KeycloakGroup; import com.bakdata.conquery.models.auth.util.SkippingCredentialsMatcher; +import com.bakdata.conquery.models.config.auth.IntrospectionDelegatingRealmFactory; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; -import com.google.common.collect.Sets; +import com.google.common.util.concurrent.UncheckedExecutionException; +import com.nimbusds.jwt.JWTClaimsSet; +import com.nimbusds.jwt.SignedJWT; import com.nimbusds.oauth2.sdk.ParseException; import com.nimbusds.oauth2.sdk.TokenIntrospectionRequest; import com.nimbusds.oauth2.sdk.TokenIntrospectionResponse; import com.nimbusds.oauth2.sdk.TokenIntrospectionSuccessResponse; import 
com.nimbusds.oauth2.sdk.auth.ClientAuthentication; import com.nimbusds.oauth2.sdk.http.HTTPResponse; +import com.nimbusds.oauth2.sdk.id.Audience; import com.nimbusds.oauth2.sdk.token.TypelessAccessToken; import lombok.Getter; -import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.tuple.Pair; +import org.apache.logging.log4j.util.Strings; import org.apache.shiro.authc.AuthenticationException; import org.apache.shiro.authc.AuthenticationToken; import org.apache.shiro.authc.BearerToken; import org.apache.shiro.authc.ExpiredCredentialsException; +import org.apache.shiro.authc.IncorrectCredentialsException; +import org.apache.shiro.authc.pam.UnsupportedTokenException; import org.apache.shiro.realm.AuthenticatingRealm; +import org.jetbrains.annotations.Nullable; /** - * Realm that validates OpenID access tokens by delegating them to an IDP TokenIntrospection endpoint + * Realm that validates OpenID access tokens by delegating them to an oauth tokenIntrospection endpoint. + *

+ * If {@link IntrospectionDelegatingRealmFactory#getGroupIdAttribute()} is defined, it also maps groups in keycloak, which have this attribute set + * to the corresponding group attribute in conquery and synchronizes the user group membership. */ @Slf4j @Getter @@ -50,24 +62,25 @@ public class IntrospectionDelegatingRealm extends AuthenticatingRealm implements ConqueryAuthenticationRealm { private static final Class TOKEN_CLASS = BearerToken.class; - private static final String GROUPS_CLAIM = "groups"; private final IntrospectionDelegatingRealmFactory authProviderConf; - public final MetaStorage storage; + private final MetaStorage storage; + private final KeycloakApi keycloakApi; private ClientAuthentication clientAuthentication; /** - * We only hold validated Tokens for some minutes to recheck them regulary with Keycloak. + * We only hold validated Tokens for some minutes to re-setup users and reduce fan-out. */ - private LoadingCache tokenCache = CacheBuilder.newBuilder() - .expireAfterWrite(10, TimeUnit.MINUTES) - .build(new TokenValidator()); + private LoadingCache tokenCache = CacheBuilder.newBuilder() + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new UserClaimsSetupService()); - public IntrospectionDelegatingRealm(MetaStorage storage, IntrospectionDelegatingRealmFactory authProviderConf) { + public IntrospectionDelegatingRealm(MetaStorage storage, IntrospectionDelegatingRealmFactory authProviderConf, KeycloakApi keycloakApi) { this.storage = storage; this.authProviderConf = authProviderConf; + this.keycloakApi = keycloakApi; } @Override @@ -85,98 +98,223 @@ public ConqueryAuthenticationInfo doGetAuthenticationInfo(AuthenticationToken to return null; } log.trace("Token has expected format!"); + final BearerToken bearertoken = (BearerToken) token; - TokenIntrospectionSuccessResponse successResponse = tokenCache.get((BearerToken) token); + // Validate token on every request + validateToken(bearertoken); - log.trace("Got an successful token introspection 
response."); + JWTClaimsSet claimsSet; - UserId userId = extractId(successResponse); - - User user = getUserOrThrowUnknownAccount(storage, userId); - - return new ConqueryAuthenticationInfo(user, token, this, true); - } - - private static UserId extractId(TokenIntrospectionSuccessResponse successResponse) { - String identifier = successResponse.getUsername(); - if (StringUtils.isBlank(identifier)) { - identifier = successResponse.getStringParameter("preferred_username"); - } - if (StringUtils.isBlank(identifier)) { - identifier = successResponse.getStringParameter("email"); + // Try to prepare corresponding user if necessary (on new token). + // This might fail if the tokes does not have the required audience set. + try { + claimsSet = tokenCache.get(bearertoken.getToken()); } - if (StringUtils.isBlank(identifier)) { - throw new IllegalStateException("Unable to retrieve a user identifier from validated token. Dismissing the token."); + catch (UncheckedExecutionException e) { + final Throwable cause = e.getCause(); + if (cause instanceof AuthenticationException) { + throw cause; + } + throw e; } - UserId userId = new UserId(identifier); - log.trace("Extracted UserId {}", userId); - return userId; - } - private static String extractDisplayName(TokenIntrospectionSuccessResponse successResponse) { - String username = successResponse.getUsername(); - if (StringUtils.isBlank(username)) { - username = successResponse.getStringParameter("name"); - } - if (StringUtils.isBlank(username)) { - throw new IllegalStateException("Unable to retrieve a user identifier from validated token. Dismissing the token."); - } + UserId userId = getUserId(claimsSet); + + final User user = storage.getUser(userId); - return username; + return new ConqueryAuthenticationInfo(user, token, this, true); } + /** - * Is called by the CacheLoader, so the Token is not validated on every request. + * Is called on every request to ensure that the token is still valid. 
*/ - private TokenIntrospectionSuccessResponse validateToken(AuthenticationToken token) throws ParseException, IOException { - TokenIntrospectionRequest request = new TokenIntrospectionRequest(URI.create(authProviderConf.getIntrospectionEndpoint()), authProviderConf.getClientAuthentication(), new TypelessAccessToken((String) token.getCredentials())); + private void validateToken(AuthenticationToken token) throws ParseException, IOException { + // Build introspection request + TokenIntrospectionRequest + request = + new TokenIntrospectionRequest(URI.create(authProviderConf.getIntrospectionEndpoint()), authProviderConf.getClientAuthentication(), new TypelessAccessToken((String) token.getCredentials())); + // Send introspection request TokenIntrospectionResponse response = TokenIntrospectionResponse.parse(request.toHTTPRequest().send()); + log.trace("Retrieved token introspection response."); if (!response.indicatesSuccess()) { HTTPResponse httpResponse = response.toHTTPResponse(); log.error("Received the following error from the auth server while validating a token: {} {} {}", httpResponse.getStatusCode(), httpResponse.getStatusMessage(), httpResponse.getContent()); throw new AuthenticationException("Unable to retrieve access token from auth server."); - } else if (!(response instanceof TokenIntrospectionSuccessResponse)) { + } + else if (!(response instanceof TokenIntrospectionSuccessResponse)) { log.error("Unknown token response {}.", response.getClass().getName()); throw new AuthenticationException("Unknown token response. 
See log."); } TokenIntrospectionSuccessResponse successResponse = response.toSuccessResponse(); - if(log.isTraceEnabled()) { + if (log.isTraceEnabled()) { log.trace("Token introspection: {}", successResponse.toJSONObject()); } if (!successResponse.isActive()) { log.trace("Token was not active"); throw new ExpiredCredentialsException(); } - return successResponse; + + log.trace("Got an successful token introspection response: {}", log.isTraceEnabled() ? successResponse.toJSONObject().toString() : ""); + + } + + + private void validateAudiences(JWTClaimsSet claims) { + // Check if the token is intended for our client/resource + final Audience expectedAudience = new Audience(authProviderConf.getResource()); + final List providedAudiences = Audience.create(claims.getAudience()); + + if (providedAudiences == null) { + throw new IncorrectCredentialsException("Token does not contain audiences."); + } + + if (!providedAudiences.contains(expectedAudience)) { + throw new IncorrectCredentialsException("Audience does not match. Expected: '" + + expectedAudience.getValue() + + "' (was: '" + + claims.getAudience() + + "')"); + } } + private static UserId getUserId(JWTClaimsSet claims) { + final String subject = claims.getSubject(); + UserId userId = new UserId(subject); + log.trace("Extracted UserId {}", userId); + return userId; + } - private class TokenValidator extends CacheLoader { + private static String extractDisplayName(JWTClaimsSet claims) { + try { + final String name = claims.getStringClaim("name"); + + if (StringUtils.isBlank(name)) { + throw new UnsupportedTokenException("Claim 'name' was empty"); + } + return name; + } + catch (java.text.ParseException e) { + throw new IncorrectCredentialsException("Unable to extract username from token", e); + } + } + + + /** + * Validates token and synchronizes user and its group memberships with keycloak. 
+ */ + private class UserClaimsSetupService extends CacheLoader { @Override - public TokenIntrospectionSuccessResponse load(BearerToken key) throws Exception { - log.trace("Attempting to validate token"); - TokenIntrospectionSuccessResponse response = validateToken(key); + public JWTClaimsSet load(String token) throws Exception { + + final SignedJWT jwt = SignedJWT.parse(token); + + final JWTClaimsSet claimsSet = jwt.getJWTClaimsSet(); + + + // Check if token was intended for us + validateAudiences(claimsSet); - User user = getOrCreateUser(response, extractDisplayName(response), extractId(response)); - Set mappedGroupsToDo = getMappedGroups(response); + final User user = getOrCreateUser(claimsSet); - synchGroupMappings(user, mappedGroupsToDo); + // Map groups if group-id attribute is set + final String groupIdAttribute = authProviderConf.getGroupIdAttribute(); + if (Strings.isNotBlank(groupIdAttribute)) { + final Set memberships = getUserGroups(claimsSet, groupIdAttribute); + syncGroupMappings(user, memberships); + } + + return claimsSet; + } + + @Nullable + private Set getUserGroups(JWTClaimsSet claims, String groupIdAttribute) { + final Set userGroups = keycloakApi.getUserGroups(claims.getSubject()); + final Set groupHierarchy = keycloakApi.getGroupHierarchy(); + + // Collect direct and indirect group memberships + Set allMemberships = GroupUtil.getAllUserGroups(userGroups, groupHierarchy); + + // Extract eva-group-id from attributes + return allMemberships.stream() + .map(this::tryGetGroup) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); + } + + + private Optional tryGetGroup(KeycloakGroup keycloakGroup) { + + final Map attributes = keycloakGroup.attributes(); + if (attributes == null) { + log.trace("Not mapping keycloak group because it has no attributes set: {}", keycloakGroup); + return Optional.empty(); + } + + // Extract group id + final String groupIdString = attributes.get(authProviderConf.getGroupIdAttribute()); + if (groupIdString == null) 
{ + log.trace("Not mapping keycloak group because it has no attribute '{}': {}", authProviderConf.getGroupIdAttribute(), keycloakGroup); + return Optional.empty(); + } + if (Strings.isBlank(groupIdString)) { + log.error("Cannot map keycloak group, because group id attribute was blank: {}", keycloakGroup); + return Optional.empty(); + } + + // Parse group id + GroupId groupId; + try { + groupId = GroupId.Parser.INSTANCE.parse(groupIdString); + } + catch (Exception e) { + log.error("Cannot parse '{}' as a GroupId. Skipping", groupIdString); + return Optional.empty(); + } + + final Group group = storage.getGroup(groupId); + + if (group != null) { + // Found existing group + return Optional.of(group); + } - return response; + // Create a new group + return Optional.of(createGroup(groupId.getGroup(), keycloakGroup.name())); } + private synchronized Group createGroup(String name, String label) { + // TODO mark group as managed by keycloak + final Group group = new Group(name, label, storage); - private void synchGroupMappings(User user, Set mappedGroupsToDo) { - for(Group group : storage.getAllGroups()) { - if(group.containsMember(user)) { - if(mappedGroupsToDo.contains(group)) { - // Mapping is still valid, remove from ToDo-List + // Recheck group existence in synchronized part + final Group existing = storage.getGroup(group.getId()); + + if (existing != null) { + // Found existing group + log.debug("Skip group creation, because group '{}' existed", group.getId()); + return existing; + } + + log.info("Creating new Group: {}", group); + group.updateStorage(); + return group; + } + + + private void syncGroupMappings(User user, Set mappedGroupsToDo) { + // TODO mark mappings as managed by keycloak + for (Group group : storage.getAllGroups()) { + if (group.containsMember(user)) { + if (mappedGroupsToDo.contains(group)) { + // Mapping is still valid, remove from todo-list mappedGroupsToDo.remove(group); - } else { + } + else { // Mapping is not valid any more remove user 
from group group.removeMember(user); } @@ -189,35 +327,30 @@ private void synchGroupMappings(User user, Set mappedGroupsToDo) { } - private synchronized User getOrCreateUser(TokenIntrospectionSuccessResponse successResponse, String username, UserId userId) { + private synchronized User getOrCreateUser(JWTClaimsSet claims) { + UserId userId = getUserId(claims); + final String displayName = extractDisplayName(claims); + User user = storage.getUser(userId); + if (user != null) { + log.trace("Found existing user: {}", user); + // Update display name if necessary + if (!user.getLabel().equals(displayName)) { + log.info("Updating display name of user [{}]: '{}' -> '{}'", user.getName(), user.getLabel(), displayName); + user.setLabel(displayName); + user.updateStorage(); + } + return user; } - // try to construct a new User if none could be found in the storage - String userLabel = successResponse.getStringParameter("name"); - user = new User(username, userLabel != null ? userLabel : username, storage); + + // Construct a new User if none could be found in the storage + user = new User(userId.getName(), displayName, storage); storage.addUser(user); log.info("Created new user: {}", user); - return user; - } - - private Set getMappedGroups(TokenIntrospectionSuccessResponse successResponse) { - List groupNames = Objects.requireNonNullElse(successResponse.getStringListParameter(GROUPS_CLAIM), List.of()); - Stream> derivedGroupIds = groupNames.stream().map(name -> Pair.of(name, new GroupId(name))); - Set groups = derivedGroupIds.map(this::getOrCreateGroup).collect(Collectors.toCollection(Sets::newHashSet)); - return groups; - } - private synchronized Group getOrCreateGroup(Pair groupNameId) { - Group group = storage.getGroup(groupNameId.getValue()); - if (group != null) { - return group; - } - group = new Group(groupNameId.getValue().getGroup(), groupNameId.getKey(), storage); - storage.addGroup(group); - log.info("Created new group: {}", group); - return group; + return user; 
} } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/AccessTokenResponse.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/AccessTokenResponse.java new file mode 100644 index 0000000000..6700347915 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/AccessTokenResponse.java @@ -0,0 +1,15 @@ +package com.bakdata.conquery.models.auth.oidc.keycloak; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * Response object from oauth token endpoint. + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public record AccessTokenResponse( + @JsonProperty("access_token") String access_token, + @JsonProperty("expires_in") long expires_in, + @JsonProperty("token_type") String token_type +) { +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/ClientCredentials.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/ClientCredentials.java new file mode 100644 index 0000000000..2f04dd7be9 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/ClientCredentials.java @@ -0,0 +1,21 @@ +package com.bakdata.conquery.models.auth.oidc.keycloak; + +import javax.ws.rs.core.Form; + +import org.glassfish.jersey.internal.util.collection.MultivaluedStringMap; + +/** + * Utility class for creating the form body needed to perform the client credential flow + */ +public class ClientCredentials { + + public static Form create(String clientId, String secret) { + final MultivaluedStringMap map = new MultivaluedStringMap(); + + map.add("grant_type", "client_credentials"); + map.add("client_id", clientId); + map.add("client_secret", secret); + + return new Form(map); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/ClientCredentialsGrantRequestFilter.java 
b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/ClientCredentialsGrantRequestFilter.java new file mode 100644 index 0000000000..5092dfe572 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/ClientCredentialsGrantRequestFilter.java @@ -0,0 +1,68 @@ +package com.bakdata.conquery.models.auth.oidc.keycloak; + +import java.io.IOException; +import java.net.URI; +import java.time.Instant; +import java.time.temporal.ChronoUnit; + +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.ClientRequestContext; +import javax.ws.rs.client.ClientRequestFilter; +import javax.ws.rs.client.Entity; +import javax.ws.rs.client.Invocation; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.MediaType; + +import lombok.extern.slf4j.Slf4j; + +/** + * Filter that handles the authentication transparently for the api. + * It requests an access token upon first request through the client it is registered in. + * It tracks the validity of the token and renews it as soon as a request is made after the + * first half of its lifetime. + * + * @implNote Authentication errors can still occur e.g. a not expired token was revoked. 
+ */ +@Slf4j +public class ClientCredentialsGrantRequestFilter implements ClientRequestFilter { + + private final Invocation tokenInvocation; + + private AccessTokenResponse accessToken; + private Instant renewAfter; + + public ClientCredentialsGrantRequestFilter(String clientId, String clientSecret, URI tokenEndpoint) { + Client client = ClientBuilder.newClient(); + + tokenInvocation = client.target(tokenEndpoint) + .request(MediaType.APPLICATION_JSON_TYPE).buildPost(Entity.form(ClientCredentials.create(clientId + , clientSecret))); + } + + @Override + public void filter(ClientRequestContext requestContext) throws IOException { + AccessTokenResponse response = getActiveAccessToken(); + + log.trace("Adding access token to request"); + requestContext.getHeaders().add(HttpHeaders.AUTHORIZATION, String.join(" ", response.token_type(), response.access_token())); + + } + + private AccessTokenResponse getActiveAccessToken() { + Instant now = Instant.now(); + if (accessToken == null || now.isAfter(renewAfter)) { + final AccessTokenResponse response = acquireFreshAccessToken(); + accessToken = response; + // We want the token to be refreshed after half its time to live + renewAfter = now.plus(response.expires_in() / 2, ChronoUnit.SECONDS); + } + + return accessToken; + } + + private AccessTokenResponse acquireFreshAccessToken() { + log.info("Acquire new token"); + return tokenInvocation.invoke(AccessTokenResponse.class); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/GroupUtil.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/GroupUtil.java new file mode 100644 index 0000000000..3848e397dd --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/GroupUtil.java @@ -0,0 +1,100 @@ +package com.bakdata.conquery.models.auth.oidc.keycloak; + +import java.util.Collections; +import java.util.NoSuchElementException; +import java.util.Set; + +import 
com.google.common.collect.ImmutableCollection; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; +import org.jetbrains.annotations.NotNull; + +public class GroupUtil { + + public static final String HIERARCHY_SEPARATOR = "/"; + + /** + * Return direct and indirect group memberships (parent groups of provided userGroups). + * + * @param userGroups The group which a user direct member of + * @param groupHierarchy The complete group hierarchy + * @return all group memberships + */ + @NotNull + public static Set getAllUserGroups(Set userGroups, Set groupHierarchy) { + Set allMemberships = Collections.emptySet(); + for (KeycloakGroup userGroup : userGroups) { + allMemberships = Sets.union(allMemberships, GroupUtil.getParentGroups(userGroup, groupHierarchy)); + } + return allMemberships; + } + + /** + * Return the provided group and all parent groups in the provided hierarchy. + *

+ * Using this hierarchy + *

+	 * a
+	 * ├─ aa
+	 * ├─ ab
+	 * ├─ ac
+	 * b
+	 * ├─ ba
+	 * ├─ bb
+	 * │  ├─ bba
+	 * │  ├─ bb
+	 * │  ├─ bbc
+	 * ├─ bc
+	 * c
+	 * 
+ * and providing the group + *
bb
+ * would return + *
[bb, b]
+ *

+ * For more exampled see GroupUtilTest + * + * @param group The group whose parents are collected + * @param hierarchy The group hierarchy in which to look for parents + * @return the provided group and all of its parents. + * @throws NoSuchElementException if the hierarchy is empty or the provided group is not + * contianed in the hierarchy. + */ + public static Set getParentGroups(KeycloakGroup group, Set hierarchy) { + final ImmutableSet.Builder builder = ImmutableSet.builder(); + + getParentGroups(group, hierarchy, builder); + + return builder.build(); + } + + private static void getParentGroups(KeycloakGroup group, Set hierarchy, ImmutableCollection.Builder builder) { + if (hierarchy == null || hierarchy.isEmpty()) { + throw new NoSuchElementException("Group '" + group.path() + "' cannot be found in the hierarchy"); + } + + boolean foundMatchingPath = false; + + for (KeycloakGroup groupParent : hierarchy) { + if (group.equals(groupParent)) { + builder.add(group); + return; + } + + if (group.path().startsWith(groupParent.path() + HIERARCHY_SEPARATOR)) { + + if (foundMatchingPath) { + throw new IllegalStateException("Group '" + group.path() + "' fits into multiple paths of the group hierarchy"); + } + foundMatchingPath = true; + + getParentGroups(group, groupParent.subGroups(), builder); + builder.add(groupParent); + } + } + + if (!foundMatchingPath) { + throw new NoSuchElementException("Group '" + group.path() + "' cannot be found in the hierarchy"); + } + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/KeycloakApi.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/KeycloakApi.java new file mode 100644 index 0000000000..6ea9d5189c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/KeycloakApi.java @@ -0,0 +1,62 @@ +package com.bakdata.conquery.models.auth.oidc.keycloak; + +import java.net.URI; +import java.util.Set; + +import javax.ws.rs.client.Client; +import 
javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.GenericType; + +import com.bakdata.conquery.models.config.auth.IntrospectionDelegatingRealmFactory; +import com.bakdata.conquery.util.ResourceUtil; +import com.google.common.base.Preconditions; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class KeycloakApi { + + private static final String USER_ID_TEMPLATE = "user_id"; + private static final String GROUP_ID_TEMPLATE = "group_id"; + + private final WebTarget user; + private final WebTarget userGroups; + private final WebTarget groups; + private final WebTarget group; + + public KeycloakApi(IntrospectionDelegatingRealmFactory config, Client client) { + + client.register(new ClientCredentialsGrantRequestFilter(config.getClientId(), config.getClientSecret(), URI.create(config.getTokenEndpoint()))); + + final WebTarget base = client.target(config.getAuthServerUrl()); + + final WebTarget adminBase = base.path("admin").path("realms").path(config.getRealm()); + + user = adminBase.path("users").path(ResourceUtil.wrapAsUriTemplate(USER_ID_TEMPLATE)); + userGroups = user.path("groups"); + + groups = adminBase.path("groups"); + group = groups.path(ResourceUtil.wrapAsUriTemplate(GROUP_ID_TEMPLATE)); + } + + public Set getUserGroups(String userId) { + Preconditions.checkNotNull(userId); + return userGroups.resolveTemplate(USER_ID_TEMPLATE, userId).request().get(new GenericType>() { + }); + } + + + public Set getGroupHierarchy() { + final WebTarget webTarget = groups; + log.info("Requesting group from: {}", webTarget.getUri()); + return webTarget.request().get(new GenericType>() { + }); + } + + + public KeycloakGroup getGroup(String groupId) { + Preconditions.checkNotNull(groupId); + final WebTarget webTarget = group.resolveTemplate(GROUP_ID_TEMPLATE, groupId); + log.info("Requesting group from: {}", webTarget.getUri()); + return webTarget.request().get(KeycloakGroup.class); + } +} diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/KeycloakGroup.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/KeycloakGroup.java new file mode 100644 index 0000000000..467c732808 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/keycloak/KeycloakGroup.java @@ -0,0 +1,21 @@ +package com.bakdata.conquery.models.auth.oidc.keycloak; + +import java.util.Map; +import java.util.Set; + +import javax.validation.constraints.NotEmpty; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +@JsonIgnoreProperties(ignoreUnknown = true) +public record KeycloakGroup( + @NotEmpty + String id, + @NotEmpty + String name, + @NotEmpty + String path, + Map attributes, + Set subGroups +) { +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/passwordflow/IdpDelegatingAccessTokenCreator.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/passwordflow/IdpDelegatingAccessTokenCreator.java index 5c2bd6eac0..ede4952fde 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/passwordflow/IdpDelegatingAccessTokenCreator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/passwordflow/IdpDelegatingAccessTokenCreator.java @@ -1,8 +1,17 @@ package com.bakdata.conquery.models.auth.oidc.passwordflow; +import java.net.URI; + +import javax.ws.rs.core.UriBuilder; + import com.bakdata.conquery.models.auth.basic.AccessTokenCreator; -import com.bakdata.conquery.models.auth.oidc.IntrospectionDelegatingRealmFactory; -import com.nimbusds.oauth2.sdk.*; +import com.bakdata.conquery.models.config.auth.IntrospectionDelegatingRealmFactory; +import com.nimbusds.oauth2.sdk.AccessTokenResponse; +import com.nimbusds.oauth2.sdk.AuthorizationGrant; +import com.nimbusds.oauth2.sdk.ResourceOwnerPasswordCredentialsGrant; +import com.nimbusds.oauth2.sdk.Scope; +import com.nimbusds.oauth2.sdk.TokenRequest; +import com.nimbusds.oauth2.sdk.TokenResponse; import 
com.nimbusds.oauth2.sdk.auth.Secret; import com.nimbusds.oauth2.sdk.http.HTTPResponse; import com.nimbusds.oauth2.sdk.token.AccessToken; @@ -12,9 +21,6 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import javax.ws.rs.core.UriBuilder; -import java.net.URI; - @Slf4j @Getter diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/ArrowResultProvider.java b/backend/src/main/java/com/bakdata/conquery/models/config/ArrowResultProvider.java index 2e828c6edb..05401e5a2c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/ArrowResultProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/ArrowResultProvider.java @@ -33,7 +33,7 @@ public class ArrowResultProvider implements ResultRendererProvider { @Override @SneakyThrows(MalformedURLException.class) - public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { + public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { if (!(exec instanceof SingleTableResult)) { return Collections.emptyList(); } @@ -43,8 +43,8 @@ public Collection generateResultURLs(ManagedExecution exec, UriBuilder u } return List.of( - ResultArrowResource.getFileDownloadURL(uriBuilder.clone(), (ManagedExecution & SingleTableResult) exec), - ResultArrowResource.getStreamDownloadURL(uriBuilder.clone(), (ManagedExecution & SingleTableResult) exec) + ResultArrowResource.getFileDownloadURL(uriBuilder.clone(), (ManagedExecution & SingleTableResult) exec), + ResultArrowResource.getStreamDownloadURL(uriBuilder.clone(), (ManagedExecution & SingleTableResult) exec) ); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/CsvResultProvider.java b/backend/src/main/java/com/bakdata/conquery/models/config/CsvResultProvider.java index ee2edfa3ea..4b361f35bc 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/CsvResultProvider.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/config/CsvResultProvider.java @@ -30,7 +30,7 @@ public class CsvResultProvider implements ResultRendererProvider { private boolean hidden = false; @SneakyThrows(MalformedURLException.class) - public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { + public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { if (!(exec instanceof SingleTableResult)) { return Collections.emptyList(); } @@ -39,7 +39,7 @@ public Collection generateResultURLs(ManagedExecution exec, UriBuilder u return Collections.emptyList(); } - return List.of(ResultCsvResource.getDownloadURL(uriBuilder, (ManagedExecution & SingleTableResult) exec)); + return List.of(ResultCsvResource.getDownloadURL(uriBuilder, (ManagedExecution & SingleTableResult) exec)); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java b/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java index 139d08196a..7dc4e6ecb9 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java @@ -45,7 +45,7 @@ public class ExcelResultProvider implements ResultRendererProvider { @Override @SneakyThrows(MalformedURLException.class) - public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { + public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { // We only support/produce xlsx files with one sheet for now if (!(exec instanceof SingleTableResult singleExecution)) { log.trace("Execution result is not a single table"); @@ -79,7 +79,7 @@ public Collection generateResultURLs(ManagedExecution exec, UriBuilder u return Collections.emptyList(); } - final URL resultUrl = 
ResultExcelResource.getDownloadURL(uriBuilder, (ManagedExecution & SingleTableResult) exec); + final URL resultUrl = ResultExcelResource.getDownloadURL(uriBuilder, (ManagedExecution & SingleTableResult) exec); log.trace("Generated URL: {}", resultUrl); return List.of(resultUrl); diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/ParquetResultProvider.java b/backend/src/main/java/com/bakdata/conquery/models/config/ParquetResultProvider.java index 68bfb1c210..3b43eee8a6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/ParquetResultProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/ParquetResultProvider.java @@ -11,11 +11,9 @@ import com.bakdata.conquery.commands.ManagerNode; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.result.ResultRender.ResultRendererProvider; -import com.bakdata.conquery.io.result.arrow.ResultArrowProcessor; import com.bakdata.conquery.io.result.parquet.ResultParquetProcessor; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.api.ResultArrowResource; import com.bakdata.conquery.resources.api.ResultParquetResource; import io.dropwizard.jersey.DropwizardResourceConfig; import lombok.Data; @@ -30,7 +28,7 @@ public class ParquetResultProvider implements ResultRendererProvider { @Override @SneakyThrows(MalformedURLException.class) - public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { + public Collection generateResultURLs(ManagedExecution exec, UriBuilder uriBuilder, boolean allProviders) { if (!(exec instanceof SingleTableResult)) { return Collections.emptyList(); } @@ -40,7 +38,7 @@ public Collection generateResultURLs(ManagedExecution exec, UriBuilder u } return List.of( - ResultParquetResource.getDownloadURL(uriBuilder.clone(), (ManagedExecution & SingleTableResult) exec) + 
ResultParquetResource.getDownloadURL(uriBuilder.clone(), (ManagedExecution & SingleTableResult) exec) ); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java index f1d6dc5b23..b987030c87 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java @@ -69,7 +69,7 @@ public interface StoreFactory { SingletonStore createStructureStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); // MetaStorage - IdentifiableStore> createExecutionsStore(CentralRegistry centralRegistry, DatasetRegistry datasetRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, DatasetRegistry datasetRegistry, String pathName, ObjectMapper objectMapper); IdentifiableStore createFormConfigStore(CentralRegistry centralRegistry, DatasetRegistry datasetRegistry, String pathName, ObjectMapper objectMapper); diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java index 4dd16dd2a9..437ba9c2b9 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java @@ -335,7 +335,7 @@ public SingletonStore createStructureStore(String pathName, Cen } @Override - public IdentifiableStore> createExecutionsStore(CentralRegistry centralRegistry, DatasetRegistry datasetRegistry, String pathName, ObjectMapper objectMapper) { + public IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, DatasetRegistry datasetRegistry, String pathName, ObjectMapper objectMapper) { return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, 
"executions")), validator, EXECUTIONS, datasetRegistry.injectInto(centralRegistry.injectIntoNew(objectMapper))), centralRegistry); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealmFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/auth/IntrospectionDelegatingRealmFactory.java similarity index 80% rename from backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealmFactory.java rename to backend/src/main/java/com/bakdata/conquery/models/config/auth/IntrospectionDelegatingRealmFactory.java index be38a25ab3..8d812f7327 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealmFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/auth/IntrospectionDelegatingRealmFactory.java @@ -1,23 +1,30 @@ -package com.bakdata.conquery.models.auth.oidc; +package com.bakdata.conquery.models.config.auth; + +import java.io.PrintWriter; +import java.util.List; +import java.util.Map; + +import javax.ws.rs.client.Client; import com.bakdata.conquery.commands.ManagerNode; import com.bakdata.conquery.models.auth.ConqueryAuthenticationRealm; import com.bakdata.conquery.models.auth.basic.JWTokenHandler; +import com.bakdata.conquery.models.auth.oidc.IntrospectionDelegatingRealm; +import com.bakdata.conquery.models.auth.oidc.keycloak.KeycloakApi; import com.fasterxml.jackson.annotation.JsonIgnore; import com.nimbusds.oauth2.sdk.auth.ClientAuthentication; import com.nimbusds.oauth2.sdk.auth.ClientSecretBasic; import com.nimbusds.oauth2.sdk.auth.Secret; import com.nimbusds.oauth2.sdk.id.ClientID; +import io.dropwizard.client.JerseyClientBuilder; import io.dropwizard.servlets.tasks.Task; import io.dropwizard.validation.ValidationMethod; +import lombok.Getter; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.keycloak.authorization.client.AuthzClient; import org.keycloak.authorization.client.Configuration; -import 
java.io.PrintWriter; -import java.util.List; -import java.util.Map; - /** * Bridge class for realms that authenticate users by submitting their token to the IDP for introspection. @@ -27,6 +34,14 @@ public class IntrospectionDelegatingRealmFactory extends Configuration { public static final String CONFIDENTIAL_CREDENTIAL = "secret"; + /** + * Attribute of keycloak group object that holds the conquery group id to map to. + * If it is blank, the group mapping will be skipped. + */ + @Getter + @Setter + public String groupIdAttribute = ""; + private transient AuthzClient authClient; @@ -39,7 +54,7 @@ public ConqueryAuthenticationRealm createRealm(ManagerNode managerNode) { authClient = getAuthClient(false); // Register task to retrieve the idp client api, so the realm can be used, when the idp service is available. - if(managerNode != null && managerNode.getEnvironment().admin() != null) { + if (managerNode.getEnvironment().admin() != null) { managerNode.getEnvironment().admin().addTask(new Task("keycloak-update-authz-client") { @Override @@ -49,7 +64,12 @@ public void execute(Map> parameters, PrintWriter output) th } }); } - return new IntrospectionDelegatingRealm(managerNode.getStorage(), this); + + // Setup keycloak api + final Client client = new JerseyClientBuilder(managerNode.getEnvironment()).build("keycloak-api"); + final KeycloakApi keycloakApi = new KeycloakApi(this, client); + + return new IntrospectionDelegatingRealm(managerNode.getStorage(), this, keycloakApi); } @@ -68,12 +88,12 @@ public String getIntrospectionEndpoint() { } @JsonIgnore - private String getClientId() { + public String getClientId() { return getResource(); } @JsonIgnore - private String getClientSecret() { + public String getClientSecret() { return (String) credentials.get(CONFIDENTIAL_CREDENTIAL); } @@ -90,8 +110,7 @@ public AuthzClient getAuthClient(boolean exceptionOnFailedRetrieval) { } try { // This tries to contact the identity providers discovery endpoint and can possibly timeout 
- AuthzClient authzClient = AuthzClient.create(this); - return authzClient; + return AuthzClient.create(this); } catch (RuntimeException e) { log.warn("Unable to establish connection to auth server.", log.isTraceEnabled()? e : null ); if(exceptionOnFailedRetrieval) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCAuthorizationCodeFlowRealmFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCAuthorizationCodeFlowRealmFactory.java index 54c4c6fdc5..63957e6ade 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCAuthorizationCodeFlowRealmFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCAuthorizationCodeFlowRealmFactory.java @@ -3,7 +3,6 @@ import com.bakdata.conquery.commands.ManagerNode; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.auth.ConqueryAuthenticationRealm; -import com.bakdata.conquery.models.auth.oidc.IntrospectionDelegatingRealmFactory; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCResourceOwnerPasswordCredentialRealmFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCResourceOwnerPasswordCredentialRealmFactory.java index 0f82d407b4..6104e180b6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCResourceOwnerPasswordCredentialRealmFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/auth/OIDCResourceOwnerPasswordCredentialRealmFactory.java @@ -3,7 +3,6 @@ import com.bakdata.conquery.commands.ManagerNode; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.auth.ConqueryAuthenticationRealm; -import com.bakdata.conquery.models.auth.oidc.IntrospectionDelegatingRealmFactory; import com.bakdata.conquery.models.auth.oidc.passwordflow.IdpDelegatingAccessTokenCreator; import 
com.bakdata.conquery.resources.unprotected.LoginResource; import com.bakdata.conquery.resources.unprotected.TokenResource; diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java index 7b2dd54959..5a682b7892 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java @@ -14,10 +14,11 @@ import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.models.query.PrintSettings; @@ -27,6 +28,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.OptBoolean; +import com.google.common.collect.MoreCollectors; import com.google.common.collect.Sets; import io.dropwizard.validation.ValidationMethod; import lombok.AllArgsConstructor; @@ -74,13 +76,17 @@ public class PreviewConfig { private Set defaultConnectors = Collections.emptySet(); /** - * Link to a concept providing search capabilities for entities. - * + * Link to list of Filters to provide search capabilities for entities. + *

* This looks weird at first, but allows reuse of available components instead of introducing duplicate behaviour. - * + *

* The Frontend will use the concepts filters to render a search for entity preview. */ - private ConceptId searchConcept; + private Set searchFilters = Collections.emptySet(); + + @JacksonInject(useInput = OptBoolean.FALSE) + @NotNull + private DatasetRegistry datasetRegistry; public boolean isGroupingColumn(SecondaryIdDescription desc) { return getGrouping().contains(desc.getId()); @@ -90,37 +96,12 @@ public boolean isHidden(Column column) { return getHidden().contains(column.getId()); } - @JacksonInject(useInput = OptBoolean.FALSE) - @NotNull - private DatasetRegistry datasetRegistry; - - @Data - public static class InfoCardSelect { - @JsonCreator - public InfoCardSelect(String label, SelectId select) { - this.label = label; - this.select = select; - } - - /** - * User facing label of the select. - */ - private final String label; - /** - * Id (without dataset) of the select. - */ - private final SelectId select; - - } - - @JsonIgnore @ValidationMethod(message = "Default Connectors must also be available Connectors.") public boolean isDefaultSubsetOfAvailable() { return Sets.difference(getDefaultConnectors(), getAllConnectors()).isEmpty(); } - @JsonIgnore @ValidationMethod(message = "Selects may be used only once.") public boolean isSelectsDistinct() { @@ -133,6 +114,13 @@ public boolean isSelectsLabelsDistinct() { return infoCardSelects.stream().map(InfoCardSelect::getLabel).distinct().count() == getInfoCardSelects().size(); } + + @JsonIgnore + @ValidationMethod(message = "SearchFilters must be of same concept.") + public boolean isSearchFiltersOfSameConcept() { + return searchFilters.stream().map(id -> id.getConnector().getConcept()).distinct().count() <= 1; + } + /** * Used to map {@link SelectResultInfo} to {@link InfoCardSelect#getLabel()} via {@link PrintSettings#getColumnNamer()}. 
*/ @@ -158,15 +146,48 @@ public List - - - - <@layout.kv k="Dictionaries" v=layout.si(c.dictionariesSize)+"B"/> - <@layout.kv k="Size" v=layout.si(c.size)+"B"/> - <@layout.kc k="IdMapping">Here - <@layout.kc k="Mappings"> -

    - <#list c.internToExternMappers as mapper> -
  • - ${mapper.name} <#if mapper.initialized() ><#else> - -
  • - -
- - <@layout.kc k="SearchIndices"> -
    - <#list c.searchIndices as searchIndex> -
  • - ${searchIndex.name} -
  • - -
- - <@layout.kc k="Tables"> - - - <@layout.kc k="Concepts"> -
    - <#list c.concepts?sort_by("name") as concept> -
  • - ${concept.name} - -
  • - -
- +<#import "templates/table.html.ftl" as table> +<#import "templates/accordion.html.ftl" as accordion> +<#import "templates/infoCard.html.ftl" as infoCard> +<#import "templates/editableText.html.ftl" as editableText> +<#import "templates/breadcrumbs.html.ftl" as breadcrumbs> - <@layout.kc k="SecondaryIds"> -
    - <#list c.secondaryIds?sort_by("name") as secondaryId> -
  • ${secondaryId}
  • - -
- +<#assign columnsMappers=["id", "initialized", "actions"]> +<#assign columnsSearchIndices=["id", "actions"]> +<#assign columnsTables=["id", "label", "imports", "entries", "actions"]> +<#assign columnsConcepts=["id", "label", "actions"]> +<#assign columnsSecondaryIds=["id", "label"]> -
-
-
- +<#macro deleteMappersButton id> + + - -
-
-
+<#macro deleteSearchIndiciesButton id> + + - <#assign uploadContainerStyle = "border border-secondary rounded p-2 m-2"> -
-
-
-
-
Upload mapping JSON - -
- - -
+<#macro deleteTablesButton id> + + -
-
-
- - -
- - -
+<#macro deleteConceptsButton id> + + -
-
-
- - -
- - -
+<#macro label> + <@editableText.editableText text="${c.ds.label}" onChange="(label) => rest('/admin/datasets/${c.ds.id}/label',{ method: 'post', body: label}).then(function(res){if(res.ok)location.reload();})" /> + +<#macro labelold> +
+ + + + +<#macro idMapping>Here -
-
-
- - -
- - -
+<@layout.layout> + + + + <@breadcrumbs.breadcrumbs + labels=["Datasets", c.ds.label] + links=["/admin-ui/datasets"] + /> +
+
+ <@infoCard.infoCard + class="d-inline-flex" + title="Dataset ${c.ds.label}" + labels=["ID", "Label", "Dictionaries", "Size", "IdMapping"] + values=[c.ds.id, label, layout.si(c.dictionariesSize)+"B", layout.si(c.size)+"B", idMapping] + /> + +
+
+
File Upload
+
+ + + +
+
+
+ +
+ +
+
+ + <@accordion.accordionGroup> + <@accordion.accordion summary="Mappings" infoText="${c.internToExternMappers?size} entries"> + <@table.table columns=columnsMappers items=c.internToExternMappers deleteButton=deleteMappersButton /> + + <@accordion.accordion summary="SearchIndices" infoText="${c.searchIndices?size} entries"> + <@table.table columns=columnsSearchIndices items=c.searchIndices deleteButton=deleteSearchIndiciesButton /> + + <@accordion.accordion summary="Tables" infoText="${c.tables?size} entries"> + <@table.table columns=columnsTables items=c.tables?sort_by("name") deleteButton=deleteTablesButton link="./${c.ds.id}/tables/" /> + + <@accordion.accordion summary="Concepts" infoText="${c.concepts?size} entries"> + <@table.table columns=columnsConcepts items=c.concepts?sort_by("name") deleteButton=deleteConceptsButton link="./${c.ds.id}/concepts/" /> + + <@accordion.accordion summary="SecondaryIds" infoText="${c.secondaryIds?size} entries"> + <@table.table columns=columnsSecondaryIds items=c.secondaryIds?sort_by("name") /> + + diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/datasets.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/datasets.html.ftl index 1396fcda35..e12fd55ad6 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/datasets.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/datasets.html.ftl @@ -1,26 +1,34 @@ <#import "templates/template.html.ftl" as layout> +<#import "templates/table.html.ftl" as table> +<#assign columns=["id", "label", "actions"]> + +<#macro deleteDatasetButton id> + + + <@layout.layout>
- -

-
-
+

Datasets

+
+

Create Dataset

- - " class="form-control text-monospace" style="font-family:monospace;"> - - - -
- +
+ + " class="form-control text-monospace" style="font-family:monospace;"> +
+
+ + +
+ + +
+ +
+

All Datasets

+ <@table.table columns=columns items=c?sort_by("name") link="/admin-ui/datasets/" deleteButton=deleteDatasetButton /> +
@@ -40,7 +48,12 @@ name: document.getElementById('entity_id').value, label: document.getElementById('entity_name').value }) - }).then(function(){location.reload();}); + }).then(function(res){ + if(res.ok) + location.reload(); + else + showMessageForResponse(res); + }); } function deleteDataset(datasetId) { @@ -50,7 +63,12 @@ { method: 'delete', credentials: "same-origin" - }).then(function(){location.reload();}); + }).then(function(res){ + if(res.ok) + location.reload(); + else + showMessageForResponse(res); + }); } \ No newline at end of file diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/scripts/dataset.js b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/scripts/dataset.js new file mode 100644 index 0000000000..c6157ca624 --- /dev/null +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/scripts/dataset.js @@ -0,0 +1,32 @@ +function updateDatasetUploadForm(select) { + const uploadFormMapping = { + mapping: { + name: "mapping", + uri: "internToExtern", + accept: "*.mapping.json", + }, + table: { name: "table_schema", uri: "tables", accept: "*.table.json" }, + concept: { + name: "concept_schema", + uri: "concepts", + accept: "*.concept.json", + }, + structure: { + name: "structure_schema", + uri: "structure", + accept: "structure.json", + }, + }; + + const data = uploadFormMapping[select.value]; + const fileInput = $(select).next(); + fileInput.value = ""; + fileInput.attr("accept", data.accept); + fileInput.attr("name", data.name); + $(select) + .parent() + .attr( + "onsubmit", + "postFile(event, '/admin/datasets/${c.ds.id}/" + data.uri + "')" + ); +} diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/accordion.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/accordion.html.ftl new file mode 100644 index 0000000000..c60fd9fcb1 --- /dev/null +++ 
b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/accordion.html.ftl @@ -0,0 +1,31 @@ +<#macro accordionGroup> +
+ <#nested /> +
+ +<#macro accordion summary infoText=""> +
+
+
+
${summary}
+
+
+
${infoText}
+
+
+
+
+ <#nested /> +
+
+ +
+ \ No newline at end of file diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/breadcrumbs.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/breadcrumbs.html.ftl new file mode 100644 index 0000000000..9e8afc8118 --- /dev/null +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/breadcrumbs.html.ftl @@ -0,0 +1,21 @@ +<#macro breadcrumbs labels links class=""> + + \ No newline at end of file diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/editableText.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/editableText.html.ftl new file mode 100644 index 0000000000..6618a20870 --- /dev/null +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/editableText.html.ftl @@ -0,0 +1,25 @@ +<#macro editableText text onChange class="" style=""> +
+ + ${text} + + +
+ + +
+ \ No newline at end of file diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/infoCard.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/infoCard.html.ftl new file mode 100644 index 0000000000..a4fcca868a --- /dev/null +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/infoCard.html.ftl @@ -0,0 +1,26 @@ +<#macro infoCard labels values title="" subtitle="" class=""> +
+
+ <#if title?has_content> +
${title}
+ + <#if subtitle?has_content> +
${title}
+ +
+ <#list values as value> +
+ <#if (value?index < labels?size)> +
${labels[value?index]}
+ + <#if value?is_macro> + <@value /> + <#else> +
${value}
+ +
+ +
+
+
+ \ No newline at end of file diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/table.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/table.html.ftl new file mode 100644 index 0000000000..764d5018e9 --- /dev/null +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/table.html.ftl @@ -0,0 +1,47 @@ +<#macro table columns items deleteButton="" link=""> +
+
+
+ + + <#list columns as column> + <#if column == "actions"> + + <#else> + + + + + + + <#if items?size == 0> + + + + + <#list items as item> + + <#list columns as column> + <#if column == "id" && link?has_content> + + <#elseif column == "initialized"> + + <#elseif column == "actions"> + + <#else> + + + + + + +
${column}${column}
No items found
${item.id} <#if item.initialized() ><#else> + <#if deleteButton?is_macro> + <@deleteButton id="${item.id}"/> + <#else> + <#stop "Expected macro for deleteButton"> + + ${item[column]}
+ + + \ No newline at end of file diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl index a30141e7c3..cd0b3f2371 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/template.html.ftl @@ -7,7 +7,7 @@ - +