Skip to content

Commit

Permalink
fixes splitting of type
Browse files Browse the repository at this point in the history
  • Loading branch information
awildturtok committed Mar 14, 2023
1 parent 9ee82d2 commit 2828f6b
Showing 1 changed file with 11 additions and 11 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
import com.bakdata.conquery.models.index.IndexService;
import com.bakdata.conquery.resources.admin.rest.AdminDatasetResource;
import com.bakdata.conquery.resources.api.ConceptsProcessor.ResolvedConceptsResult;
import com.bakdata.conquery.resources.api.ConceptsProcessor;
import com.bakdata.conquery.resources.api.FilterResource;
import com.bakdata.conquery.resources.hierarchies.HierarchyHelper;
import com.bakdata.conquery.util.support.StandaloneSupport;
Expand All @@ -37,7 +37,7 @@
@Slf4j
public class FilterResolutionTest extends IntegrationTest.Simple implements ProgrammaticIntegrationTest {

private String[] lines = new String[]{
private final String[] lines = new String[]{
"HEADER",
"a",
"aab",
Expand All @@ -48,28 +48,28 @@ public class FilterResolutionTest extends IntegrationTest.Simple implements Prog
@Override
public void execute(StandaloneSupport conquery) throws Exception {
//read test specification
String
final String
testJson =
In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json")
.withUTF8()
.readAll();

DatasetId dataset = conquery.getDataset().getId();
final DatasetId dataset = conquery.getDataset().getId();

ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
final ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);

ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));

CSVConfig csvConf = conquery.getConfig().getCsv();
final CSVConfig csvConf = conquery.getConfig().getCsv();

test.importRequiredData(conquery);

conquery.waitUntilWorkDone();


Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
Connector connector = concept.getConnectors().iterator().next();
SelectFilter<?> filter = (SelectFilter<?>) connector.getFilters().iterator().next();
final Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
final Connector connector = concept.getConnectors().iterator().next();
final SelectFilter<?> filter = (SelectFilter<?>) connector.getFilters().iterator().next();

// Copy search csv from resources to tmp folder.
final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
Expand Down Expand Up @@ -112,7 +112,7 @@ public void execute(StandaloneSupport conquery) throws Exception {
.request(MediaType.APPLICATION_JSON_TYPE)
.post(Entity.entity(new FilterResource.FilterValues(List.of("a", "aaa", "unknown")), MediaType.APPLICATION_JSON_TYPE));

ResolvedConceptsResult resolved = fromCsvResponse.readEntity(ResolvedConceptsResult.class);
final ConceptsProcessor.ResolvedFilterValues resolved = fromCsvResponse.readEntity(ConceptsProcessor.ResolvedFilterValues.class);

//check the resolved values
// "aaa" is hit by "a" and "aaa" therefore should be first
Expand All @@ -126,7 +126,7 @@ public void execute(StandaloneSupport conquery) throws Exception {
.request(MediaType.APPLICATION_JSON_TYPE)
.post(Entity.entity(new FilterResource.FilterValues(List.of("f", "unknown")), MediaType.APPLICATION_JSON_TYPE));

ResolvedConceptsResult resolved = fromCsvResponse.readEntity(ResolvedConceptsResult.class);
final ConceptsProcessor.ResolvedFilterValues resolved = fromCsvResponse.readEntity(ConceptsProcessor.ResolvedFilterValues.class);

//check the resolved values
assertThat(resolved.resolvedFilter().value().stream().map(FrontendValue::getValue)).contains("f");
Expand Down

0 comments on commit 2828f6b

Please sign in to comment.