Skip to content

Run all tests with SQL Mode #3746

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 30 commits into
base: develop
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select a commit — hold Shift and click to select a range
ab5d4b0
adds warning to CachedStore, if there are duplicate keys (This can be…
awildturtok Jun 2, 2025
ce371b6
Fixes importing for FilterTests
awildturtok Jun 2, 2025
a3a7d78
minor corrections
awildturtok Jun 3, 2025
5384960
minor corrections
awildturtok Jun 3, 2025
666d64e
minor corrections
awildturtok Jun 3, 2025
dfe693e
trivial fixes where coalesce to NULL was missing
awildturtok Jun 3, 2025
eb587d1
fix order of AllValuesAggregator test
awildturtok Jun 3, 2025
a123a2b
use coalesce for createEmptyRangeForNullValues
awildturtok Jun 3, 2025
831f575
Fix boolean Parsing for CsvTableImporter
awildturtok Jun 3, 2025
bc40de4
Cleanup
awildturtok Jun 3, 2025
c2b0015
fixes importing of previous results
awildturtok Jun 3, 2025
34b41d0
Fixes importing of AllIdsTable
awildturtok Jun 3, 2025
3b972a2
Merge branch 'develop' into feature/sql-run-all-tests
awildturtok Jun 3, 2025
d6e9a50
Fixes loading of SqlTestDataImporter.java by inheriting from WorkerTest…
awildturtok Jun 30, 2025
8b96457
Disable some tests
awildturtok Jun 30, 2025
0f56b1b
fixes scanning for tests in sql mode => .test.json
awildturtok Jul 1, 2025
cee41a5
Disable some tests
awildturtok Jul 1, 2025
d5661e5
fix: treat no validity date as +/- inf
jnsrnhld Jul 2, 2025
34dfc26
fix: parse empty array expression as null
jnsrnhld Jul 2, 2025
7282b96
REVERT_ME: move sql tests to excluded dir
jnsrnhld Jul 2, 2025
d9785f9
fix: intersection of validity dates with date restriction
jnsrnhld Jul 2, 2025
83804e8
fix: parsing of booleans
jnsrnhld Jul 2, 2025
800d3a2
fix: validity dates if start and end are null
jnsrnhld Jul 2, 2025
343c716
fix: full export form conversion
jnsrnhld Jul 3, 2025
a586c37
fix: validity date exclusion
jnsrnhld Jul 3, 2025
af34b8e
Fixes COALESCE behaviour of SumDiff Aggregation
awildturtok Jul 7, 2025
5fd7b7e
feat: implement DURATION_SUM filter conversion
jnsrnhld Jul 7, 2025
067fe60
fix: fix ExistsSelect conversion without manipulating the parser
jnsrnhld Jul 8, 2025
a782257
fix: apply ExistsConversion fix to forms too
jnsrnhld Jul 8, 2025
65abae9
fix: use coalesce for ExistsSelect final representation
jnsrnhld Jul 8, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.Setter;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;

/**
Expand All @@ -38,6 +39,7 @@
@CPSType(id = "ARRAY_CONCEPT_QUERY", base = QueryDescription.class)
@Slf4j
@NoArgsConstructor(access = AccessLevel.PRIVATE, onConstructor_ = {@JsonCreator})
@ToString
public class ArrayConceptQuery extends Query {

@NotEmpty
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,9 @@ public void toCSV(List<ResultInfo> idHeaders, List<ResultInfo> infos, Stream<Ent
createCSVBody(cfg, infos, resultStream, printSettings, StringResultPrinters.forCharset(charset));
}

private void createCSVBody(PrintSettings cfg, List<ResultInfo> infos, Stream<EntityResult> results, PrintSettings printSettings,
PrinterFactory printerFactory) {
private void createCSVBody(
PrintSettings cfg, List<ResultInfo> infos, Stream<EntityResult> results, PrintSettings printSettings,
PrinterFactory printerFactory) {
final Printer[] printers = infos.stream().map(info -> info.createPrinter(printerFactory, printSettings)).toArray(Printer[]::new);

results.map(result -> Pair.of(cfg.getIdMapper().map(result), result))
Expand All @@ -52,20 +53,19 @@ private void createCSVBody(PrintSettings cfg, List<ResultInfo> infos, Stream<Ent
public void printLine(EntityPrintId entity, Printer[] printers, Object[] values) {
// Cast here to Object[] so it is clear to intellij that the varargs call is intended
writer.addValues((Object[]) entity.getExternalId());
try {
for (int i = 0; i < printers.length; i++) {
final Object value = values[i];

if (value == null) {
writer.addValue("");
continue;
}
for (int i = 0; i < printers.length; i++) {
final Object value = values[i];

if (value == null) {
writer.addValue("");
continue;
}
try {
writer.addValue(printers[i].apply(value));
}
}
catch (Exception e) {
throw new IllegalStateException("Unable to print line " + Arrays.deepToString(values), e);
catch (Exception e) {
throw new IllegalStateException("Failed to print column %s of line %s".formatted(i, Arrays.deepToString(values)), e);
}
}

writer.writeValuesToRow();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,6 @@
@ToString(onlyExplicitlyIncluded = true)
public class CachedStore<KEY, VALUE> implements Store<KEY, VALUE> {

private static final ProgressBar PROGRESS_BAR = new ProgressBar(0);

private final LoadingCache<KEY, VALUE> cache;

/**
Expand Down Expand Up @@ -120,41 +118,36 @@ public void loadKeys() {

log.info("BEGIN loading keys {}", this);

store.getAllKeys().forEach(keys::add);
store.getAllKeys().forEach(key -> {
boolean hasPrior = !keys.add(key);
if (hasPrior) {
log.warn("Multiple keys deserialize to `{}`", key);
}
});

log.debug("DONE loading keys from {} in {}", this, stopwatch);
}

@Override
public Stream<KEY> getAllKeys() {
return (Stream<KEY>) Arrays.stream(keys.toArray());
}

@Override
public void loadData() {
final LongAdder totalSize = new LongAdder();
final int count = count();
final ProgressBar bar;

if (count > 100) {
synchronized (PROGRESS_BAR) {
bar = PROGRESS_BAR;
bar.addMaxValue(count);
}
}
else {
bar = null;
}
final ProgressBar bar = count > 100 ? new ProgressBar(100) : null;

log.info("BEGIN loading store {}", this);


final Stopwatch timer = Stopwatch.createStarted();

final Set<KEY> dupes = new HashSet<>();

store.forEach((key, value, size) -> {
try {
totalSize.add(size);
added(key, value);

if (!dupes.add(key)) {
log.warn("Multiple Keys deserialize to `{}`", key);
}
}
catch (RuntimeException e) {
if (e.getCause() != null && e.getCause() instanceof IdReferenceResolvingException) {
Expand Down Expand Up @@ -183,6 +176,11 @@ public Stream<VALUE> getAll() {
return getAllKeys().map(cache::get);
}

@Override
public Stream<KEY> getAllKeys() {
return (Stream<KEY>) Arrays.stream(keys.toArray());
}

@Override
public void clear() {
store.clear();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,19 +1,21 @@
package com.bakdata.conquery.models.datasets.concepts.filters.specific;

import java.util.EnumSet;

import com.bakdata.conquery.apiv1.frontend.FrontendFilterConfiguration;
import com.bakdata.conquery.apiv1.frontend.FrontendFilterType;
import com.bakdata.conquery.io.cps.CPSType;
import com.bakdata.conquery.models.common.Range;
import com.bakdata.conquery.models.config.ConqueryConfig;
import com.bakdata.conquery.models.datasets.concepts.DaterangeSelectOrFilter;
import com.bakdata.conquery.models.datasets.concepts.filters.Filter;
import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter;
import com.bakdata.conquery.models.events.MajorTypeId;
import com.bakdata.conquery.models.exceptions.ConceptConfigurationException;
import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId;
import com.bakdata.conquery.models.query.filter.RangeFilterNode;
import com.bakdata.conquery.models.query.queryplan.aggregators.specific.DurationSumAggregator;
import com.bakdata.conquery.models.query.queryplan.filter.FilterNode;
import com.bakdata.conquery.sql.conversion.model.aggregator.DurationSumSqlAggregator;
import com.bakdata.conquery.sql.conversion.model.filter.FilterConverter;
import java.util.List;
import javax.annotation.Nullable;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
Expand All @@ -22,21 +24,25 @@
@Setter
@Slf4j
@CPSType(id = "DURATION_SUM", base = Filter.class)
public class DurationSumFilter extends SingleColumnFilter<Range.LongRange> {
public class DurationSumFilter extends Filter<Range.LongRange> implements DaterangeSelectOrFilter {

@Nullable
private ColumnId column;
@Nullable
private ColumnId startColumn;
@Nullable
private ColumnId endColumn;

@Override
public EnumSet<MajorTypeId> getAcceptedColumnTypes() {
return EnumSet.of(MajorTypeId.DATE_RANGE);
public List<ColumnId> getRequiredColumns() {
if (isSingleColumnDaterange()) {
return List.of(column);
}
return List.of(startColumn, endColumn);
}

@Override
public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException {
MajorTypeId type = getColumn().resolve().getType();
if (type != MajorTypeId.DATE_RANGE) {
throw new ConceptConfigurationException(getConnector(), "DURATION_SUM filter is incompatible with columns of type "
+ type);
}

f.setType(FrontendFilterType.Fields.INTEGER_RANGE);
f.setMin(0);
}
Expand All @@ -45,4 +51,9 @@ public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig
public FilterNode createFilterNode(Range.LongRange value) {
return new RangeFilterNode(value, new DurationSumAggregator(getColumn().resolve()));
}

@Override
public FilterConverter<DurationSumFilter, Range.LongRange> createConverter() {
return new DurationSumSqlAggregator();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator;
import com.bakdata.conquery.models.query.queryplan.aggregators.specific.DurationSumAggregator;
import com.bakdata.conquery.models.types.ResultType;
import com.bakdata.conquery.sql.conversion.model.select.DurationSumSelectConverter;
import com.bakdata.conquery.sql.conversion.model.aggregator.DurationSumSqlAggregator;
import com.bakdata.conquery.sql.conversion.model.select.SelectConverter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
Expand Down Expand Up @@ -52,6 +52,6 @@ public ResultType getResultType() {

@Override
public SelectConverter<DurationSumSelect> createConverter() {
return new DurationSumSelectConverter();
return new DurationSumSqlAggregator();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import com.bakdata.conquery.models.preproc.parser.ColumnValues;
import com.bakdata.conquery.models.preproc.parser.Parser;
import lombok.ToString;
import org.jetbrains.annotations.NotNull;

@ToString(callSuper = true)
public class BooleanParser extends Parser<Boolean, BooleanStore> {
Expand All @@ -19,6 +20,11 @@ public BooleanParser(ConqueryConfig config) {

@Override
protected Boolean parseValue(@Nonnull String value) throws ParsingException {
return parseBoolean(value);
}

@NotNull
public static Boolean parseBoolean(@NotNull String value) {
return switch (value) {
case "J", "true", "1" -> true;
case "N", "false", "0" -> false;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,5 +24,5 @@ public enum DateAggregationMode {
* Merge or intersect the dates depending on certain nodes in the query plan (OR -> MERGE, AND -> INTERSECT,
* NOT -> INVERT)
*/
LOGICAL;
LOGICAL
}
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
package com.bakdata.conquery.models.query.queryplan.aggregators.specific.value;

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import com.bakdata.conquery.models.datasets.Column;
import com.bakdata.conquery.models.events.Bucket;
import com.bakdata.conquery.models.query.QueryExecutionContext;
import com.bakdata.conquery.models.query.entity.Entity;
import com.bakdata.conquery.models.query.queryplan.aggregators.SingleColumnAggregator;
import com.google.common.collect.ImmutableSet;
import lombok.ToString;

/**
Expand All @@ -17,7 +18,7 @@
* @param <VALUE> Value type of the column.
*/
@ToString(callSuper = true, onlyExplicitlyIncluded = true)
public class AllValuesAggregator<VALUE> extends SingleColumnAggregator<Set<VALUE>> {
public class AllValuesAggregator<VALUE> extends SingleColumnAggregator<List<VALUE>> {

private final Set<VALUE> entries = new HashSet<>();

Expand All @@ -38,8 +39,8 @@ public void consumeEvent(Bucket bucket, int event) {
}

@Override
public Set<VALUE> createAggregationResult() {
return entries.isEmpty() ? null : ImmutableSet.copyOf(entries);
public List<VALUE> createAggregationResult() {
return entries.isEmpty() ? null : entries.stream().sorted().collect(Collectors.toList());
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ public DateRangeStringPrinter(PrintSettings printSettings, String negativeInf, S

@Override
public String apply(@NotNull List<Integer> f) {
if (f.isEmpty()) {
return null;
}

Preconditions.checkArgument(f.size() == 2, "Expected a list with 2 elements, one min, one max. The list was: %s ", f);

final Integer min = f.get(0);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
package com.bakdata.conquery.sql.conversion;

import java.util.List;
import java.util.Optional;

import com.google.common.collect.MoreCollectors;
import lombok.Getter;

/**
Expand All @@ -23,9 +23,24 @@ protected Conversions(List<? extends Converter<? extends C, R, X>> converters) {
}

public R convert(C node, X context) {
return converters.stream()
.flatMap(converter -> converter.tryConvert(node, context).stream())
.collect(MoreCollectors.onlyElement());
R converted = null;
for (Converter<? extends C, R, X> converter : converters) {
Optional<R> maybeConverted = converter.tryConvert(node, context);
if (maybeConverted.isPresent()) {
if (converted == null) {
converted = maybeConverted.get();
}
else {
throw new IllegalStateException("Multiple Converters for %s".formatted(node));
}
}
}

if (converted == null) {
throw new IllegalStateException("No converter found for %s".formatted(node));
}

return converted;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ private ColumnDateRange getAggregatedValidityDate(DateAggregationDates dateAggre
String aggregatingOperator = switch (dateAggregationAction) {
case MERGE -> " + ";
case INTERSECT -> " * ";
default -> throw new IllegalStateException("Unexpected aggregation mode: " + dateAggregationAction);
case BLOCK, NEGATE -> throw new IllegalStateException("Unexpected aggregation mode: " + dateAggregationAction);
};

String aggregatedExpression = dateAggregationDates.qualify(joinedStepCteName)
Expand All @@ -118,8 +118,7 @@ private ColumnDateRange getAggregatedValidityDate(DateAggregationDates dateAggre
}

private static String createEmptyRangeForNullValues(Field<?> field) {
return DSL.when(field.isNull(), DSL.field("'{}'::{0}", DSL.keyword("datemultirange")))
.otherwise(field)
return DSL.coalesce(field, DSL.field("'{}'::{0}", DSL.keyword("datemultirange")))
.toString();
}

Expand Down
Loading