From e4b24f73dfc6f2882eca87428513b2ed5afeb804 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 15 May 2023 11:22:39 +0200 Subject: [PATCH 01/96] initial Version of dropping concepts between each other --- .../DropzoneBetweenElements.tsx | 91 ++++++ .../form-concept-group/FormConceptGroup.tsx | 265 ++++++++++-------- .../formConceptGroupState.ts | 6 + 3 files changed, 247 insertions(+), 115 deletions(-) create mode 100644 frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx diff --git a/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx new file mode 100644 index 0000000000..181abedf24 --- /dev/null +++ b/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx @@ -0,0 +1,91 @@ +import { DropTargetMonitor, useDrop } from "react-dnd"; +import Dropzone, { ChildArgs, PossibleDroppableObject } from "../../ui-components/Dropzone"; +import styled from "@emotion/styled"; +import { DNDType } from "../../common/constants/dndTypes"; +import { ReactNode } from "react"; + +interface PropsT { + onDrop: ( + item: DroppableObject, + monitor: DropTargetMonitor, + ) => void; + acceptedDropTypes: string[]; + children?: (args: ChildArgs) => ReactNode; + } + +const DropzoneBetweenElements = ({onDrop, children, acceptedDropTypes}: PropsT) => { + const Root = styled("div")<{ + isHovered: boolean; + }>` + width: 100%; + left: 0; + top: -17px; + right: 0; + position: absolute; + bottom: 90%; + border-radius: ${({ theme }) => theme.borderRadius}; + `; + + + const DropzoneRoot = styled("div")` + width: 100%; + left: 0; + top: -17px; + right: 0; + position: absolute; + bottom: 90%; + z-index: 2; + background-color: ${({ theme }) => theme.col.bg}; + `; + + const [{ isOver, isDroppable }, drop] = useDrop({ + accept: [ + DNDType.FORM_CONFIG, + DNDType.CONCEPT_TREE_NODE, + DNDType.PREVIOUS_QUERY, + DNDType.PREVIOUS_SECONDARY_ID_QUERY, + ], + hover: (_, __) => { + if (!isDroppable) return; + + }, + collect: (monitor) => ({ + isOver: monitor.isOver(), + isDroppable: monitor.canDrop(), + }), + }); + + + const [{ isOver: isOver2, isDroppable : isDroppable2 }, drop2] = useDrop({ + accept: [ + DNDType.FORM_CONFIG, + DNDType.CONCEPT_TREE_NODE, + DNDType.PREVIOUS_QUERY, + DNDType.PREVIOUS_SECONDARY_ID_QUERY, + ], + hover: (_, __) => { + if (!isDroppable2) return; + }, + collect: (monitor) => ({ + isOver: monitor.isOver(), + isDroppable: monitor.canDrop(), + }), + }); + + return( + <> + {!isOver && !isOver2 && } + {/* Show when hovered with text and dropzone */} + {(isOver || isOver2) && + ( + + {children} + + ) + + } + + ) +} + +export default DropzoneBetweenElements; \ No newline at end of file diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 9e343b181c..027b4d75cc 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -32,6 +32,7 @@ import { useVisibleConceptListFields, } from "../stateSelectors"; +import DropzoneBetweenElements from "./DropzoneBetweenElements"; import FormConceptCopyModal from "./FormConceptCopyModal"; import FormConceptNode from "./FormConceptNode"; import { @@ -40,6 +41,7 @@ import { copyConcept, FormConceptGroupT, initializeConcept, + insertValue, onToggleIncludeSubnodes, removeConcept, removeValue, @@ -237,130 +239,163 @@ const 
FormConceptGroup = (props: Props) => { ); }} items={props.value.map((row, i) => ( - - {props.renderRowPrefix - ? props.renderRowPrefix({ - value: props.value, - onChange: props.onChange, - row, - i, - }) - : null} - {row.concepts.length > 1 && ( - - - {t("externalForms.common.connectedWith")}: - - { - props.onChange( - setValueProperties(props.value, i, { - connector: val, - }), - ); - }} - options={[ - { value: "OR", label: t("common.or") }, - { value: "AND", label: t("common.and") }, - ]} - /> - - )} - - props.onChange(addConcept(props.value, i, null)) - } - onRemoveClick={(j) => - props.onChange( - props.value && props.value[i].concepts.length === 1 - ? removeValue(props.value, i) - : removeConcept(props.value, i, j), - ) - } - items={row.concepts.map((concept, j) => - concept ? ( - - setEditedFormQueryNodePosition({ - valueIdx: i, - conceptIdx: j, - }) - } - expand={{ - onClick: () => - props.onChange( - onToggleIncludeSubnodes( - props.value, - i, - j, - !concept.includeSubnodes, - newValue, - ), - ), - expandable: - !props.disallowMultipleColumns && - hasConceptChildren(concept), - active: !!concept.includeSubnodes, + <> + { + console.log("onDrop", item); + console.log(row); + if (isMovedObject(item)) { + return props.onChange( + addConcept( + insertValue(props.value, i, newValue), + i, + copyConcept(item), + ), + ); + } + + if (props.isValidConcept && !props.isValidConcept(item)) + return null; + + return props.onChange( + addConcept( + insertValue(props.value, i, newValue), + i, + initializeConcept(item, defaults, tableConfig), + ), + ); + }} + > + {() => props.conceptDropzoneText} + + + {props.renderRowPrefix + ? props.renderRowPrefix({ + value: props.value, + onChange: props.onChange, + row, + i, + }) + : null} + {row.concepts.length > 1 && ( + + + {t("externalForms.common.connectedWith")}: + + { + props.onChange( + setValueProperties(props.value, i, { + connector: val, + }), + ); }} + options={[ + { value: "OR", label: t("common.or") }, + { value: "AND", label: t("common.and") }, + ]} /> - ) : ( - */ - acceptedDropTypes={DROP_TYPES} - onImportLines={(lines) => - onImportLines(lines, { valueIdx: i, conceptIdx: j }) - } - onDrop={(item: DragItemConceptTreeNode | DragItemFile) => { - if (item.type === "__NATIVE_FILE__") { - onDropFile(item.files[0], { + + )} + + props.onChange(addConcept(props.value, i, null)) + } + onRemoveClick={(j) => + props.onChange( + props.value && props.value[i].concepts.length === 1 + ? removeValue(props.value, i) + : removeConcept(props.value, i, j), + ) + } + items={row.concepts.map((concept, j) => + concept ? 
( + + setEditedFormQueryNodePosition({ valueIdx: i, conceptIdx: j, - }); - - return; + }) + } + expand={{ + onClick: () => + props.onChange( + onToggleIncludeSubnodes( + props.value, + i, + j, + !concept.includeSubnodes, + newValue, + ), + ), + expandable: + !props.disallowMultipleColumns && + hasConceptChildren(concept), + active: !!concept.includeSubnodes, + }} + /> + ) : ( + */ + acceptedDropTypes={DROP_TYPES} + onImportLines={(lines) => + onImportLines(lines, { valueIdx: i, conceptIdx: j }) } + onDrop={( + item: DragItemConceptTreeNode | DragItemFile, + ) => { + if (item.type === "__NATIVE_FILE__") { + onDropFile(item.files[0], { + valueIdx: i, + conceptIdx: j, + }); + + return; + } + + if (isMovedObject(item)) { + return props.onChange( + setConcept(props.value, i, j, copyConcept(item)), + ); + } + + if (props.isValidConcept && !props.isValidConcept(item)) + return null; - if (isMovedObject(item)) { return props.onChange( - setConcept(props.value, i, j, copyConcept(item)), + setConcept( + props.value, + i, + j, + initializeConcept(item, defaults, tableConfig), + ), ); + }} + > + {({ isOver, item }) => + isOver && isMovedObject(item) + ? t("externalForms.common.concept.copying") + : props.conceptDropzoneText } - - if (props.isValidConcept && !props.isValidConcept(item)) - return null; - - return props.onChange( - setConcept( - props.value, - i, - j, - initializeConcept(item, defaults, tableConfig), - ), - ); - }} - > - {({ isOver, item }) => - isOver && isMovedObject(item) - ? t("externalForms.common.concept.copying") - : props.conceptDropzoneText - } - - ), - )} - /> - + + ), + )} + /> + + ))} /> {isCopyModalOpen && ( diff --git a/frontend/src/js/external-forms/form-concept-group/formConceptGroupState.ts b/frontend/src/js/external-forms/form-concept-group/formConceptGroupState.ts index da2cbc3706..d24fe3c441 100644 --- a/frontend/src/js/external-forms/form-concept-group/formConceptGroupState.ts +++ b/frontend/src/js/external-forms/form-concept-group/formConceptGroupState.ts @@ -47,6 +47,12 @@ export const addValue = ( newValue: FormConceptGroupT, ) => [...value, newValue]; +export const insertValue = ( + value: FormConceptGroupT[], + valueIdx: number, + newValue: FormConceptGroupT, +) => [...value.slice(0, valueIdx), newValue, ...value.slice(valueIdx)]; + export const removeValue = (value: FormConceptGroupT[], valueIdx: number) => { return [...value.slice(0, valueIdx), ...value.slice(valueIdx + 1)]; }; From 4fa714da4f130d96f3581706305abf87a35c34a9 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 15 May 2023 11:37:13 +0200 Subject: [PATCH 02/96] cleanup --- .../DropzoneBetweenElements.tsx | 165 +++++++++--------- .../form-concept-group/FormConceptGroup.tsx | 2 - 2 files changed, 84 insertions(+), 83 deletions(-) diff --git a/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx index 181abedf24..0613fd1d35 100644 --- a/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx @@ -1,91 +1,94 @@ -import { DropTargetMonitor, useDrop } from "react-dnd"; -import Dropzone, { ChildArgs, PossibleDroppableObject } from "../../ui-components/Dropzone"; import styled from "@emotion/styled"; -import { DNDType } from "../../common/constants/dndTypes"; import { ReactNode } from "react"; +import { DropTargetMonitor, useDrop } from "react-dnd"; -interface PropsT { - onDrop: ( - item: 
DroppableObject, - monitor: DropTargetMonitor, - ) => void; - acceptedDropTypes: string[]; - children?: (args: ChildArgs) => ReactNode; - } - -const DropzoneBetweenElements = ({onDrop, children, acceptedDropTypes}: PropsT) => { - const Root = styled("div")<{ - isHovered: boolean; - }>` - width: 100%; - left: 0; - top: -17px; - right: 0; - position: absolute; - bottom: 90%; - border-radius: ${({ theme }) => theme.borderRadius}; - `; +import { DNDType } from "../../common/constants/dndTypes"; +import Dropzone, { + ChildArgs, + PossibleDroppableObject, +} from "../../ui-components/Dropzone"; +interface PropsT { + onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; + acceptedDropTypes: string[]; + children?: (args: ChildArgs) => ReactNode; +} - const DropzoneRoot = styled("div")` - width: 100%; - left: 0; - top: -17px; - right: 0; - position: absolute; - bottom: 90%; - z-index: 2; - background-color: ${({ theme }) => theme.col.bg}; - `; +const DropzoneBetweenElements = < + DroppableObject extends PossibleDroppableObject, +>({ + onDrop, + children, + acceptedDropTypes, +}: PropsT) => { + const Root = styled("div")<{ + isHovered: boolean; + }>` + width: 100%; + left: 0; + top: -17px; + right: 0; + position: absolute; + bottom: 90%; + border-radius: ${({ theme }) => theme.borderRadius}; + `; - const [{ isOver, isDroppable }, drop] = useDrop({ - accept: [ - DNDType.FORM_CONFIG, - DNDType.CONCEPT_TREE_NODE, - DNDType.PREVIOUS_QUERY, - DNDType.PREVIOUS_SECONDARY_ID_QUERY, - ], - hover: (_, __) => { - if (!isDroppable) return; - - }, - collect: (monitor) => ({ - isOver: monitor.isOver(), - isDroppable: monitor.canDrop(), - }), - }); + const DropzoneRoot = styled("div")` + width: 100%; + left: 0; + top: -17px; + right: 0; + position: absolute; + bottom: 90%; + z-index: 2; + background-color: ${({ theme }) => theme.col.bg}; + `; + const [{ isOver, isDroppable }, drop] = useDrop({ + accept: [ + DNDType.FORM_CONFIG, + DNDType.CONCEPT_TREE_NODE, + DNDType.PREVIOUS_QUERY, + DNDType.PREVIOUS_SECONDARY_ID_QUERY, + ], + hover: (_, __) => { + if (!isDroppable) return; + }, + collect: (monitor) => ({ + isOver: monitor.isOver(), + isDroppable: monitor.canDrop(), + }), + }); - const [{ isOver: isOver2, isDroppable : isDroppable2 }, drop2] = useDrop({ - accept: [ - DNDType.FORM_CONFIG, - DNDType.CONCEPT_TREE_NODE, - DNDType.PREVIOUS_QUERY, - DNDType.PREVIOUS_SECONDARY_ID_QUERY, - ], - hover: (_, __) => { - if (!isDroppable2) return; - }, - collect: (monitor) => ({ - isOver: monitor.isOver(), - isDroppable: monitor.canDrop(), - }), - }); + const [{ isOver: isOver2, isDroppable: isDroppable2 }, drop2] = useDrop({ + accept: [ + DNDType.FORM_CONFIG, + DNDType.CONCEPT_TREE_NODE, + DNDType.PREVIOUS_QUERY, + DNDType.PREVIOUS_SECONDARY_ID_QUERY, + ], + hover: (_, __) => { + if (!isDroppable2) return; + }, + collect: (monitor) => ({ + isOver: monitor.isOver(), + isDroppable: monitor.canDrop(), + }), + }); - return( - <> - {!isOver && !isOver2 && } - {/* Show when hovered with text and dropzone */} - {(isOver || isOver2) && - ( - - {children} - - ) - - } - - ) -} + return ( + <> + {!isOver && !isOver2 && } + {/* Show when hovered with text and dropzone */} + {(isOver || isOver2) && ( + + + {children} + + + )} + + ); +}; -export default DropzoneBetweenElements; \ No newline at end of file +export default DropzoneBetweenElements; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 
027b4d75cc..5b8e9788b5 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -243,8 +243,6 @@ const FormConceptGroup = (props: Props) => { { - console.log("onDrop", item); - console.log(row); if (isMovedObject(item)) { return props.onChange( addConcept( From 2ba5888205de22ac80c153ae4c134ae73495b79c Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 15 May 2023 12:13:24 +0200 Subject: [PATCH 03/96] format --- .../form-concept-group/FormConceptGroup.tsx | 44 ++++++++++--------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 5b8e9788b5..6d55b3f636 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -240,33 +240,35 @@ const FormConceptGroup = (props: Props) => { }} items={props.value.map((row, i) => ( <> - { - if (isMovedObject(item)) { + {!props.disallowMultipleColumns && ( + { + if (isMovedObject(item)) { + return props.onChange( + addConcept( + insertValue(props.value, i, newValue), + i, + copyConcept(item), + ), + ); + } + + if (props.isValidConcept && !props.isValidConcept(item)) + return null; + return props.onChange( addConcept( insertValue(props.value, i, newValue), i, - copyConcept(item), + initializeConcept(item, defaults, tableConfig), ), ); - } - - if (props.isValidConcept && !props.isValidConcept(item)) - return null; - - return props.onChange( - addConcept( - insertValue(props.value, i, newValue), - i, - initializeConcept(item, defaults, tableConfig), - ), - ); - }} - > - {() => props.conceptDropzoneText} - + }} + > + {() => props.conceptDropzoneText} + + )} {props.renderRowPrefix ? 
props.renderRowPrefix({ From cb563b506b173a41ba92a53975b97d20bfc2e6c2 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Fri, 19 May 2023 14:48:55 +0200 Subject: [PATCH 04/96] Implement version 2 of UI dropping between elements --- .../form-components/DropzoneList.tsx | 23 ++++- .../form-concept-group/BetweenElement.tsx | 89 ++++++++++++++++++ .../DropzoneBetweenElements.tsx | 94 ------------------- .../form-concept-group/FormConceptGroup.tsx | 55 +++++------ 4 files changed, 133 insertions(+), 128 deletions(-) create mode 100644 frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx delete mode 100644 frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index f4f22a8925..07964fb368 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -14,6 +14,7 @@ import DropzoneWithFileInput, { } from "../../ui-components/DropzoneWithFileInput"; import Label from "../../ui-components/Label"; import Optional from "../../ui-components/Optional"; +import BetweenElements from "../form-concept-group/BetweenElement"; const ListItem = styled("div")` position: relative; @@ -51,6 +52,10 @@ interface PropsT { ) => void; onDropFile: (file: File) => void; onImportLines: (lines: string[]) => void; + conceptDropzoneText: string; + dropBetween: ( + i: number, + ) => (item: DroppableObject, monitor: DropTargetMonitor) => void; } const DropzoneList = ( @@ -66,6 +71,8 @@ const DropzoneList = ( disallowMultipleColumns, onDrop, onImportLines, + conceptDropzoneText, + dropBetween, }: PropsT, ref: Ref, ) => { @@ -87,10 +94,18 @@ const DropzoneList = ( {items && items.length > 0 && (
{items.map((item, i) => ( - - onDelete(i)} /> - {item} - + <> + + {() => conceptDropzoneText} + + + onDelete(i)} /> + {item} + + ))}
)} diff --git a/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx b/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx new file mode 100644 index 0000000000..d83d06d878 --- /dev/null +++ b/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx @@ -0,0 +1,89 @@ +import styled from "@emotion/styled"; +import { faPlus } from "@fortawesome/free-solid-svg-icons"; +import { ReactNode, useState } from "react"; +import { DropTargetMonitor, useDrop } from "react-dnd"; + +import IconButton from "../../button/IconButton"; +import Dropzone, { + ChildArgs, + PossibleDroppableObject, +} from "../../ui-components/Dropzone"; + +interface Props { + onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; + acceptedDropTypes: string[]; + children?: (args: ChildArgs) => ReactNode; +} + +const Root = styled("div")<{ + isOver: boolean; + isDroppable: boolean; +}>` + background-color: ${({ theme, isDroppable, isOver }) => { + if (isOver && isDroppable) return theme.col.grayLight; + if (isDroppable) return theme.col.grayVeryLight; + return "inherit"; + }}; + display: flex; + align-items: center; + width: 100%; + margin-bottom: 1px; + z-index: 2; + position: relative; +`; + +const PlusContainer = styled("div")` + margin-left: 45%; + width: 10%; +`; + +const BetweenElements = ({ + acceptedDropTypes, + children, + onDrop, +}: Props) => { + const [showDropzone, setShowDropzone] = useState(false); + const [{ isOver, isDroppable }, drop] = useDrop({ + accept: acceptedDropTypes, + collect: (monitor) => ({ + isOver: monitor.isOver(), + isDroppable: monitor.canDrop(), + }), + }); + const [{ isOver: isOver2 }, drop2] = useDrop({ + accept: acceptedDropTypes, + collect: (monitor) => ({ + isOver: monitor.isOver(), + }), + }); + + const onDropped = (item: DroppableObject, monitor: DropTargetMonitor) => { + setShowDropzone(false); + onDrop(item, monitor); + }; + + return ( + <> + {!(showDropzone || isOver || isOver2) && ( + + setShowDropzone(true)}> + + + + )} + + {(showDropzone || isOver || isOver2) && ( + // TODO x - to close the dropzone + + {children} + + )} + + ); +}; + +export default BetweenElements; diff --git a/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx deleted file mode 100644 index 0613fd1d35..0000000000 --- a/frontend/src/js/external-forms/form-concept-group/DropzoneBetweenElements.tsx +++ /dev/null @@ -1,94 +0,0 @@ -import styled from "@emotion/styled"; -import { ReactNode } from "react"; -import { DropTargetMonitor, useDrop } from "react-dnd"; - -import { DNDType } from "../../common/constants/dndTypes"; -import Dropzone, { - ChildArgs, - PossibleDroppableObject, -} from "../../ui-components/Dropzone"; - -interface PropsT { - onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; - acceptedDropTypes: string[]; - children?: (args: ChildArgs) => ReactNode; -} - -const DropzoneBetweenElements = < - DroppableObject extends PossibleDroppableObject, ->({ - onDrop, - children, - acceptedDropTypes, -}: PropsT) => { - const Root = styled("div")<{ - isHovered: boolean; - }>` - width: 100%; - left: 0; - top: -17px; - right: 0; - position: absolute; - bottom: 90%; - border-radius: ${({ theme }) => theme.borderRadius}; - `; - - const DropzoneRoot = styled("div")` - width: 100%; - left: 0; - top: -17px; - right: 0; - position: absolute; - bottom: 90%; - z-index: 2; - background-color: ${({ theme }) => theme.col.bg}; - `; - - const [{ 
isOver, isDroppable }, drop] = useDrop({ - accept: [ - DNDType.FORM_CONFIG, - DNDType.CONCEPT_TREE_NODE, - DNDType.PREVIOUS_QUERY, - DNDType.PREVIOUS_SECONDARY_ID_QUERY, - ], - hover: (_, __) => { - if (!isDroppable) return; - }, - collect: (monitor) => ({ - isOver: monitor.isOver(), - isDroppable: monitor.canDrop(), - }), - }); - - const [{ isOver: isOver2, isDroppable: isDroppable2 }, drop2] = useDrop({ - accept: [ - DNDType.FORM_CONFIG, - DNDType.CONCEPT_TREE_NODE, - DNDType.PREVIOUS_QUERY, - DNDType.PREVIOUS_SECONDARY_ID_QUERY, - ], - hover: (_, __) => { - if (!isDroppable2) return; - }, - collect: (monitor) => ({ - isOver: monitor.isOver(), - isDroppable: monitor.canDrop(), - }), - }); - - return ( - <> - {!isOver && !isOver2 && } - {/* Show when hovered with text and dropzone */} - {(isOver || isOver2) && ( - - - {children} - - - )} - - ); -}; - -export default DropzoneBetweenElements; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 6d55b3f636..94fcb4af78 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -32,7 +32,6 @@ import { useVisibleConceptListFields, } from "../stateSelectors"; -import DropzoneBetweenElements from "./DropzoneBetweenElements"; import FormConceptCopyModal from "./FormConceptCopyModal"; import FormConceptNode from "./FormConceptNode"; import { @@ -201,6 +200,30 @@ const FormConceptGroup = (props: Props) => { ? t("externalForms.common.concept.copying") : props.attributeDropzoneText } + dropBetween={(i: number) => { + return (item: DragItemConceptTreeNode) => { + if (isMovedObject(item)) { + return props.onChange( + addConcept( + insertValue(props.value, i, newValue), + i, + copyConcept(item), + ), + ); + } + + if (props.isValidConcept && !props.isValidConcept(item)) + return null; + + return props.onChange( + addConcept( + insertValue(props.value, i, newValue), + i, + initializeConcept(item, defaults, tableConfig), + ), + ); + }; + }} acceptedDropTypes={[DNDType.CONCEPT_TREE_NODE]} disallowMultipleColumns={props.disallowMultipleColumns} onDelete={(i) => props.onChange(removeValue(props.value, i))} @@ -238,37 +261,9 @@ const FormConceptGroup = (props: Props) => { ), ); }} + conceptDropzoneText={props.conceptDropzoneText} items={props.value.map((row, i) => ( <> - {!props.disallowMultipleColumns && ( - { - if (isMovedObject(item)) { - return props.onChange( - addConcept( - insertValue(props.value, i, newValue), - i, - copyConcept(item), - ), - ); - } - - if (props.isValidConcept && !props.isValidConcept(item)) - return null; - - return props.onChange( - addConcept( - insertValue(props.value, i, newValue), - i, - initializeConcept(item, defaults, tableConfig), - ), - ); - }} - > - {() => props.conceptDropzoneText} - - )} {props.renderRowPrefix ? 
props.renderRowPrefix({ From 29b50315cf28b1923667b3991592f2a1268c53b5 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 22 May 2023 11:47:47 +0200 Subject: [PATCH 05/96] Iteration 2 of UI --- .../form-components/DropzoneList.tsx | 2 +- .../form-concept-group/BetweenElement.tsx | 30 +++++++++++-------- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 07964fb368..887e48d498 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -18,7 +18,7 @@ import BetweenElements from "../form-concept-group/BetweenElement"; const ListItem = styled("div")` position: relative; - padding: 5px; + padding: 0px 5px 0px 5px; box-shadow: 0 0 3px 0 rgba(0, 0, 0, 0.1); background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; diff --git a/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx b/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx index d83d06d878..a7716cc89e 100644 --- a/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx +++ b/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx @@ -3,7 +3,7 @@ import { faPlus } from "@fortawesome/free-solid-svg-icons"; import { ReactNode, useState } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; -import IconButton from "../../button/IconButton"; +import FaIcon from "../../icon/FaIcon"; import Dropzone, { ChildArgs, PossibleDroppableObject, @@ -21,20 +21,20 @@ const Root = styled("div")<{ }>` background-color: ${({ theme, isDroppable, isOver }) => { if (isOver && isDroppable) return theme.col.grayLight; - if (isDroppable) return theme.col.grayVeryLight; - return "inherit"; + return isDroppable ? 
theme.col.grayVeryLight : "inherit"; }}; - display: flex; - align-items: center; width: 100%; - margin-bottom: 1px; - z-index: 2; - position: relative; + text-align: center; `; const PlusContainer = styled("div")` - margin-left: 45%; - width: 10%; + margin: auto; +`; + +const SxFaIcon = styled(FaIcon)` + height: 15px; + color: ${({ theme }) => theme.col.black}; + opacity: 0.75; `; const BetweenElements = ({ @@ -42,6 +42,10 @@ const BetweenElements = ({ children, onDrop, }: Props) => { + const SxDropzone = styled(Dropzone)` + margin: 5px 0 5px 0; + `; + const [showDropzone, setShowDropzone] = useState(false); const [{ isOver, isDroppable }, drop] = useDrop({ accept: acceptedDropTypes, @@ -67,20 +71,20 @@ const BetweenElements = ({ {!(showDropzone || isOver || isOver2) && ( setShowDropzone(true)}> - + )} {(showDropzone || isOver || isOver2) && ( // TODO x - to close the dropzone - {children} - + )} ); From 263c0db09710b186821efe6b8c2e1ed3b18bcea1 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Fri, 26 May 2023 11:20:13 +0200 Subject: [PATCH 06/96] add X to close button --- .../DropzoneBetweenElements.tsx} | 47 ++++++++++++++----- .../form-components/DropzoneList.tsx | 19 ++++---- 2 files changed, 47 insertions(+), 19 deletions(-) rename frontend/src/js/external-forms/{form-concept-group/BetweenElement.tsx => form-components/DropzoneBetweenElements.tsx} (68%) diff --git a/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx similarity index 68% rename from frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx rename to frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index a7716cc89e..d6e5a9b8d2 100644 --- a/frontend/src/js/external-forms/form-concept-group/BetweenElement.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -1,8 +1,9 @@ import styled from "@emotion/styled"; -import { faPlus } from "@fortawesome/free-solid-svg-icons"; +import { faPlus, faTimes } from "@fortawesome/free-solid-svg-icons"; import { ReactNode, useState } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; +import IconButton from "../../button/IconButton"; import FaIcon from "../../icon/FaIcon"; import Dropzone, { ChildArgs, @@ -13,16 +14,19 @@ interface Props { onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; acceptedDropTypes: string[]; children?: (args: ChildArgs) => ReactNode; + isFirstElement: boolean; } const Root = styled("div")<{ isOver: boolean; isDroppable: boolean; + isFirstElement: boolean; }>` background-color: ${({ theme, isDroppable, isOver }) => { if (isOver && isDroppable) return theme.col.grayLight; return isDroppable ? theme.col.grayVeryLight : "inherit"; }}; + margin-top: ${({ isFirstElement }) => (isFirstElement ? 
"5px" : "0px")}; width: 100%; text-align: center; `; @@ -31,16 +35,31 @@ const PlusContainer = styled("div")` margin: auto; `; +const DropzoneContainer = styled("div")` + overflow: hidden; + height: 54px; +`; + const SxFaIcon = styled(FaIcon)` height: 15px; color: ${({ theme }) => theme.col.black}; opacity: 0.75; `; +const RemoveBtn = styled(IconButton)` + position: relative; + color: ${({ theme }) => theme.col.black}; + top: -64px; + left: 97%; + z-index: 2; + background-color: white; +`; + const BetweenElements = ({ acceptedDropTypes, children, onDrop, + isFirstElement, }: Props) => { const SxDropzone = styled(Dropzone)` margin: 5px 0 5px 0; @@ -65,11 +84,15 @@ const BetweenElements = ({ setShowDropzone(false); onDrop(item, monitor); }; - return ( <> {!(showDropzone || isOver || isOver2) && ( - + setShowDropzone(true)}> @@ -77,14 +100,16 @@ const BetweenElements = ({ )} {(showDropzone || isOver || isOver2) && ( - // TODO x - to close the dropzone - - {children} - + + + {children} + + + )} ); diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 887e48d498..1e97d3c67f 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -14,7 +14,8 @@ import DropzoneWithFileInput, { } from "../../ui-components/DropzoneWithFileInput"; import Label from "../../ui-components/Label"; import Optional from "../../ui-components/Optional"; -import BetweenElements from "../form-concept-group/BetweenElement"; + +import BetweenElements from "./DropzoneBetweenElements"; const ListItem = styled("div")` position: relative; @@ -22,7 +23,6 @@ const ListItem = styled("div")` box-shadow: 0 0 3px 0 rgba(0, 0, 0, 0.1); background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; - margin-bottom: 5px; `; const StyledIconButton = styled(IconButton)` @@ -95,12 +95,15 @@ const DropzoneList = (
{items.map((item, i) => ( <> - - {() => conceptDropzoneText} - + {!disallowMultipleColumns && ( + + {() => conceptDropzoneText} + + )} onDelete(i)} /> {item} From 8c0f697b9e5c8dc36d0237f7f96013830da41777 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 30 May 2023 12:08:20 +0200 Subject: [PATCH 07/96] cleanup, improve text, fix small css issues --- .../DropzoneBetweenElements.tsx | 34 +++++++------------ .../form-components/DropzoneList.tsx | 16 ++++----- .../form-concept-group/FormConceptGroup.tsx | 1 - frontend/src/localization/de.json | 3 +- frontend/src/localization/en.json | 3 +- 5 files changed, 23 insertions(+), 34 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index d6e5a9b8d2..2c10931a94 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -1,19 +1,17 @@ import styled from "@emotion/styled"; -import { faPlus, faTimes } from "@fortawesome/free-solid-svg-icons"; -import { ReactNode, useState } from "react"; +import { faPlus } from "@fortawesome/free-solid-svg-icons"; +import { useState } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; -import IconButton from "../../button/IconButton"; import FaIcon from "../../icon/FaIcon"; import Dropzone, { - ChildArgs, PossibleDroppableObject, } from "../../ui-components/Dropzone"; +import { useTranslation } from "react-i18next"; interface Props { onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; acceptedDropTypes: string[]; - children?: (args: ChildArgs) => ReactNode; isFirstElement: boolean; } @@ -32,7 +30,11 @@ const Root = styled("div")<{ `; const PlusContainer = styled("div")` - margin: auto; + margin: 0; + display: flex; + justify-content: center; + align-items: center; + height: 21px; `; const DropzoneContainer = styled("div")` @@ -41,26 +43,18 @@ const DropzoneContainer = styled("div")` `; const SxFaIcon = styled(FaIcon)` - height: 15px; + height: 12px; color: ${({ theme }) => theme.col.black}; opacity: 0.75; `; -const RemoveBtn = styled(IconButton)` - position: relative; - color: ${({ theme }) => theme.col.black}; - top: -64px; - left: 97%; - z-index: 2; - background-color: white; -`; - const BetweenElements = ({ acceptedDropTypes, - children, onDrop, isFirstElement, }: Props) => { + const { t } = useTranslation(); + const SxDropzone = styled(Dropzone)` margin: 5px 0 5px 0; `; @@ -100,15 +94,13 @@ const BetweenElements = ({ )} {(showDropzone || isOver || isOver2) && ( - + setShowDropzone(false)}> - {children} + {() => t("externalForms.default.dropBetweenLabel")} - )} diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 1e97d3c67f..4cf10621ac 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -15,7 +15,7 @@ import DropzoneWithFileInput, { import Label from "../../ui-components/Label"; import Optional from "../../ui-components/Optional"; -import BetweenElements from "./DropzoneBetweenElements"; +import DropzoneBetweenElements from "./DropzoneBetweenElements"; const ListItem = styled("div")` position: relative; @@ -52,7 +52,6 @@ interface PropsT { ) => void; onDropFile: (file: File) => void; onImportLines: (lines: string[]) => void; - conceptDropzoneText: 
string; dropBetween: ( i: number, ) => (item: DroppableObject, monitor: DropTargetMonitor) => void; @@ -71,7 +70,6 @@ const DropzoneList = ( disallowMultipleColumns, onDrop, onImportLines, - conceptDropzoneText, dropBetween, }: PropsT, ref: Ref, @@ -94,21 +92,19 @@ const DropzoneList = ( {items && items.length > 0 && (
{items.map((item, i) => ( - <> +
{!disallowMultipleColumns && ( - - {() => conceptDropzoneText} - + /> )} - + onDelete(i)} /> {item} - +
))}
)} diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 94fcb4af78..4bf9a0e73a 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -261,7 +261,6 @@ const FormConceptGroup = (props: Props) => { ), ); }} - conceptDropzoneText={props.conceptDropzoneText} items={props.value.map((row, i) => ( <> diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json index 1b5843d915..a4f1f256bb 100644 --- a/frontend/src/localization/de.json +++ b/frontend/src/localization/de.json @@ -273,7 +273,8 @@ }, "default": { "conceptDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", - "conceptColumnDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu" + "conceptColumnDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", + "dropBetweenLabel": "Füge ein Konzept oder eine Konzeptliste hinzu. Clicke hier um abzubrechen" }, "copyModal": { "headline": "Kopieren von Konzepten aus anderem Feld", diff --git a/frontend/src/localization/en.json b/frontend/src/localization/en.json index 52d4c1f0ef..efa70d3c78 100644 --- a/frontend/src/localization/en.json +++ b/frontend/src/localization/en.json @@ -274,7 +274,8 @@ }, "default": { "conceptDropzoneLabel": "Add a concept or a concept list", - "conceptColumnDropzoneLabel": "Add a concept or a concept list" + "conceptColumnDropzoneLabel": "Add a concept or a concept list", + "dropBetweenLabel": "Add a concept or a concept list. Click here to cancel" }, "copyModal": { "headline": "Copy concepts form another field", From 70589c6b863d7e007108d51542a19c516e6a7895 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 30 May 2023 12:08:43 +0200 Subject: [PATCH 08/96] format --- .../form-components/DropzoneBetweenElements.tsx | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 2c10931a94..f3ae29b124 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -2,12 +2,12 @@ import styled from "@emotion/styled"; import { faPlus } from "@fortawesome/free-solid-svg-icons"; import { useState } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; +import { useTranslation } from "react-i18next"; import FaIcon from "../../icon/FaIcon"; import Dropzone, { PossibleDroppableObject, } from "../../ui-components/Dropzone"; -import { useTranslation } from "react-i18next"; interface Props { onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; @@ -95,10 +95,7 @@ const BetweenElements = ({ {(showDropzone || isOver || isOver2) && ( setShowDropzone(false)}> - + {() => t("externalForms.default.dropBetweenLabel")} From 00cc78f2ebdea210d2bc6a911355cd6e1c082ff3 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 30 May 2023 12:26:01 +0200 Subject: [PATCH 09/96] add bottom Dropzone margin, renaming to make vars clearer --- .../form-components/DropzoneBetweenElements.tsx | 10 ++++++---- .../external-forms/form-components/DropzoneList.tsx | 13 ++++++++----- .../form-concept-group/FormConceptGroup.tsx | 2 +- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git 
a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index f3ae29b124..511857ed7a 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -50,7 +50,7 @@ const SxFaIcon = styled(FaIcon)` const BetweenElements = ({ acceptedDropTypes, - onDrop, + onDrop: onDropCallback, isFirstElement, }: Props) => { const { t } = useTranslation(); @@ -60,6 +60,7 @@ const BetweenElements = ({ `; const [showDropzone, setShowDropzone] = useState(false); + const [{ isOver, isDroppable }, drop] = useDrop({ accept: acceptedDropTypes, collect: (monitor) => ({ @@ -74,10 +75,11 @@ const BetweenElements = ({ }), }); - const onDropped = (item: DroppableObject, monitor: DropTargetMonitor) => { + const onDrop = (item: DroppableObject, monitor: DropTargetMonitor) => { setShowDropzone(false); - onDrop(item, monitor); + onDropCallback(item, monitor); }; + return ( <> {!(showDropzone || isOver || isOver2) && ( @@ -95,7 +97,7 @@ const BetweenElements = ({ {(showDropzone || isOver || isOver2) && ( setShowDropzone(false)}> - + {() => t("externalForms.default.dropBetweenLabel")} diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 4cf10621ac..c9d556b3ad 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -52,7 +52,7 @@ interface PropsT { ) => void; onDropFile: (file: File) => void; onImportLines: (lines: string[]) => void; - dropBetween: ( + dropInbetween: ( i: number, ) => (item: DroppableObject, monitor: DropTargetMonitor) => void; } @@ -70,10 +70,13 @@ const DropzoneList = ( disallowMultipleColumns, onDrop, onImportLines, - dropBetween, + dropInbetween, }: PropsT, ref: Ref, ) => { + const SxDropzoneWithFileInput = styled(DropzoneWithFileInput)` + margin-top: 5px; + `; // allow at least one column const showDropzone = (items && items.length === 0) || !disallowMultipleColumns; @@ -96,7 +99,7 @@ const DropzoneList = ( {!disallowMultipleColumns && ( )} @@ -110,13 +113,13 @@ const DropzoneList = ( )}
{showDropzone && onImportLines && ( - {dropzoneChildren} - + )}
diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 4bf9a0e73a..7304ef2d31 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -200,7 +200,7 @@ const FormConceptGroup = (props: Props) => { ? t("externalForms.common.concept.copying") : props.attributeDropzoneText } - dropBetween={(i: number) => { + dropInbetween={(i: number) => { return (item: DragItemConceptTreeNode) => { if (isMovedObject(item)) { return props.onChange( From 2b8f7ed4c67d970c58c6b90134d913aabb920b27 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Fri, 2 Jun 2023 11:56:05 +0200 Subject: [PATCH 10/96] light refactoring and renaming, formatting changes --- .../form-components/DropzoneBetweenElements.tsx | 11 +++++++---- .../external-forms/form-components/DropzoneList.tsx | 10 ++++++---- .../form-concept-group/FormConceptGroup.tsx | 2 +- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 511857ed7a..d26e4c2215 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -61,14 +61,14 @@ const BetweenElements = ({ const [showDropzone, setShowDropzone] = useState(false); - const [{ isOver, isDroppable }, drop] = useDrop({ + const [{ isOver, isDroppable }, addZoneRef] = useDrop({ accept: acceptedDropTypes, collect: (monitor) => ({ isOver: monitor.isOver(), isDroppable: monitor.canDrop(), }), }); - const [{ isOver: isOver2 }, drop2] = useDrop({ + const [{ isOver: isOver2 }, dropzoneWrapperRef] = useDrop({ accept: acceptedDropTypes, collect: (monitor) => ({ isOver: monitor.isOver(), @@ -84,7 +84,7 @@ const BetweenElements = ({ <> {!(showDropzone || isOver || isOver2) && ( ({ )} {(showDropzone || isOver || isOver2) && ( - setShowDropzone(false)}> + setShowDropzone(false)} + > {() => t("externalForms.default.dropBetweenLabel")} diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index c9d556b3ad..eb5577dc06 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -52,7 +52,7 @@ interface PropsT { ) => void; onDropFile: (file: File) => void; onImportLines: (lines: string[]) => void; - dropInbetween: ( + dropBetween: ( i: number, ) => (item: DroppableObject, monitor: DropTargetMonitor) => void; } @@ -70,11 +70,13 @@ const DropzoneList = ( disallowMultipleColumns, onDrop, onImportLines, - dropInbetween, + dropBetween, }: PropsT, ref: Ref, ) => { - const SxDropzoneWithFileInput = styled(DropzoneWithFileInput)` + const SxDropzoneWithFileInput = styled( + DropzoneWithFileInput, + )` margin-top: 5px; `; // allow at least one column @@ -99,7 +101,7 @@ const DropzoneList = ( {!disallowMultipleColumns && ( )} diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 7304ef2d31..4bf9a0e73a 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -200,7 +200,7 
@@ const FormConceptGroup = (props: Props) => { ? t("externalForms.common.concept.copying") : props.attributeDropzoneText } - dropInbetween={(i: number) => { + dropBetween={(i: number) => { return (item: DragItemConceptTreeNode) => { if (isMovedObject(item)) { return props.onChange( From 04bcd01f0cc0a3a1fb45aab1e9a10466d6ba4746 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 13 Jun 2023 14:16:06 +0200 Subject: [PATCH 11/96] fix spelling --- frontend/src/localization/de.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json index a4f1f256bb..549e4029e5 100644 --- a/frontend/src/localization/de.json +++ b/frontend/src/localization/de.json @@ -274,7 +274,7 @@ "default": { "conceptDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", "conceptColumnDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", - "dropBetweenLabel": "Füge ein Konzept oder eine Konzeptliste hinzu. Clicke hier um abzubrechen" + "dropBetweenLabel": "Füge ein Konzept oder eine Konzeptliste hinzu. Klicke hier um abzubrechen" }, "copyModal": { "headline": "Kopieren von Konzepten aus anderem Feld", From 8ed55c246e83c93dd6fa21fe431fae0b0a6a59e8 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Fri, 16 Jun 2023 11:42:49 +0200 Subject: [PATCH 12/96] implement v3 --- .../DropzoneBetweenElements.tsx | 75 +++++-------------- .../form-components/DropzoneList.tsx | 1 - .../form-concept-group/FormConceptGroup.tsx | 5 +- 3 files changed, 22 insertions(+), 59 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index d26e4c2215..b21c644f9d 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -1,11 +1,6 @@ import styled from "@emotion/styled"; -import { faPlus } from "@fortawesome/free-solid-svg-icons"; -import { useState } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; -import { useTranslation } from "react-i18next"; - -import FaIcon from "../../icon/FaIcon"; -import Dropzone, { +import { PossibleDroppableObject, } from "../../ui-components/Dropzone"; @@ -20,49 +15,28 @@ const Root = styled("div")<{ isDroppable: boolean; isFirstElement: boolean; }>` - background-color: ${({ theme, isDroppable, isOver }) => { - if (isOver && isDroppable) return theme.col.grayLight; - return isDroppable ? theme.col.grayVeryLight : "inherit"; - }}; - margin-top: ${({ isFirstElement }) => (isFirstElement ? 
"5px" : "0px")}; width: 100%; - text-align: center; -`; - -const PlusContainer = styled("div")` - margin: 0; - display: flex; - justify-content: center; - align-items: center; - height: 21px; + left: 0; + top: -17px; + height: 40px; + right: 0; + position: relative; + border-radius: ${({ theme }) => theme.borderRadius}; `; const DropzoneContainer = styled("div")` overflow: hidden; - height: 54px; -`; - -const SxFaIcon = styled(FaIcon)` - height: 12px; - color: ${({ theme }) => theme.col.black}; - opacity: 0.75; + height: 20px; `; const BetweenElements = ({ acceptedDropTypes, - onDrop: onDropCallback, + onDrop, isFirstElement, }: Props) => { - const { t } = useTranslation(); - - const SxDropzone = styled(Dropzone)` - margin: 5px 0 5px 0; - `; - - const [showDropzone, setShowDropzone] = useState(false); - const [{ isOver, isDroppable }, addZoneRef] = useDrop({ accept: acceptedDropTypes, + drop: onDrop, collect: (monitor) => ({ isOver: monitor.isOver(), isDroppable: monitor.canDrop(), @@ -74,37 +48,24 @@ const BetweenElements = ({ isOver: monitor.isOver(), }), }); - - const onDrop = (item: DroppableObject, monitor: DropTargetMonitor) => { - setShowDropzone(false); - onDropCallback(item, monitor); - }; + console.log(isOver, isDroppable); return ( <> - {!(showDropzone || isOver || isOver2) && ( - setShowDropzone(true)}> - - + > - )} - {(showDropzone || isOver || isOver2) && ( - setShowDropzone(false)} - > - - {() => t("externalForms.default.dropBetweenLabel")} - - - )} + {( isOver || isOver2) && ( + + + )} ); }; diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index eb5577dc06..76b6f4fc2a 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -14,7 +14,6 @@ import DropzoneWithFileInput, { } from "../../ui-components/DropzoneWithFileInput"; import Label from "../../ui-components/Label"; import Optional from "../../ui-components/Optional"; - import DropzoneBetweenElements from "./DropzoneBetweenElements"; const ListItem = styled("div")` diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 4bf9a0e73a..3f16486173 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -24,6 +24,7 @@ import ToggleButton from "../../ui-components/ToggleButton"; import UploadConceptListModal from "../../upload-concept-list-modal/UploadConceptListModal"; import type { ConceptListDefaults as ConceptListDefaultsType } from "../config-types"; import { Description } from "../form-components/Description"; +import DropzoneBetweenElements from "../form-components/DropzoneBetweenElements"; import DropzoneList from "../form-components/DropzoneList"; import DynamicInputGroup from "../form-components/DynamicInputGroup"; import FormQueryNodeEditor from "../form-query-node-editor/FormQueryNodeEditor"; @@ -87,7 +88,9 @@ interface Props { }) => ReactNode; } -const DropzoneListItem = styled("div")``; +const DropzoneListItem = styled("div")` + margin-top: -30px; +`; const Row = styled("div")` display: flex; align-items: center; From 46bf6ec78a959606692f20891fe9dc8809419013 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Fri, 16 Jun 2023 11:43:09 +0200 Subject: [PATCH 13/96] format --- .../DropzoneBetweenElements.tsx | 27 
++++++++----------- .../form-components/DropzoneList.tsx | 1 + 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index b21c644f9d..9a24ff75bc 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -1,8 +1,7 @@ import styled from "@emotion/styled"; import { DropTargetMonitor, useDrop } from "react-dnd"; -import { - PossibleDroppableObject, -} from "../../ui-components/Dropzone"; + +import { PossibleDroppableObject } from "../../ui-components/Dropzone"; interface Props { onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; @@ -52,20 +51,16 @@ const BetweenElements = ({ return ( <> - - + - {( isOver || isOver2) && ( - - - )} + {(isOver || isOver2) && ( + + )} ); }; diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 76b6f4fc2a..eb5577dc06 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -14,6 +14,7 @@ import DropzoneWithFileInput, { } from "../../ui-components/DropzoneWithFileInput"; import Label from "../../ui-components/Label"; import Optional from "../../ui-components/Optional"; + import DropzoneBetweenElements from "./DropzoneBetweenElements"; const ListItem = styled("div")` From 05e56f07d1eeeb890966058047deb4bea987f676 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 19 Jun 2023 10:01:32 +0200 Subject: [PATCH 14/96] added reordering, changed method of where the dropzone is --- .../DropzoneBetweenElements.tsx | 12 +-------- .../form-concept-group/FormConceptGroup.tsx | 26 +++++++++++++------ frontend/src/localization/de.json | 3 +-- frontend/src/localization/en.json | 3 +-- 4 files changed, 21 insertions(+), 23 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 9a24ff75bc..eb8a7a1c0e 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -41,14 +41,6 @@ const BetweenElements = ({ isDroppable: monitor.canDrop(), }), }); - const [{ isOver: isOver2 }, dropzoneWrapperRef] = useDrop({ - accept: acceptedDropTypes, - collect: (monitor) => ({ - isOver: monitor.isOver(), - }), - }); - console.log(isOver, isDroppable); - return ( <> ({ isFirstElement={isFirstElement} > - {(isOver || isOver2) && ( - - )} + {isOver && } ); }; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 3f16486173..8c65aec38e 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -24,7 +24,6 @@ import ToggleButton from "../../ui-components/ToggleButton"; import UploadConceptListModal from "../../upload-concept-list-modal/UploadConceptListModal"; import type { ConceptListDefaults as ConceptListDefaultsType } from "../config-types"; import { Description } from "../form-components/Description"; -import DropzoneBetweenElements from "../form-components/DropzoneBetweenElements"; 
import DropzoneList from "../form-components/DropzoneList"; import DynamicInputGroup from "../form-components/DynamicInputGroup"; import FormQueryNodeEditor from "../form-query-node-editor/FormQueryNodeEditor"; @@ -205,19 +204,30 @@ const FormConceptGroup = (props: Props) => { } dropBetween={(i: number) => { return (item: DragItemConceptTreeNode) => { + if (props.isValidConcept && !props.isValidConcept(item)) + return null; + if (isMovedObject(item)) { + let removed = + props.value[item.dragContext.movedFromAndIdx].concepts + .length === 1 + ? removeValue(props.value, item.dragContext.movedFromAndIdx) + : removeConcept( + props.value, + item.dragContext.movedFromAndIdx, + item.dragContext.movedFromOrIdx, + ); + let insertIndex = + i > item.dragContext.movedFromAndIdx ? i - 1 : i; return props.onChange( addConcept( - insertValue(props.value, i, newValue), - i, + insertValue(removed, insertIndex, newValue), + insertIndex, copyConcept(item), ), ); } - if (props.isValidConcept && !props.isValidConcept(item)) - return null; - return props.onChange( addConcept( insertValue(props.value, i, newValue), @@ -244,6 +254,8 @@ const FormConceptGroup = (props: Props) => { return; } + if (props.isValidConcept && !props.isValidConcept(item)) return; + if (isMovedObject(item)) { return props.onChange( addConcept( @@ -254,8 +266,6 @@ const FormConceptGroup = (props: Props) => { ); } - if (props.isValidConcept && !props.isValidConcept(item)) return; - return props.onChange( addConcept( addValue(props.value, newValue), diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json index 549e4029e5..1b5843d915 100644 --- a/frontend/src/localization/de.json +++ b/frontend/src/localization/de.json @@ -273,8 +273,7 @@ }, "default": { "conceptDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", - "conceptColumnDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", - "dropBetweenLabel": "Füge ein Konzept oder eine Konzeptliste hinzu. Klicke hier um abzubrechen" + "conceptColumnDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu" }, "copyModal": { "headline": "Kopieren von Konzepten aus anderem Feld", diff --git a/frontend/src/localization/en.json b/frontend/src/localization/en.json index efa70d3c78..52d4c1f0ef 100644 --- a/frontend/src/localization/en.json +++ b/frontend/src/localization/en.json @@ -274,8 +274,7 @@ }, "default": { "conceptDropzoneLabel": "Add a concept or a concept list", - "conceptColumnDropzoneLabel": "Add a concept or a concept list", - "dropBetweenLabel": "Add a concept or a concept list. 
Click here to cancel" + "conceptColumnDropzoneLabel": "Add a concept or a concept list" }, "copyModal": { "headline": "Copy concepts form another field", From 0241f8efe2205e87946e8c27fa1572c25ca22d33 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 20 Jun 2023 13:33:04 +0200 Subject: [PATCH 15/96] improve dragging - expand drop zone --- .../DropzoneBetweenElements.tsx | 40 ++++++++++++------- .../form-components/DropzoneList.tsx | 1 - 2 files changed, 25 insertions(+), 16 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index eb8a7a1c0e..af7d5798b1 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -1,4 +1,5 @@ import styled from "@emotion/styled"; +import { useState } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; import { PossibleDroppableObject } from "../../ui-components/Dropzone"; @@ -6,51 +7,60 @@ import { PossibleDroppableObject } from "../../ui-components/Dropzone"; interface Props { onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; acceptedDropTypes: string[]; - isFirstElement: boolean; } const Root = styled("div")<{ - isOver: boolean; - isDroppable: boolean; - isFirstElement: boolean; + height: number; }>` width: 100%; left: 0; - top: -17px; - height: 40px; + top: -15px; + height: ${({ height }) => height + 40}px; right: 0; position: relative; border-radius: ${({ theme }) => theme.borderRadius}; `; -const DropzoneContainer = styled("div")` +const DropzoneContainer = styled("div")<{ + height: number; +}>` overflow: hidden; - height: 20px; + margin-top: ${({ height }) => -height}px; + display: block; + height: ${({ height }) => height}px; `; const BetweenElements = ({ acceptedDropTypes, onDrop, - isFirstElement, }: Props) => { - const [{ isOver, isDroppable }, addZoneRef] = useDrop({ + const [height, setHeight] = useState(40); + + const [{ isOver }, addZoneRef] = useDrop({ accept: acceptedDropTypes, drop: onDrop, + hover(item) { + if (item.type === "CONCEPT_TREE_NODE") { + return setHeight(item.dragContext.height); + } + return setHeight(0); + }, + collect: (monitor) => ({ isOver: monitor.isOver(), isDroppable: monitor.canDrop(), }), }); + return ( <> - - {isOver && } + {isOver && ( + + )} ); }; diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index eb5577dc06..cd1c91e2af 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -102,7 +102,6 @@ const DropzoneList = ( )} From 2c9e60c0185b4c77237682b4bf768a60deb1bdbe Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 20 Jun 2023 13:35:20 +0200 Subject: [PATCH 16/96] format --- .../form-components/DropzoneBetweenElements.tsx | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index af7d5798b1..8a6ff2945c 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -54,13 +54,8 @@ const BetweenElements = ({ return ( <> - - {isOver && ( - - )} + + {isOver && } ); }; From 
a5113d16c3580e4223eb54835261d1a389fc8378 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 27 Jun 2023 10:42:27 +0200 Subject: [PATCH 17/96] Dropping works, removing when dragging to a new position works --- .../form-concept-group/FormConceptGroup.tsx | 47 ++++++++++++------- .../form-concept-group/FormConceptNode.tsx | 3 ++ .../src/js/standard-query-editor/types.ts | 1 + 3 files changed, 34 insertions(+), 17 deletions(-) diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 8c65aec38e..90aec6f41a 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -59,6 +59,7 @@ import { } from "./formConceptGroupState"; import { useCopyModal } from "./useCopyModal"; import { useUploadConceptListModal } from "./useUploadConceptListModal"; +import { useFormContext } from "react-hook-form"; interface Props { formType: string; @@ -178,6 +179,8 @@ const FormConceptGroup = (props: Props) => { : null; }, [editedFormQueryNodePosition, props.value]); + const { getValues } = useFormContext(); + return (
*/ @@ -208,20 +211,15 @@ const FormConceptGroup = (props: Props) => { return null; if (isMovedObject(item)) { - let removed = - props.value[item.dragContext.movedFromAndIdx].concepts - .length === 1 - ? removeValue(props.value, item.dragContext.movedFromAndIdx) - : removeConcept( - props.value, - item.dragContext.movedFromAndIdx, - item.dragContext.movedFromOrIdx, - ); + if (exists(item.dragContext.deleteInOrigin)){ + item.dragContext.deleteInOrigin(); + } + let insertIndex = i > item.dragContext.movedFromAndIdx ? i - 1 : i; return props.onChange( addConcept( - insertValue(removed, insertIndex, newValue), + insertValue(getValues(props.fieldName), insertIndex, newValue), insertIndex, copyConcept(item), ), @@ -257,10 +255,14 @@ const FormConceptGroup = (props: Props) => { if (props.isValidConcept && !props.isValidConcept(item)) return; if (isMovedObject(item)) { + if (exists(item.dragContext.deleteInOrigin)){ + item.dragContext.deleteInOrigin(); + } + const updatedValue = getValues(props.fieldName); return props.onChange( addConcept( - addValue(props.value, newValue), - props.value.length, + addValue(updatedValue, newValue), + updatedValue.length, copyConcept(item), ), ); @@ -338,6 +340,13 @@ const FormConceptGroup = (props: Props) => { conceptIdx: j, }) } + deleteInForm={() => { + return props.onChange( + props.value[i].concepts.length === 1 + ? removeValue(props.value, i) + : removeConcept(props.value, i, j) + ); + }} expand={{ onClick: () => props.onChange( @@ -372,16 +381,16 @@ const FormConceptGroup = (props: Props) => { return; } + + if (props.isValidConcept && !props.isValidConcept(item)) + return null; if (isMovedObject(item)) { return props.onChange( - setConcept(props.value, i, j, copyConcept(item)), + setConcept(getValues(props.fieldName), i, j, copyConcept(item)), ); } - if (props.isValidConcept && !props.isValidConcept(item)) - return null; - return props.onChange( setConcept( props.value, @@ -438,9 +447,13 @@ const FormConceptGroup = (props: Props) => { ); }} onDropConcept={(concept) => { + if (isMovedObject(concept) && exists(concept.dragContext.deleteInOrigin)) { + concept.dragContext.deleteInOrigin(); + } + const { valueIdx, conceptIdx } = editedFormQueryNodePosition; props.onChange( - setConceptProperties(props.value, valueIdx, conceptIdx, { + setConceptProperties(getValues(props.fieldName), valueIdx, conceptIdx, { ids: [...concept.ids, ...editedNode.ids], }), ); diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx index b4e118c9a2..dedfa2fd7a 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx @@ -81,6 +81,7 @@ interface PropsT { expandable: boolean; active: boolean; }; + deleteInForm: () => void; } // generalized node to handle concepts queried in forms @@ -92,6 +93,7 @@ const FormConceptNode: FC = ({ hasNonDefaultSettings, hasFilterValues, expand, + deleteInForm, }) => { const { t } = useTranslation(); const rootNodeLabel = getRootNodeLabel(conceptNode); @@ -113,6 +115,7 @@ const FormConceptNode: FC = ({ dragContext: { ...item.dragContext, ...getWidthAndHeight(ref), + deleteInOrigin: deleteInForm, }, }), }); diff --git a/frontend/src/js/standard-query-editor/types.ts b/frontend/src/js/standard-query-editor/types.ts index 17af92884f..0d22e6910c 100644 --- a/frontend/src/js/standard-query-editor/types.ts +++ b/frontend/src/js/standard-query-editor/types.ts @@ -56,6 
+56,7 @@ export interface DragContext { height: number; movedFromAndIdx?: number; movedFromOrIdx?: number; + deleteInOrigin?: () => void; } export interface DragItemQuery extends PreviousQueryQueryNodeType { From 03a2dcc67388e86eb4f4104c47b37015d31f8e74 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 27 Jun 2023 10:43:13 +0200 Subject: [PATCH 18/96] fix formatting --- .../form-concept-group/FormConceptGroup.tsx | 45 +++++++++++++------ 1 file changed, 31 insertions(+), 14 deletions(-) diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 90aec6f41a..885801c081 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -1,5 +1,6 @@ import styled from "@emotion/styled"; import { ReactNode, useEffect, useState, useRef, useMemo } from "react"; +import { useFormContext } from "react-hook-form"; import { useTranslation } from "react-i18next"; import { usePostPrefixForSuggestions } from "../../api/api"; @@ -59,7 +60,6 @@ import { } from "./formConceptGroupState"; import { useCopyModal } from "./useCopyModal"; import { useUploadConceptListModal } from "./useUploadConceptListModal"; -import { useFormContext } from "react-hook-form"; interface Props { formType: string; @@ -211,15 +211,19 @@ const FormConceptGroup = (props: Props) => { return null; if (isMovedObject(item)) { - if (exists(item.dragContext.deleteInOrigin)){ + if (exists(item.dragContext.deleteInOrigin)) { item.dragContext.deleteInOrigin(); } - + let insertIndex = i > item.dragContext.movedFromAndIdx ? i - 1 : i; return props.onChange( addConcept( - insertValue(getValues(props.fieldName), insertIndex, newValue), + insertValue( + getValues(props.fieldName), + insertIndex, + newValue, + ), insertIndex, copyConcept(item), ), @@ -255,7 +259,7 @@ const FormConceptGroup = (props: Props) => { if (props.isValidConcept && !props.isValidConcept(item)) return; if (isMovedObject(item)) { - if (exists(item.dragContext.deleteInOrigin)){ + if (exists(item.dragContext.deleteInOrigin)) { item.dragContext.deleteInOrigin(); } const updatedValue = getValues(props.fieldName); @@ -342,9 +346,9 @@ const FormConceptGroup = (props: Props) => { } deleteInForm={() => { return props.onChange( - props.value[i].concepts.length === 1 - ? removeValue(props.value, i) - : removeConcept(props.value, i, j) + props.value[i].concepts.length === 1 + ? 
removeValue(props.value, i) + : removeConcept(props.value, i, j), ); }} expand={{ @@ -381,13 +385,18 @@ const FormConceptGroup = (props: Props) => { return; } - + if (props.isValidConcept && !props.isValidConcept(item)) return null; if (isMovedObject(item)) { return props.onChange( - setConcept(getValues(props.fieldName), i, j, copyConcept(item)), + setConcept( + getValues(props.fieldName), + i, + j, + copyConcept(item), + ), ); } @@ -447,15 +456,23 @@ const FormConceptGroup = (props: Props) => { ); }} onDropConcept={(concept) => { - if (isMovedObject(concept) && exists(concept.dragContext.deleteInOrigin)) { + if ( + isMovedObject(concept) && + exists(concept.dragContext.deleteInOrigin) + ) { concept.dragContext.deleteInOrigin(); } const { valueIdx, conceptIdx } = editedFormQueryNodePosition; props.onChange( - setConceptProperties(getValues(props.fieldName), valueIdx, conceptIdx, { - ids: [...concept.ids, ...editedNode.ids], - }), + setConceptProperties( + getValues(props.fieldName), + valueIdx, + conceptIdx, + { + ids: [...concept.ids, ...editedNode.ids], + }, + ), ); }} onRemoveConcept={(conceptId) => { From a5a404523933f4c1a398c955d465a17a9f07317d Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 10:18:29 +0200 Subject: [PATCH 19/96] use workStealingPool to read SerializingStore --- .../xodus/stores/SerializingStore.java | 92 +++++++++++-------- 1 file changed, 54 insertions(+), 38 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 6cf3fcddbd..d0b18f34ae 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -7,6 +7,9 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collection; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.Supplier; @@ -28,6 +31,7 @@ import jetbrains.exodus.ByteIterable; import lombok.Data; import lombok.NonNull; +import lombok.SneakyThrows; import lombok.ToString; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; @@ -180,52 +184,64 @@ public VALUE get(KEY key) { * Depending on the {@link XodusStoreFactory} corrupt entries may be dump to a file and/or removed from the store. * These entries are not submitted to the consumer. 
*/ + @SneakyThrows @Override public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); + final ExecutorService executorService = Executors.newWorkStealingPool(10); + store.forEach((k, v) -> { - result.incrTotalProcessed(); - - // Try to read the key first - final KEY key = getDeserializedAndDumpFailed( - k, - this::readKey, - () -> new String(k.getBytesUnsafe()), - v, - "Could not parse key [{}]" - ); - if (key == null) { - unreadables.add(k); - result.incrFailedKeys(); - return; - } + executorService.submit(() -> { + + result.incrTotalProcessed(); + + // Try to read the key first + final KEY key = getDeserializedAndDumpFailed( + k, + this::readKey, + () -> new String(k.getBytesUnsafe()), + v, + "Could not parse key [{}]" + ); + if (key == null) { + unreadables.add(k); + result.incrFailedKeys(); + return; + } + + // Try to read the value + final VALUE value = getDeserializedAndDumpFailed( + v, + this::readValue, + key::toString, + v, + "Could not parse value for key [{}]" + ); + + if (value == null) { + unreadables.add(k); + result.incrFailedValues(); + return; + } + + // Apply the consumer to key and value + try { + consumer.accept(key, value, v.getLength()); + } + catch (Exception e) { + log.warn("Unable to apply for-each consumer on key[{}]", key, e); + } + }); + }); - // Try to read the value - final VALUE value = getDeserializedAndDumpFailed( - v, - this::readValue, - key::toString, - v, - "Could not parse value for key [{}]" - ); - - if (value == null) { - unreadables.add(k); - result.incrFailedValues(); - return; - } + executorService.shutdown(); - // Apply the consumer to key and value - try { - consumer.accept(key, value, v.getLength()); - } - catch (Exception e) { - log.warn("Unable to apply for-each consumer on key[{}]", key, e); - } + while (executorService.awaitTermination(1, TimeUnit.MINUTES)){ + log.debug("Still waiting for {} to load.", this); + } - }); // Print some statistics final int total = result.getTotalProcessed(); log.debug( @@ -254,7 +270,7 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { * @param deserializer The concrete deserializer to use. * @param onFailKeyStringSupplier When deserilization failed and dump is enabled this is used in the dump file name. * @param onFailOrigValue Will be the dumpfile content rendered as a json. - * @param onFailWarnMsgFmt The warn message that will be logged on failure. + * @param onFailWarnMsgFmt The warning message that will be logged on failure. 
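The iteration pattern introduced in this commit reduces to the following minimal sketch; ParallelForEachSketch and the list of sample entries are illustrative and not part of the patch. One task per store entry is submitted to an ExecutorService, the pool is shut down once the cursor is exhausted, and the caller then blocks until every queued deserialization has finished. Note that ExecutorService#awaitTermination returns true once the pool has terminated, so the wait loop has to keep looping while it returns false; the follow-up commit (PATCH 20/96) flips exactly that condition.

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ParallelForEachSketch {

	public static void main(String[] args) throws InterruptedException {
		// Stand-in for the Xodus store entries; in the real code these are ByteIterable key/value pairs.
		List<String> entries = List.of("a", "b", "c");

		ExecutorService pool = Executors.newWorkStealingPool(10);

		for (String entry : entries) {
			// One task per entry: deserialize the pair and hand the result to the consumer.
			pool.submit(() -> System.out.println("processed " + entry));
		}

		// No new tasks are accepted after shutdown(), but already queued ones still run.
		pool.shutdown();

		// awaitTermination returns true once the pool has terminated,
		// so keep waiting only while it still returns false.
		while (!pool.awaitTermination(1, TimeUnit.MINUTES)) {
			System.out.println("Still waiting for the store to load.");
		}
	}
}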
* @return The deserialized value */ private TYPE getDeserializedAndDumpFailed(ByteIterable serial, Function deserializer, Supplier onFailKeyStringSupplier, ByteIterable onFailOrigValue, String onFailWarnMsgFmt) { From 1ca91fffdea29922402050b3c7e84c4d53489d8b Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 10:36:07 +0200 Subject: [PATCH 20/96] fix waiting for execturoService --- .../conquery/io/storage/xodus/stores/SerializingStore.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index d0b18f34ae..4740377dde 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -238,7 +238,7 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { executorService.shutdown(); - while (executorService.awaitTermination(1, TimeUnit.MINUTES)){ + while (!executorService.awaitTermination(1, TimeUnit.MINUTES)){ log.debug("Still waiting for {} to load.", this); } From a0f8f9f96a167d14389cdded1698f6ce6a52f7f7 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 12:35:48 +0200 Subject: [PATCH 21/96] use ThreadLocal keyReader --- .../xodus/stores/SerializingStore.java | 334 +++++++++--------- 1 file changed, 168 insertions(+), 166 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 4740377dde..4a6b3425f2 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -59,7 +59,7 @@ public class SerializingStore implements Store { /** * Deserializer for keys */ - private final ObjectReader keyReader; + private final ThreadLocal keyReader; /** * Serializer for values @@ -123,14 +123,14 @@ public , CLASS_V extends Class> SerializingSto keyWriter = objectMapper.writerFor(keyType); - keyReader = objectMapper.readerFor(keyType); + keyReader = ThreadLocal.withInitial(() -> objectMapper.readerFor(keyType)); removeUnreadablesFromUnderlyingStore = removeUnreadableFromStore; unreadableValuesDumpDir = unreadableDataDumpDirectory; if (shouldDumpUnreadables()) { - if(!unreadableValuesDumpDir.exists() && !unreadableValuesDumpDir.mkdirs()) { + if (!unreadableValuesDumpDir.exists() && !unreadableValuesDumpDir.mkdirs()) { throw new IllegalStateException("Could not create dump directory: " + unreadableValuesDumpDir); } else if (!unreadableValuesDumpDir.isDirectory()) { @@ -155,19 +155,51 @@ public void add(KEY key, VALUE value) { store.add(writeKey(key), writeValue(value)); } + /** + * Serialize key with {@code keyWriter}. + */ + private ByteIterable writeKey(KEY key) { + return write(key, keyWriter); + } + + /** + * Serialize value with {@code valueWriter}. + */ + private ByteIterable writeValue(VALUE value) { + return write(value, valueWriter); + } + + /** + * Try writing object with writer. 
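The ThreadLocal reader introduced above (and extended to the value reader two commits later) boils down to this sketch; the String key type and the raw JSON bytes are made up for illustration, and jackson-databind is assumed on the classpath. Each thread that takes part in the parallel forEach lazily creates its own ObjectReader from the shared ObjectMapper instead of all threads going through a single reader instance.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

public class ThreadLocalReaderSketch {

	private static final ObjectMapper MAPPER = new ObjectMapper();

	// Every thread that touches this field gets its own ObjectReader, created on first access.
	private static final ThreadLocal<ObjectReader> KEY_READER =
			ThreadLocal.withInitial(() -> MAPPER.readerFor(String.class));

	public static void main(String[] args) throws Exception {
		byte[] raw = "\"some-key\"".getBytes();

		// Equivalent to keyReader.get().readValue(bytes, 0, length) in the patch.
		String key = KEY_READER.get().readValue(raw, 0, raw.length);
		System.out.println(key);
	}
}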
+ */ + private ByteIterable write(Object obj, ObjectWriter writer) { + try { + final byte[] bytes = writer.writeValueAsBytes(obj); + if (log.isTraceEnabled()) { + final String json = JacksonUtil.toJsonDebug(bytes); + log.trace("Written ({}): {}", valueType.getName(), json); + } + return new ArrayByteIterable(bytes); + } + catch (JsonProcessingException e) { + throw new RuntimeException("Failed to write " + obj, e); + } + } + @Override public VALUE get(KEY key) { final ByteIterable binValue = store.get(writeKey(key)); try { - return readValue(binValue); - } catch (Exception e) { + return readValue(binValue); + } + catch (Exception e) { - if(unreadableValuesDumpDir != null) { + if (unreadableValuesDumpDir != null) { dumpToFile(binValue, key.toString(), e, unreadableValuesDumpDir, store.getName(), objectMapper); } - if(removeUnreadablesFromUnderlyingStore) { + if (removeUnreadablesFromUnderlyingStore) { remove(key); // Null seems to be an acceptable return value in this case return null; @@ -179,6 +211,115 @@ public VALUE get(KEY key) { } } + /** + * Deserialize value with {@code valueReader}. + */ + private VALUE readValue(ByteIterable value) { + return read(valueReader, value); + } + + /** + * Dumps the content of an unreadable value to a file as a json (it tries to parse it as an object and than tries to dump it as a json). + * + * @param obj The object to dump. + * @param keyOfDump The key under which the unreadable value is accessible. It is used for the file name. + * @param reason The exception causing us to dump the file + * @param unreadableDumpDir The director to dump to. The method assumes that the directory exists and is okay to write to. + * @param storeName The name of the store which is also used in the dump file name. + */ + private static void dumpToFile(@NonNull ByteIterable obj, @NonNull String keyOfDump, Exception reason, @NonNull File unreadableDumpDir, String storeName, ObjectMapper objectMapper) { + // Create dump filehandle + final File dumpfile = makeDumpFileName(keyOfDump, unreadableDumpDir, storeName); + final File exceptionFileName = makeExceptionFileName(keyOfDump, unreadableDumpDir, storeName); + + if (dumpfile.exists() || exceptionFileName.exists()) { + log.trace("Abort dumping of file {} because it already exists.", dumpfile); + return; + } + + if (!dumpfile.getParentFile().exists() && !dumpfile.getParentFile().mkdirs()) { + throw new IllegalStateException("Could not create `%s`.".formatted(dumpfile.getParentFile())); + } + + //TODO FK: dump in a separate thread so we are not blocking the reader thread. + + // Write json + try { + log.info("Dumping value of key {} to {} (because it cannot be deserialized anymore).", keyOfDump, dumpfile.getCanonicalPath()); + + final JsonNode dump = objectMapper.readerFor(JsonNode.class).readValue(obj.getBytesUnsafe(), 0, obj.getLength()); + Jackson.MAPPER.writer().writeValue(dumpfile, dump); + } + catch (IOException e) { + log.error("Failed to dump unreadable value of key `{}` to file `{}`", keyOfDump, dumpfile, e); + } + + try (PrintStream out = new PrintStream(exceptionFileName)) { + reason.printStackTrace(out); + } + catch (IOException e) { + log.error("Failed to dump exception for `{}` to file `{}`.", keyOfDump, exceptionFileName, e); + } + + } + + @Override + public void remove(KEY key) { + log.trace("Removing value to key {} from Store[{}]", key, store.getName()); + store.remove(writeKey(key)); + } + + /** + * Try read value with reader. 
+ */ + private T read(ObjectReader reader, ByteIterable obj) { + if (obj == null) { + return null; + } + try { + return reader.readValue(obj.getBytesUnsafe(), 0, obj.getLength()); + } + catch (IOException e) { + throw new RuntimeException("Failed to read " + JacksonUtil.toJsonDebug(obj.getBytesUnsafe()), e); + } + } + + /** + * Generates a valid file name from the key of the dump object, the store and the current time. + * However, it does not ensure that there is no file with such a name. + *

+ * Current implementation is `$unreadableDumpDir/$today/$store/$key.json` + */ + @NotNull + public static File makeDumpFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { + return unreadableDumpDir.toPath() + .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) + .resolve(storeName) + .resolve(sanitiseFileName(keyOfDump) + "." + DUMP_FILE_EXTENSION) + .toFile(); + + } + + /** + * Generates a valid file name from the key of the dump object, the store and the current time. + * However, it does not ensure that there is no file with such a name. + *

+ * Current implementation is `$unreadableDumpDir/$today/$store/$key.exception` + */ + @NotNull + public static File makeExceptionFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { + return unreadableDumpDir.toPath() + .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) + .resolve(storeName) + .resolve(sanitiseFileName(keyOfDump) + "." + EXCEPTION_FILE_EXTENSION) + .toFile(); + + } + + private static String sanitiseFileName(@NotNull String name) { + return FileUtil.SAVE_FILENAME_REPLACEMENT_MATCHER.matcher(name).replaceAll("_"); + } + /** * Iterates a given consumer over the entries of this store. * Depending on the {@link XodusStoreFactory} corrupt entries may be dump to a file and/or removed from the store. @@ -238,21 +379,22 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { executorService.shutdown(); - while (!executorService.awaitTermination(1, TimeUnit.MINUTES)){ + while (!executorService.awaitTermination(1, TimeUnit.MINUTES)) { log.debug("Still waiting for {} to load.", this); } // Print some statistics final int total = result.getTotalProcessed(); log.debug( - String.format( - "While processing store %s:\n\tEntries processed:\t%d\n\tKey read failure:\t%d (%.2f%%)\n\tValue read failure:\t%d (%.2f%%)", - store.getName(), - total, - result.getFailedKeys(), - total > 0 ? (float) result.getFailedKeys() / total * 100 : 0, - result.getFailedValues(), - total > 0 ? (float) result.getFailedValues() / total * 100 : 0)); + String.format( + "While processing store %s:\n\tEntries processed:\t%d\n\tKey read failure:\t%d (%.2f%%)\n\tValue read failure:\t%d (%.2f%%)", + store.getName(), + total, + result.getFailedKeys(), + total > 0 ? (float) result.getFailedKeys() / total * 100 : 0, + result.getFailedValues(), + total > 0 ? (float) result.getFailedValues() / total * 100 : 0 + )); // Remove corrupted entries from the store if configured so if (removeUnreadablesFromUnderlyingStore) { @@ -288,6 +430,13 @@ private TYPE getDeserializedAndDumpFailed(ByteIterable serial, Function T read(ObjectReader reader, ByteIterable obj) { - if (obj == null) { - return null; - } - try { - return reader.readValue(obj.getBytesUnsafe(), 0, obj.getLength()); - } - catch (IOException e) { - throw new RuntimeException("Failed to read " + JacksonUtil.toJsonDebug(obj.getBytesUnsafe()), e); - } - } - - /** - * Dumps the content of an unreadable value to a file as a json (it tries to parse it as an object and than tries to dump it as a json). - * - * @param obj The object to dump. - * @param keyOfDump The key under which the unreadable value is accessible. It is used for the file name. - * @param reason The exception causing us to dump the file - * @param unreadableDumpDir The director to dump to. The method assumes that the directory exists and is okay to write to. - * @param storeName The name of the store which is also used in the dump file name. 
- */ - private static void dumpToFile(@NonNull ByteIterable obj, @NonNull String keyOfDump, Exception reason, @NonNull File unreadableDumpDir, String storeName, ObjectMapper objectMapper) { - // Create dump filehandle - final File dumpfile = makeDumpFileName(keyOfDump, unreadableDumpDir, storeName); - final File exceptionFileName = makeExceptionFileName(keyOfDump, unreadableDumpDir, storeName); - - if (dumpfile.exists() || exceptionFileName.exists()) { - log.trace("Abort dumping of file {} because it already exists.", dumpfile); - return; - } - - if(!dumpfile.getParentFile().exists() && !dumpfile.getParentFile().mkdirs()){ - throw new IllegalStateException("Could not create `%s`.".formatted(dumpfile.getParentFile())); - } - - //TODO FK: dump in a separate thread so we are not blocking the reader thread. - - // Write json - try { - log.info("Dumping value of key {} to {} (because it cannot be deserialized anymore).", keyOfDump, dumpfile.getCanonicalPath()); - - final JsonNode dump = objectMapper.readerFor(JsonNode.class).readValue(obj.getBytesUnsafe(), 0, obj.getLength()); - Jackson.MAPPER.writer().writeValue(dumpfile, dump); - } - catch (IOException e) { - log.error("Failed to dump unreadable value of key `{}` to file `{}`", keyOfDump, dumpfile, e); - } - - try(PrintStream out = new PrintStream(exceptionFileName)) { - reason.printStackTrace(out); - } - catch (IOException e) { - log.error("Failed to dump exception for `{}` to file `{}`.", keyOfDump, exceptionFileName, e); - } - - } - - /** - * Generates a valid file name from the key of the dump object, the store and the current time. - * However, it does not ensure that there is no file with such a name. - * - * Current implementation is `$unreadableDumpDir/$today/$store/$key.json` - */ - @NotNull - public static File makeDumpFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { - return unreadableDumpDir.toPath() - .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) - .resolve(storeName) - .resolve(sanitiseFileName(keyOfDump) + "." + DUMP_FILE_EXTENSION) - .toFile(); - - } - - private static String sanitiseFileName(@NotNull String name) { - return FileUtil.SAVE_FILENAME_REPLACEMENT_MATCHER.matcher(name).replaceAll("_"); - } - - /** - * Generates a valid file name from the key of the dump object, the store and the current time. - * However, it does not ensure that there is no file with such a name. - * - * Current implementation is `$unreadableDumpDir/$today/$store/$key.exception` - */ - @NotNull - public static File makeExceptionFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { - return unreadableDumpDir.toPath() - .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) - .resolve(storeName) - .resolve(sanitiseFileName(keyOfDump) + "." 
+ EXCEPTION_FILE_EXTENSION) - .toFile(); - - } - @Override public void fillCache() { } @@ -487,15 +489,15 @@ public static class IterationStatistic { private int totalProcessed; private int failedKeys; private int failedValues; - + public void incrTotalProcessed() { totalProcessed++; } - + public void incrFailedKeys() { failedKeys++; } - + public void incrFailedValues() { failedValues++; } From 52fb71103fb814d23d2b96be36d3b8547f6f303b Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 12:42:54 +0200 Subject: [PATCH 22/96] use ThreadLocal valueReader --- .../conquery/io/storage/xodus/stores/SerializingStore.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 4a6b3425f2..0eaf8a7063 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -69,7 +69,7 @@ public class SerializingStore implements Store { /** * Deserializer for values */ - private final ObjectReader valueReader; + private final ThreadLocal valueReader; /** * Optional validator used for serialization. @@ -119,7 +119,7 @@ public , CLASS_V extends Class> SerializingSto valueWriter = objectMapper.writerFor(this.valueType); - valueReader = objectMapper.readerFor(this.valueType); + valueReader = ThreadLocal.withInitial(() -> objectMapper.readerFor(this.valueType)); keyWriter = objectMapper.writerFor(keyType); @@ -215,7 +215,7 @@ public VALUE get(KEY key) { * Deserialize value with {@code valueReader}. 
*/ private VALUE readValue(ByteIterable value) { - return read(valueReader, value); + return read(valueReader.get(), value); } /** From 6860f8dac519291eedb00bb1202cd7303de40749 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 14:39:21 +0200 Subject: [PATCH 23/96] use ArrayBlockingQueue to limit buffered values --- .../io/storage/xodus/stores/SerializingStore.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 0eaf8a7063..ac169c1b5f 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -7,8 +7,9 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collection; +import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; +import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.Supplier; @@ -331,7 +332,10 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final ExecutorService executorService = Executors.newWorkStealingPool(10); + final ExecutorService executorService = new ThreadPoolExecutor(0, Integer.MAX_VALUE, + 60L, TimeUnit.SECONDS, + new ArrayBlockingQueue<>(100) + ); store.forEach((k, v) -> { executorService.submit(() -> { From 4ef65234d613168eb99a692fcf35e047e9978ab1 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 14:51:37 +0200 Subject: [PATCH 24/96] set an upper bound to threadcount --- .../conquery/io/storage/xodus/stores/SerializingStore.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index ac169c1b5f..4032c78c06 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -332,8 +332,8 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final ExecutorService executorService = new ThreadPoolExecutor(0, Integer.MAX_VALUE, - 60L, TimeUnit.SECONDS, + final ExecutorService executorService = new ThreadPoolExecutor(5, 20, + 10L, TimeUnit.SECONDS, new ArrayBlockingQueue<>(100) ); From 300c585897fafb8b5f9bb5af90231f8681d3b974 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 15:21:37 +0200 Subject: [PATCH 25/96] use SynchronousQueue --- .../io/storage/xodus/stores/SerializingStore.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 
4032c78c06..472a3fab88 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -7,8 +7,7 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collection; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.ExecutorService; +import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -332,9 +331,9 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final ExecutorService executorService = new ThreadPoolExecutor(5, 20, - 10L, TimeUnit.SECONDS, - new ArrayBlockingQueue<>(100) + final ThreadPoolExecutor executorService = new ThreadPoolExecutor(5, 20, + 10L, TimeUnit.SECONDS, + new SynchronousQueue<>() ); store.forEach((k, v) -> { From 2fa9761d9d3249329f683963cf1646ea9b8872a8 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 15:45:28 +0200 Subject: [PATCH 26/96] block provider when at capacity --- .../conquery/io/storage/xodus/stores/SerializingStore.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 472a3fab88..5140084be4 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -7,7 +7,8 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collection; -import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.Executors; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -333,7 +334,9 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final ThreadPoolExecutor executorService = new ThreadPoolExecutor(5, 20, 10L, TimeUnit.SECONDS, - new SynchronousQueue<>() + new ArrayBlockingQueue<>(100), + Executors.defaultThreadFactory(), + new ThreadPoolExecutor.CallerRunsPolicy() ); store.forEach((k, v) -> { From 4cd202a2e71fd857690d96c0b62a07ce461db930 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 4 Jul 2023 16:03:37 +0200 Subject: [PATCH 27/96] Change dragging text to moving from copying. This is the new behaviour --- .../js/external-forms/form-concept-group/FormConceptGroup.tsx | 2 +- frontend/src/localization/de.json | 3 ++- frontend/src/localization/en.json | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 885801c081..832c8a26f9 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -202,7 +202,7 @@ const FormConceptGroup = (props: Props) => { } dropzoneChildren={({ isOver, item }) => isOver && isMovedObject(item) - ? t("externalForms.common.concept.copying") + ? 
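Taken together, the executor changes in PATCH 23 through PATCH 26 settle on a standard backpressure idiom, sketched below with illustrative task bodies: a small fixed pool, a small bounded queue, and CallerRunsPolicy as the rejection handler. When both the workers and the queue are saturated, the submitting thread (in the patch, the thread iterating the Xodus store) executes the task itself, which throttles how quickly new entries are pulled from the store instead of buffering unbounded work.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BackpressurePoolSketch {

	public static void main(String[] args) throws InterruptedException {
		ThreadPoolExecutor pool = new ThreadPoolExecutor(
				5, 5,                        // fixed-size pool, mirroring the final settings in the patch
				60L, TimeUnit.SECONDS,
				new ArrayBlockingQueue<>(5), // small queue: at most 5 tasks buffered ahead of the workers
				Executors.defaultThreadFactory(),
				// When pool and queue are full, the submitting thread runs the task itself,
				// blocking the producer instead of accumulating unbounded work.
				new ThreadPoolExecutor.CallerRunsPolicy()
		);

		for (int i = 0; i < 100; i++) {
			final int task = i;
			pool.submit(() -> System.out.println(Thread.currentThread().getName() + " ran task " + task));
		}

		pool.shutdown();
		pool.awaitTermination(1, TimeUnit.MINUTES);
	}
}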
t("externalForms.common.concept.moving") : props.attributeDropzoneText } dropBetween={(i: number) => { diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json index 1b5843d915..3a62bb3054 100644 --- a/frontend/src/localization/de.json +++ b/frontend/src/localization/de.json @@ -311,7 +311,8 @@ "concept": { "expand": "Untergeordnete Attribute einbeziehen / wieder ausschließen.", "copyFrom": "Kopieren von ...", - "copying": "Kopieren" + "copying": "Kopieren", + "moving": "Verschieben" }, "clear": "Formular leeren", "clearConfirm": "Formular jetzt leeren" diff --git a/frontend/src/localization/en.json b/frontend/src/localization/en.json index 52d4c1f0ef..f158fb3b02 100644 --- a/frontend/src/localization/en.json +++ b/frontend/src/localization/en.json @@ -312,7 +312,7 @@ "concept": { "expand": "Include / exclude sub-attributes", "copyFrom": "Copy from ...", - "copying": "Copy" + "moving": "Move" }, "clear": "Clear form", "clearConfirm": "Clear form now" From 64e0aa6ce4af07e5318f87c6f9888dbea891041b Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 16:09:26 +0200 Subject: [PATCH 28/96] tune down size of queue --- .../conquery/io/storage/xodus/stores/SerializingStore.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 5140084be4..9f647215f0 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -332,9 +332,9 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final ThreadPoolExecutor executorService = new ThreadPoolExecutor(5, 20, + final ThreadPoolExecutor executorService = new ThreadPoolExecutor(5, 10, 10L, TimeUnit.SECONDS, - new ArrayBlockingQueue<>(100), + new ArrayBlockingQueue<>(10), Executors.defaultThreadFactory(), new ThreadPoolExecutor.CallerRunsPolicy() ); From 87aff4a1f83af14eebceb671aeccaa87da1f1b92 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 4 Jul 2023 16:25:18 +0200 Subject: [PATCH 29/96] Minor changes in readability --- .../form-components/DropzoneBetweenElements.tsx | 4 ++-- .../js/external-forms/form-concept-group/FormConceptGroup.tsx | 1 + frontend/src/localization/de.json | 1 - 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 8a6ff2945c..eb80639a8a 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -21,7 +21,7 @@ const Root = styled("div")<{ border-radius: ${({ theme }) => theme.borderRadius}; `; -const DropzoneContainer = styled("div")<{ +const Expander = styled("div")<{ height: number; }>` overflow: hidden; @@ -55,7 +55,7 @@ const BetweenElements = ({ return ( <> - {isOver && } + {isOver && } ); }; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 832c8a26f9..ad8ba7277e 100644 --- 
a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -91,6 +91,7 @@ interface Props { const DropzoneListItem = styled("div")` margin-top: -30px; `; + const Row = styled("div")` display: flex; align-items: center; diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json index 3a62bb3054..82e8c5d994 100644 --- a/frontend/src/localization/de.json +++ b/frontend/src/localization/de.json @@ -311,7 +311,6 @@ "concept": { "expand": "Untergeordnete Attribute einbeziehen / wieder ausschließen.", "copyFrom": "Kopieren von ...", - "copying": "Kopieren", "moving": "Verschieben" }, "clear": "Formular leeren", From 5bb55fca935e1d656d9a8578b6e63d8eaf0baf57 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 4 Jul 2023 16:27:37 +0200 Subject: [PATCH 30/96] use very minimal queue and threadPool specs --- .../conquery/io/storage/xodus/stores/SerializingStore.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 9f647215f0..46c85356c8 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -332,9 +332,9 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final ThreadPoolExecutor executorService = new ThreadPoolExecutor(5, 10, - 10L, TimeUnit.SECONDS, - new ArrayBlockingQueue<>(10), + final ThreadPoolExecutor executorService = new ThreadPoolExecutor(5, 5, + 60L, TimeUnit.SECONDS, + new ArrayBlockingQueue<>(5), Executors.defaultThreadFactory(), new ThreadPoolExecutor.CallerRunsPolicy() ); From 71dc9ea1e40e069dd5b59c4498626f69292690b3 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Fri, 7 Jul 2023 13:27:20 +0200 Subject: [PATCH 31/96] minor changes in codestyle and using less recomputation for css classes --- .../DropzoneBetweenElements.tsx | 20 +++++++------------ .../form-components/DropzoneList.tsx | 2 +- .../form-concept-group/FormConceptGroup.tsx | 2 -- 3 files changed, 8 insertions(+), 16 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index eb80639a8a..aed94e7734 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -9,32 +9,27 @@ interface Props { acceptedDropTypes: string[]; } -const Root = styled("div")<{ - height: number; -}>` +const RootHeightBase = 40; + +const Root = styled("div")` width: 100%; left: 0; top: -15px; - height: ${({ height }) => height + 40}px; right: 0; position: relative; border-radius: ${({ theme }) => theme.borderRadius}; `; -const Expander = styled("div")<{ - height: number; -}>` +const Expander = styled("div")` overflow: hidden; - margin-top: ${({ height }) => -height}px; display: block; - height: ${({ height }) => height}px; `; const BetweenElements = ({ acceptedDropTypes, onDrop, }: Props) => { - const [height, setHeight] = useState(40); + const [height, setHeight] = useState(0); const [{ 
isOver }, addZoneRef] = useDrop({ accept: acceptedDropTypes, @@ -43,7 +38,6 @@ const BetweenElements = ({ if (item.type === "CONCEPT_TREE_NODE") { return setHeight(item.dragContext.height); } - return setHeight(0); }, collect: (monitor) => ({ @@ -54,8 +48,8 @@ const BetweenElements = ({ return ( <> - - {isOver && } + + {isOver && } ); }; diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index cd1c91e2af..af55b0566d 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -19,7 +19,7 @@ import DropzoneBetweenElements from "./DropzoneBetweenElements"; const ListItem = styled("div")` position: relative; - padding: 0px 5px 0px 5px; + padding: 0 5px; box-shadow: 0 0 3px 0 rgba(0, 0, 0, 0.1); background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index ad8ba7277e..60e49afd0b 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -282,7 +282,6 @@ const FormConceptGroup = (props: Props) => { ); }} items={props.value.map((row, i) => ( - <> {props.renderRowPrefix ? props.renderRowPrefix({ @@ -421,7 +420,6 @@ const FormConceptGroup = (props: Props) => { )} /> - ))} /> {isCopyModalOpen && ( From aa0b743c72e568cdde0a8c8ddd2b3b466ed02558 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Fri, 7 Jul 2023 13:40:18 +0200 Subject: [PATCH 32/96] add height Factor, format --- .../DropzoneBetweenElements.tsx | 10 +- .../form-concept-group/FormConceptGroup.tsx | 246 +++++++++--------- 2 files changed, 129 insertions(+), 127 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index aed94e7734..466d164e54 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -10,6 +10,7 @@ interface Props { } const RootHeightBase = 40; +const HeightFactor = 0.5; const Root = styled("div")` width: 100%; @@ -36,7 +37,7 @@ const BetweenElements = ({ drop: onDrop, hover(item) { if (item.type === "CONCEPT_TREE_NODE") { - return setHeight(item.dragContext.height); + return setHeight(item.dragContext.height * HeightFactor); } }, @@ -48,8 +49,11 @@ const BetweenElements = ({ return ( <> - - {isOver && } + + {isOver && } ); }; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 60e49afd0b..a14c8ea8da 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -282,144 +282,142 @@ const FormConceptGroup = (props: Props) => { ); }} items={props.value.map((row, i) => ( - - {props.renderRowPrefix - ? props.renderRowPrefix({ - value: props.value, - onChange: props.onChange, - row, - i, - }) - : null} - {row.concepts.length > 1 && ( - - - {t("externalForms.common.connectedWith")}: - - { - props.onChange( - setValueProperties(props.value, i, { - connector: val, - }), + + {props.renderRowPrefix + ? 
props.renderRowPrefix({ + value: props.value, + onChange: props.onChange, + row, + i, + }) + : null} + {row.concepts.length > 1 && ( + + + {t("externalForms.common.connectedWith")}: + + { + props.onChange( + setValueProperties(props.value, i, { + connector: val, + }), + ); + }} + options={[ + { value: "OR", label: t("common.or") }, + { value: "AND", label: t("common.and") }, + ]} + /> + + )} + + props.onChange(addConcept(props.value, i, null)) + } + onRemoveClick={(j) => + props.onChange( + props.value && props.value[i].concepts.length === 1 + ? removeValue(props.value, i) + : removeConcept(props.value, i, j), + ) + } + items={row.concepts.map((concept, j) => + concept ? ( + + setEditedFormQueryNodePosition({ + valueIdx: i, + conceptIdx: j, + }) + } + deleteInForm={() => { + return props.onChange( + props.value[i].concepts.length === 1 + ? removeValue(props.value, i) + : removeConcept(props.value, i, j), ); }} - options={[ - { value: "OR", label: t("common.or") }, - { value: "AND", label: t("common.and") }, - ]} + expand={{ + onClick: () => + props.onChange( + onToggleIncludeSubnodes( + props.value, + i, + j, + !concept.includeSubnodes, + newValue, + ), + ), + expandable: + !props.disallowMultipleColumns && + hasConceptChildren(concept), + active: !!concept.includeSubnodes, + }} /> - - )} - - props.onChange(addConcept(props.value, i, null)) - } - onRemoveClick={(j) => - props.onChange( - props.value && props.value[i].concepts.length === 1 - ? removeValue(props.value, i) - : removeConcept(props.value, i, j), - ) - } - items={row.concepts.map((concept, j) => - concept ? ( - - setEditedFormQueryNodePosition({ + ) : ( + */ + acceptedDropTypes={DROP_TYPES} + onImportLines={(lines) => + onImportLines(lines, { valueIdx: i, conceptIdx: j }) + } + onDrop={(item: DragItemConceptTreeNode | DragItemFile) => { + if (item.type === "__NATIVE_FILE__") { + onDropFile(item.files[0], { valueIdx: i, conceptIdx: j, - }) - } - deleteInForm={() => { - return props.onChange( - props.value[i].concepts.length === 1 - ? removeValue(props.value, i) - : removeConcept(props.value, i, j), - ); - }} - expand={{ - onClick: () => - props.onChange( - onToggleIncludeSubnodes( - props.value, - i, - j, - !concept.includeSubnodes, - newValue, - ), - ), - expandable: - !props.disallowMultipleColumns && - hasConceptChildren(concept), - active: !!concept.includeSubnodes, - }} - /> - ) : ( - */ - acceptedDropTypes={DROP_TYPES} - onImportLines={(lines) => - onImportLines(lines, { valueIdx: i, conceptIdx: j }) + }); + + return; } - onDrop={( - item: DragItemConceptTreeNode | DragItemFile, - ) => { - if (item.type === "__NATIVE_FILE__") { - onDropFile(item.files[0], { - valueIdx: i, - conceptIdx: j, - }); - - return; - } - - if (props.isValidConcept && !props.isValidConcept(item)) - return null; - - if (isMovedObject(item)) { - return props.onChange( - setConcept( - getValues(props.fieldName), - i, - j, - copyConcept(item), - ), - ); - } + if (props.isValidConcept && !props.isValidConcept(item)) + return null; + + if (isMovedObject(item)) { return props.onChange( setConcept( - props.value, + getValues(props.fieldName), i, j, - initializeConcept(item, defaults, tableConfig), + copyConcept(item), ), ); - }} - > - {({ isOver, item }) => - isOver && isMovedObject(item) - ? 
t("externalForms.common.concept.copying") - : props.conceptDropzoneText } - - ), - )} - /> - + + return props.onChange( + setConcept( + props.value, + i, + j, + initializeConcept(item, defaults, tableConfig), + ), + ); + }} + > + {({ isOver, item }) => + isOver && isMovedObject(item) + ? t("externalForms.common.concept.copying") + : props.conceptDropzoneText + } + + ), + )} + /> + ))} /> {isCopyModalOpen && ( From 448700c3ae22f9d85a8509f18265bc245a926872 Mon Sep 17 00:00:00 2001 From: Marco Korinth Date: Wed, 12 Jul 2023 15:56:17 +0200 Subject: [PATCH 33/96] feat: admin-ui/jobs redesign and dynamic data loading --- .../conquery/resources/admin/ui/jobs.html.ftl | 223 ++++++++++++------ .../admin/ui/templates/accordion.html.ftl | 14 +- .../admin/ui/templates/breadcrumbs.html.ftl | 2 +- 3 files changed, 162 insertions(+), 77 deletions(-) diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/jobs.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/jobs.html.ftl index 26e0847442..a31b245c3d 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/jobs.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/jobs.html.ftl @@ -1,71 +1,156 @@ <#import "templates/template.html.ftl" as layout> +<#import "templates/breadcrumbs.html.ftl" as breadcrumbs> +<#import "templates/accordion.html.ftl" as accordion> + <@layout.layout> - <#list c as node, status> -

-
-
-
-
- ${node} - - updated ${status.ageString} ago - ${status.jobs?size} - -
-
- - <#list status.jobs as job> - - - - - - -
- ${job.label} - -
-
-
-
- <#if !job.cancelled> - - <#else> -
Cancelled
- -
-
-
-
-
-
- - -
-
- -
- -
-
- \ No newline at end of file + + + <@breadcrumbs.breadcrumbs + labels=["Jobs"] + /> + +
+
+ + +
+
+ + <@accordion.accordionGroup id="nodesAccordionGroup" class="mt-3"> + + + +
+ <@accordion.accordion summary="" id="categoryTemplate"> + +
+
+ +
+ updated ago + +
+
+
+
+ +
No jobs in this node
+ +
+
+
+
+
+
+
+
+
diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/accordion.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/accordion.html.ftl index 21b0a695be..c6ba5b66ad 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/accordion.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/accordion.html.ftl @@ -1,10 +1,10 @@ -<#macro accordionGroup class="" style=""> -
+<#macro accordionGroup id="" class="" style=""> +
<#nested />
-<#macro accordion summary infoText="" class="" style=""> -
+<#macro accordion summary infoText="" id="" class="" style=""> +
${summary}
-
${infoText}
+
${infoText}
-
+
<#nested />
@@ -28,4 +28,4 @@ }
- \ No newline at end of file + diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/breadcrumbs.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/breadcrumbs.html.ftl index 9e8afc8118..5086348b3d 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/breadcrumbs.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/breadcrumbs.html.ftl @@ -1,4 +1,4 @@ -<#macro breadcrumbs labels links class=""> +<#macro breadcrumbs labels links=[] class="">
)}
From 15fa5cd7dc9e8d1fc54dc7381bb8b49d5d91244d Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 14 Aug 2023 14:38:21 +0200 Subject: [PATCH 45/96] simplify code and prevent bleeding --- .../DropzoneBetweenElements.tsx | 18 ++-- .../form-concept-group/FormConceptNode.tsx | 83 ++++++++++--------- 2 files changed, 52 insertions(+), 49 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 398efd8fe7..f6319279d2 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -11,7 +11,7 @@ interface Props { const RootHeightBase = 30; const LineHeight = 3; - +const MarginTopOffsetOver = 5; const Root = styled("div")` width: 100%; left: 0; @@ -23,13 +23,13 @@ const Root = styled("div")` const Line = styled("div")` overflow: hidden; display: block; - background-color: ${({ theme }) => theme.col.blueGrayLight}; + background-color: ${({ theme }) => theme.col.blueGrayDark}; margin: 1px 0; height: ${LineHeight}px; border-radius: 2px; `; -const BetweenElements = ({ +const DropzoneBetweenElements = ({ acceptedDropTypes, onDrop, lastElement, @@ -44,10 +44,9 @@ const BetweenElements = ({ }); const rootHeightMultiplier = lastElement ? 0.5 : 1; - const rootDefaultMarginTop = (lastElement ? -15 : -5) - LineHeight; - const rootOverMarginTop = lastElement ? -23 : -10; + const rootMarginTop = (lastElement ? -15 : -5) - LineHeight; const rootDefaultTop = lastElement ? -5 : -10; - const rootOverTop = (lastElement ? -2 : -15) - LineHeight; + const rootOverTop = (lastElement ? -5 : -15) - LineHeight; return ( <> @@ -57,9 +56,8 @@ const BetweenElements = ({ style={{ height: RootHeightBase * rootHeightMultiplier + - (isOver && !lastElement ? 0 : LineHeight) + - (lastElement ? LineHeight + 4 : 0), - marginTop: isOver ? rootOverMarginTop : rootDefaultMarginTop, + (isOver ? 0 : LineHeight), + marginTop: (isOver ? MarginTopOffsetOver : 0) + rootMarginTop, top: isOver ? rootOverTop : rootDefaultTop, }} > @@ -67,4 +65,4 @@ const BetweenElements = ({ ); }; -export default BetweenElements; +export default DropzoneBetweenElements; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx index 55298d813a..087f49e7c6 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx @@ -33,7 +33,6 @@ const Root = styled("div")<{ display: grid; grid-template-columns: 1fr auto; font-size: ${({ theme }) => theme.font.sm}; - margin-top: 5px; `; const Label = styled("p")` @@ -69,6 +68,10 @@ const RootNode = styled("p")` word-break: break-word; `; +const TopMargin = styled("div")` + margin-top: 5px; +`; + interface PropsT { valueIdx: number; conceptIdx: number; @@ -131,46 +134,48 @@ const FormConceptNode: FC = ({ : undefined; return ( - canNodeBeDropped(conceptNode, item)} - highlightDroppable - > - { - ref.current = instance; - drag(instance); - }} - active={hasNonDefaultSettings || hasFilterValues} - onClick={onClick} + + canNodeBeDropped(conceptNode, item)} + highlightDroppable > -
- - <> - {rootNodeLabel && {rootNodeLabel}} - - {conceptNode && !!conceptNode.description && ( - {conceptNode.description} - )} - - -
- - {expand && expand.expandable && ( - - { - e.stopPropagation(); - expand.onClick(); - }} - /> + { + ref.current = instance; + drag(instance); + }} + active={hasNonDefaultSettings || hasFilterValues} + onClick={onClick} + > +
+ + <> + {rootNodeLabel && {rootNodeLabel}} + + {conceptNode && !!conceptNode.description && ( + {conceptNode.description} + )} + - )} - - - +
+ + {expand && expand.expandable && ( + + { + e.stopPropagation(); + expand.onClick(); + }} + /> + + )} + +
+
+
); }; From 0ac13f2ffe1c25b9f93ae5eadbc35e6c591ed63e Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Mon, 14 Aug 2023 15:27:37 +0200 Subject: [PATCH 46/96] Send WorkerMessage in ForwardToWorker gzipped --- .../network/specific/ForwardToWorker.java | 52 ++++++++++++------- 1 file changed, 33 insertions(+), 19 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToWorker.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToWorker.java index 3703763c2c..daba9b5fbc 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToWorker.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/ForwardToWorker.java @@ -1,6 +1,12 @@ package com.bakdata.conquery.models.messages.network.specific; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; import java.util.Objects; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; @@ -14,7 +20,6 @@ import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.progressreporter.ProgressReporter; import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import lombok.AccessLevel; @@ -25,7 +30,8 @@ import lombok.ToString; /** - * Messages are sent serialized and only deserialized when they are being processed. This ensures that messages that were sent just shortly before to setup state later messages depend upon is correct. + * @implNote Messages are sent serialized and only deserialized when they are being processed. This ensures that messages that were sent just shortly before to setup state later messages depend upon is correct. + * @implNote Messages are additionally sent gzipped, to avoid hogging memory with long queues. */ @CPSType(id = "FORWARD_TO_WORKER", base = NetworkMessage.class) @RequiredArgsConstructor(access = AccessLevel.PROTECTED) @@ -33,45 +39,53 @@ @ToString(of = {"workerId", "text"}) public class ForwardToWorker extends MessageToShardNode implements SlowMessage { - @SneakyThrows(JsonProcessingException.class) + private final WorkerId workerId; + private final byte[] messageRaw; + // We cache these on the sender side. + @Getter(onMethod_ = @JsonIgnore(false)) + private final boolean slowMessage; + private final String text; + @JsonIgnore + @Setter + private ProgressReporter progressReporter; + public static ForwardToWorker create(WorkerId worker, WorkerMessage message, ObjectWriter writer) { return new ForwardToWorker( worker, - writer.writeValueAsBytes(message), + serializeMessage(message, writer), true, message.toString() ); } - private final WorkerId workerId; - private final byte[] messageRaw; + @SneakyThrows(IOException.class) + private static byte[] serializeMessage(WorkerMessage message, ObjectWriter writer) { + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (OutputStream outputStream = new GZIPOutputStream(baos)) { + writer.writeValue(outputStream, message); + } - // We cache these on the sender side. 
- @Getter(onMethod_ = @JsonIgnore(false)) - private final boolean slowMessage; - private final String text; + return baos.toByteArray(); + } - @JsonIgnore - @Setter - private ProgressReporter progressReporter; + private static WorkerMessage deserializeMessage(byte[] messageRaw, ObjectMapper mapper) throws java.io.IOException { + return mapper.readerFor(WorkerMessage.class).readValue(new GZIPInputStream(new ByteArrayInputStream(messageRaw))); + } @Override public void react(ShardNodeNetworkContext context) throws Exception { - Worker worker = Objects.requireNonNull(context.getWorkers().getWorker(workerId)); + final Worker worker = Objects.requireNonNull(context.getWorkers().getWorker(workerId)); ConqueryMDC.setLocation(worker.toString()); // Jobception: this is to ensure that no subsequent message is deserialized before one message is processed - worker.getJobManager().addSlowJob(new SimpleJob("Deserialize and process WorkerMessage", () -> { + worker.getJobManager().addSlowJob(new SimpleJob("Process %s".formatted(getText()), () -> { - WorkerMessage message = deserializeMessage(messageRaw, worker.getCommunicationMapper()); + final WorkerMessage message = deserializeMessage(messageRaw, worker.getCommunicationMapper()); message.setProgressReporter(progressReporter); message.react(worker); })); } - private static WorkerMessage deserializeMessage(byte[] messageRaw, ObjectMapper binaryMapper) throws java.io.IOException { - return binaryMapper.readerFor(WorkerMessage.class).readValue(messageRaw); - } } From 9ef3448394f4248a04cc42fb118a65f198f03b96 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Mon, 14 Aug 2023 16:14:31 +0200 Subject: [PATCH 47/96] gzip compresses SerializingStore --- .../xodus/stores/SerializingStore.java | 343 +++++++++--------- 1 file changed, 181 insertions(+), 162 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 6cf3fcddbd..589120ba35 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -1,7 +1,11 @@ package com.bakdata.conquery.io.storage.xodus.stores; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.io.PrintStream; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; @@ -9,6 +13,8 @@ import java.util.Collection; import java.util.function.Function; import java.util.function.Supplier; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; import javax.validation.Validator; @@ -18,7 +24,6 @@ import com.bakdata.conquery.models.config.XodusStoreFactory; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.util.io.FileUtil; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; @@ -126,7 +131,7 @@ public , CLASS_V extends Class> SerializingSto unreadableValuesDumpDir = unreadableDataDumpDirectory; if (shouldDumpUnreadables()) { - if(!unreadableValuesDumpDir.exists() && !unreadableValuesDumpDir.mkdirs()) { + if (!unreadableValuesDumpDir.exists() && 
!unreadableValuesDumpDir.mkdirs()) { throw new IllegalStateException("Could not create dump directory: " + unreadableValuesDumpDir); } else if (!unreadableValuesDumpDir.isDirectory()) { @@ -151,19 +156,56 @@ public void add(KEY key, VALUE value) { store.add(writeKey(key), writeValue(value)); } + /** + * Serialize key with {@code keyWriter}. + */ + private ByteIterable writeKey(KEY key) { + return write(key, keyWriter); + } + + /** + * Serialize value with {@code valueWriter}. + */ + private ByteIterable writeValue(VALUE value) { + return write(value, valueWriter); + } + + /** + * Try writing object with writer. + */ + private ByteIterable write(Object obj, ObjectWriter writer) { + try { + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + + try (final OutputStream outputStream = new GZIPOutputStream(baos)) { + writer.writeValue(outputStream, obj); + } + + baos.close(); + + final byte[] bytes = baos.toByteArray(); + + return new ArrayByteIterable(bytes); + } + catch (IOException e) { + throw new RuntimeException("Failed to write " + obj, e); + } + } + @Override public VALUE get(KEY key) { final ByteIterable binValue = store.get(writeKey(key)); try { - return readValue(binValue); - } catch (Exception e) { + return readValue(binValue); + } + catch (Exception e) { - if(unreadableValuesDumpDir != null) { - dumpToFile(binValue, key.toString(), e, unreadableValuesDumpDir, store.getName(), objectMapper); + if (unreadableValuesDumpDir != null) { + dumpToFile(binValue.getBytesUnsafe(), key.toString(), e, unreadableValuesDumpDir, store.getName(), objectMapper); } - if(removeUnreadablesFromUnderlyingStore) { + if (removeUnreadablesFromUnderlyingStore) { remove(key); // Null seems to be an acceptable return value in this case return null; @@ -175,6 +217,124 @@ public VALUE get(KEY key) { } } + /** + * Deserialize value with {@code valueReader}. + */ + private VALUE readValue(ByteIterable value) { + return read(valueReader, value); + } + + /** + * Dumps the content of an unreadable value to a file as a json (it tries to parse it as an object and than tries to dump it as a json). + * + * @param gzippedObj The object to dump. + * @param keyOfDump The key under which the unreadable value is accessible. It is used for the file name. + * @param reason The exception causing us to dump the file + * @param unreadableDumpDir The director to dump to. The method assumes that the directory exists and is okay to write to. + * @param storeName The name of the store which is also used in the dump file name. + */ + private static void dumpToFile(@NonNull byte[] gzippedObj, @NonNull String keyOfDump, Exception reason, @NonNull File unreadableDumpDir, String storeName, ObjectMapper objectMapper) { + // Create dump filehandle + final File dumpfile = makeDumpFileName(keyOfDump, unreadableDumpDir, storeName); + final File exceptionFileName = makeExceptionFileName(keyOfDump, unreadableDumpDir, storeName); + + if (dumpfile.exists() || exceptionFileName.exists()) { + log.trace("Abort dumping of file {} because it already exists.", dumpfile); + return; + } + + if (!dumpfile.getParentFile().exists() && !dumpfile.getParentFile().mkdirs()) { + throw new IllegalStateException("Could not create `%s`.".formatted(dumpfile.getParentFile())); + } + + //TODO FK: dump in a separate thread so we are not blocking the reader thread. 
+ + // Write json + try { + log.info("Dumping value of key {} to {} (because it cannot be deserialized anymore).", keyOfDump, dumpfile.getCanonicalPath()); + + final JsonNode dump = objectMapper.readerFor(JsonNode.class).readValue(debugUnGzip(gzippedObj)); + Jackson.MAPPER.writer().writeValue(dumpfile, dump); + } + catch (IOException e) { + log.error("Failed to dump unreadable value of key `{}` to file `{}`", keyOfDump, dumpfile, e); + } + + try (PrintStream out = new PrintStream(exceptionFileName)) { + reason.printStackTrace(out); + } + catch (IOException e) { + log.error("Failed to dump exception for `{}` to file `{}`.", keyOfDump, exceptionFileName, e); + } + + } + + private static byte[] debugUnGzip(byte[] bytes) throws IOException { + return new GZIPInputStream(new ByteArrayInputStream(bytes)).readAllBytes(); + } + + @Override + public void remove(KEY key) { + log.trace("Removing value to key {} from Store[{}]", key, store.getName()); + store.remove(writeKey(key)); + } + + /** + * Try read value with reader. + */ + private T read(ObjectReader reader, ByteIterable obj) { + if (obj == null) { + return null; + } + try (final InputStream inputStream = new GZIPInputStream(new ByteArrayInputStream(obj.getBytesUnsafe(), 0, obj.getLength()))) { + return reader.readValue(inputStream); + } + catch (IOException e) { + try { + throw new RuntimeException("Failed to read " + JacksonUtil.toJsonDebug(debugUnGzip(obj.getBytesUnsafe())), e); + } + catch (IOException ex) { + throw new RuntimeException(ex); + } + } + } + + /** + * Generates a valid file name from the key of the dump object, the store and the current time. + * However, it does not ensure that there is no file with such a name. + *

+ * Current implementation is `$unreadableDumpDir/$today/$store/$key.json` + */ + @NotNull + public static File makeDumpFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { + return unreadableDumpDir.toPath() + .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) + .resolve(storeName) + .resolve(sanitiseFileName(keyOfDump) + "." + DUMP_FILE_EXTENSION) + .toFile(); + + } + + /** + * Generates a valid file name from the key of the dump object, the store and the current time. + * However, it does not ensure that there is no file with such a name. + *

+ * Current implementation is `$unreadableDumpDir/$today/$store/$key.exception` + */ + @NotNull + public static File makeExceptionFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { + return unreadableDumpDir.toPath() + .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) + .resolve(storeName) + .resolve(sanitiseFileName(keyOfDump) + "." + EXCEPTION_FILE_EXTENSION) + .toFile(); + + } + + private static String sanitiseFileName(@NotNull String name) { + return FileUtil.SAVE_FILENAME_REPLACEMENT_MATCHER.matcher(name).replaceAll("_"); + } + /** * Iterates a given consumer over the entries of this store. * Depending on the {@link XodusStoreFactory} corrupt entries may be dump to a file and/or removed from the store. @@ -228,15 +388,14 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { }); // Print some statistics final int total = result.getTotalProcessed(); - log.debug( - String.format( - "While processing store %s:\n\tEntries processed:\t%d\n\tKey read failure:\t%d (%.2f%%)\n\tValue read failure:\t%d (%.2f%%)", + log.debug("While processing store %s:\n\tEntries processed:\t%d\n\tKey read failure:\t%d (%.2f%%)\n\tValue read failure:\t%d (%.2f%%)".formatted( store.getName(), total, result.getFailedKeys(), total > 0 ? (float) result.getFailedKeys() / total * 100 : 0, result.getFailedValues(), - total > 0 ? (float) result.getFailedValues() / total * 100 : 0)); + total > 0 ? (float) result.getFailedValues() / total * 100 : 0 + )); // Remove corrupted entries from the store if configured so if (removeUnreadablesFromUnderlyingStore) { @@ -266,52 +425,12 @@ private TYPE getDeserializedAndDumpFailed(ByteIterable serial, Function T read(ObjectReader reader, ByteIterable obj) { - if (obj == null) { - return null; - } - try { - return reader.readValue(obj.getBytesUnsafe(), 0, obj.getLength()); - } - catch (IOException e) { - throw new RuntimeException("Failed to read " + JacksonUtil.toJsonDebug(obj.getBytesUnsafe()), e); - } - } - - /** - * Dumps the content of an unreadable value to a file as a json (it tries to parse it as an object and than tries to dump it as a json). - * - * @param obj The object to dump. - * @param keyOfDump The key under which the unreadable value is accessible. It is used for the file name. - * @param reason The exception causing us to dump the file - * @param unreadableDumpDir The director to dump to. The method assumes that the directory exists and is okay to write to. - * @param storeName The name of the store which is also used in the dump file name. - */ - private static void dumpToFile(@NonNull ByteIterable obj, @NonNull String keyOfDump, Exception reason, @NonNull File unreadableDumpDir, String storeName, ObjectMapper objectMapper) { - // Create dump filehandle - final File dumpfile = makeDumpFileName(keyOfDump, unreadableDumpDir, storeName); - final File exceptionFileName = makeExceptionFileName(keyOfDump, unreadableDumpDir, storeName); - - if (dumpfile.exists() || exceptionFileName.exists()) { - log.trace("Abort dumping of file {} because it already exists.", dumpfile); - return; - } - - if(!dumpfile.getParentFile().exists() && !dumpfile.getParentFile().mkdirs()){ - throw new IllegalStateException("Could not create `%s`.".formatted(dumpfile.getParentFile())); - } - - //TODO FK: dump in a separate thread so we are not blocking the reader thread. 
- - // Write json - try { - log.info("Dumping value of key {} to {} (because it cannot be deserialized anymore).", keyOfDump, dumpfile.getCanonicalPath()); - - final JsonNode dump = objectMapper.readerFor(JsonNode.class).readValue(obj.getBytesUnsafe(), 0, obj.getLength()); - Jackson.MAPPER.writer().writeValue(dumpfile, dump); - } - catch (IOException e) { - log.error("Failed to dump unreadable value of key `{}` to file `{}`", keyOfDump, dumpfile, e); + @Override + public void update(KEY key, VALUE value) { + if (!valueType.isInstance(value)) { + throw new IllegalStateException("The element %s is not of the required type %s".formatted(value, valueType)); } - try(PrintStream out = new PrintStream(exceptionFileName)) { - reason.printStackTrace(out); - } - catch (IOException e) { - log.error("Failed to dump exception for `{}` to file `{}`.", keyOfDump, exceptionFileName, e); + if (validateOnWrite) { + ValidatorHelper.failOnError(log, validator.validate(value)); } - } - - /** - * Generates a valid file name from the key of the dump object, the store and the current time. - * However, it does not ensure that there is no file with such a name. - * - * Current implementation is `$unreadableDumpDir/$today/$store/$key.json` - */ - @NotNull - public static File makeDumpFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { - return unreadableDumpDir.toPath() - .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) - .resolve(storeName) - .resolve(sanitiseFileName(keyOfDump) + "." + DUMP_FILE_EXTENSION) - .toFile(); - - } - - private static String sanitiseFileName(@NotNull String name) { - return FileUtil.SAVE_FILENAME_REPLACEMENT_MATCHER.matcher(name).replaceAll("_"); - } - - /** - * Generates a valid file name from the key of the dump object, the store and the current time. - * However, it does not ensure that there is no file with such a name. - * - * Current implementation is `$unreadableDumpDir/$today/$store/$key.exception` - */ - @NotNull - public static File makeExceptionFileName(@NotNull String keyOfDump, @NotNull File unreadableDumpDir, @NotNull String storeName) { - return unreadableDumpDir.toPath() - .resolve(DateTimeFormatter.BASIC_ISO_DATE.format(LocalDateTime.now())) - .resolve(storeName) - .resolve(sanitiseFileName(keyOfDump) + "." 
+ EXCEPTION_FILE_EXTENSION) - .toFile(); - + store.update(writeKey(key), writeValue(value)); } @Override @@ -471,15 +490,15 @@ public static class IterationStatistic { private int totalProcessed; private int failedKeys; private int failedValues; - + public void incrTotalProcessed() { totalProcessed++; } - + public void incrFailedKeys() { failedKeys++; } - + public void incrFailedValues() { failedValues++; } From 8d812845a8ee30cbf270b0af5bc5579159080bbd Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 14 Aug 2023 16:33:18 +0200 Subject: [PATCH 48/96] Simplify code - fix jumping --- .../DropzoneBetweenElements.tsx | 15 ++-- .../form-concept-group/FormConceptGroup.tsx | 71 ++++++---------- .../form-concept-group/FormConceptNode.tsx | 82 +++++++++---------- 3 files changed, 74 insertions(+), 94 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index f6319279d2..d6d9a0511a 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -11,7 +11,6 @@ interface Props { const RootHeightBase = 30; const LineHeight = 3; -const MarginTopOffsetOver = 5; const Root = styled("div")` width: 100%; left: 0; @@ -29,7 +28,9 @@ const Line = styled("div")` border-radius: 2px; `; -const DropzoneBetweenElements = ({ +const DropzoneBetweenElements = < + DroppableObject extends PossibleDroppableObject, +>({ acceptedDropTypes, onDrop, lastElement, @@ -44,9 +45,10 @@ const DropzoneBetweenElements = @@ -56,8 +58,9 @@ const DropzoneBetweenElements = diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index e540ec3a1a..2cce604693 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -107,6 +107,10 @@ const SxDescription = styled(Description)` font-size: ${({ theme }) => theme.font.xs}; `; +const SxFormConceptNode = styled(FormConceptNode)` + margin-top: 5px; +`; + export interface EditedFormQueryNodePosition { valueIdx: number; conceptIdx: number; @@ -208,50 +212,36 @@ const FormConceptGroup = (props: Props) => { if (props.isValidConcept && !props.isValidConcept(item)) return null; + const concept = isMovedObject(item) + ? copyConcept(item) + : initializeConcept(item, defaults, tableConfig); + let newPropsValue = props.value; + let insertIndex = i; if (isMovedObject(item)) { - let insertIndex = - i > item.dragContext.movedFromAndIdx && - item.dragContext.movedFromOrIdx === 0 - ? i - 1 - : i; - if (item.dragContext.movedFromFieldName === props.fieldName) { - const updatedValue = - props.value[item.dragContext.movedFromAndIdx].concepts - .length === 1 - ? removeValue(props.value, item.dragContext.movedFromAndIdx) + const { movedFromFieldName, movedFromAndIdx, movedFromOrIdx } = + item.dragContext; + + if (movedFromFieldName === props.fieldName) { + if (i > movedFromAndIdx && movedFromOrIdx === 0) { + insertIndex = i - 1; + } + newPropsValue = + props.value[movedFromAndIdx].concepts.length === 1 + ? 
removeValue(props.value, movedFromAndIdx) : removeConcept( props.value, - item.dragContext.movedFromAndIdx, - item.dragContext.movedFromOrIdx, + movedFromAndIdx, + movedFromOrIdx, ); - return props.onChange( - addConcept( - insertValue(updatedValue, insertIndex, newValue), - insertIndex, - copyConcept(item), - ), - ); } else { if (exists(item.dragContext.deleteFromOtherField)) { item.dragContext.deleteFromOtherField(); } - - return props.onChange( - addConcept( - insertValue(props.value, insertIndex, newValue), - insertIndex, - copyConcept(item), - ), - ); } } return props.onChange( - addConcept( - insertValue(props.value, i, newValue), - i, - initializeConcept(item, defaults, tableConfig), - ), + addConcept(insertValue(newPropsValue, insertIndex, newValue), insertIndex, concept), ); }; }} @@ -274,21 +264,14 @@ const FormConceptGroup = (props: Props) => { if (props.isValidConcept && !props.isValidConcept(item)) return; - if (isMovedObject(item)) { - return props.onChange( - addConcept( - addValue(props.value, newValue), - props.value.length, - copyConcept(item), - ), - ); - } - + const concept = isMovedObject(item) + ? copyConcept(item) + : initializeConcept(item, defaults, tableConfig); return props.onChange( addConcept( addValue(props.value, newValue), props.value.length, // Assuming the last index has increased after addValue - initializeConcept(item, defaults, tableConfig), + concept, ), ); }} @@ -338,7 +321,7 @@ const FormConceptGroup = (props: Props) => { } items={row.concepts.map((concept, j) => concept ? ( - = ({ : undefined; return ( - - canNodeBeDropped(conceptNode, item)} - highlightDroppable + canNodeBeDropped(conceptNode, item)} + highlightDroppable + > + { + ref.current = instance; + drag(instance); + }} + active={hasNonDefaultSettings || hasFilterValues} + onClick={onClick} > - { - ref.current = instance; - drag(instance); - }} - active={hasNonDefaultSettings || hasFilterValues} - onClick={onClick} - > -

- - <> - {rootNodeLabel && {rootNodeLabel}} - - {conceptNode && !!conceptNode.description && ( - {conceptNode.description} - )} - +
+ + <> + {rootNodeLabel && {rootNodeLabel}} + + {conceptNode && !!conceptNode.description && ( + {conceptNode.description} + )} + + +
+ + {expand && expand.expandable && ( + + { + e.stopPropagation(); + expand.onClick(); + }} + /> -
- - {expand && expand.expandable && ( - - { - e.stopPropagation(); - expand.onClick(); - }} - /> - - )} - -
-
- + )} + + + ); }; From a55e791a01567781b7b0387425baeb736d314eac Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 14 Aug 2023 16:35:43 +0200 Subject: [PATCH 49/96] format --- .../external-forms/form-concept-group/FormConceptGroup.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 2cce604693..bf381b6db3 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -241,7 +241,11 @@ const FormConceptGroup = (props: Props) => { } return props.onChange( - addConcept(insertValue(newPropsValue, insertIndex, newValue), insertIndex, concept), + addConcept( + insertValue(newPropsValue, insertIndex, newValue), + insertIndex, + concept, + ), ); }; }} From 8ebadddeba41a13b9115c70070301df93adab1ec Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Mon, 14 Aug 2023 17:52:12 +0200 Subject: [PATCH 50/96] introduce a set of clunky parameters to allow MigrateCommand from non-gzipped stores to gzipped stores. --- .../conquery/commands/MigrateCommand.java | 105 ++++++++++++------ 1 file changed, 74 insertions(+), 31 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java b/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java index 416ea09024..f63c78daf9 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java @@ -1,14 +1,21 @@ package com.bakdata.conquery.commands; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.util.Arrays; import java.util.List; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.XodusStoreFactory; import com.bakdata.conquery.util.io.ConqueryMDC; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -27,11 +34,13 @@ import jetbrains.exodus.env.StoreConfig; import jetbrains.exodus.env.Transaction; import kotlin.jvm.functions.Function4; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import net.sourceforge.argparse4j.impl.Arguments; import net.sourceforge.argparse4j.inf.Namespace; import net.sourceforge.argparse4j.inf.Subparser; import org.codehaus.groovy.control.CompilerConfiguration; +import org.jetbrains.annotations.NotNull; /** * Command allowing script based migration of databases. Especially useful for data that cannot be easily recreated after reimports, such as {@link com.bakdata.conquery.models.auth.entities.User}s and {@link com.bakdata.conquery.models.execution.ManagedExecution}s. 
@@ -56,7 +65,6 @@ @Slf4j public class MigrateCommand extends ConqueryCommand { - public MigrateCommand() { super("migrate", "Run a migration script on a store."); } @@ -75,6 +83,18 @@ public void configure(Subparser subparser) { .required(true) .type(Arguments.fileType()); + subparser + .addArgument("--in-gzip") + .help("If true, values are ungzipped before deserialization.") + .setDefault(true) + .type(Arguments.booleanType()); + + subparser + .addArgument("--out-gzip") + .help("If true, values are gzipped before writing.") + .setDefault(true) + .type(Arguments.booleanType()); + subparser .addArgument("--script") .help("Migration Script returning a closure implementing MigrationScriptFactory. See supplementary example.groovy for details.\nSignature: String env, String store, String key, ObjectNode value -> return new Tuple(key,value)") @@ -88,6 +108,10 @@ protected void run(io.dropwizard.setup.Environment environment, Namespace namesp final File inStoreDirectory = namespace.get("in"); final File outStoreDirectory = namespace.get("out"); + final boolean inGzip = namespace.getBoolean("in-gzip"); + final boolean outGzip = namespace.getBoolean("out-gzip"); + + final long logsize = ((XodusStoreFactory) configuration.getStorage()).getXodus().getLogFileSize().toKilobytes(); @@ -99,11 +123,11 @@ protected void run(io.dropwizard.setup.Environment environment, Namespace namesp } // Create Groovy Shell and parse script - CompilerConfiguration config = new CompilerConfiguration(); + final CompilerConfiguration config = new CompilerConfiguration(); config.setScriptBaseClass(MigrationScriptFactory.class.getName()); - GroovyShell groovy = new GroovyShell(config); + final GroovyShell groovy = new GroovyShell(config); - MigrationScriptFactory factory = (MigrationScriptFactory) groovy.parse(In.file((File) namespace.get("script")).readAll()); + final MigrationScriptFactory factory = (MigrationScriptFactory) groovy.parse(In.file((File) namespace.get("script")).readAll()); final Function4 migrator = factory.run(); @@ -116,25 +140,12 @@ protected void run(io.dropwizard.setup.Environment environment, Namespace namesp final File environmentDirectory = new File(outStoreDirectory, xenv.getName()); environmentDirectory.mkdirs(); - processEnvironment(xenv, logsize, environmentDirectory, migrator, mapper); + processEnvironment(xenv, logsize, environmentDirectory, migrator, mapper, inGzip, outGzip); }); } - - /** - * Class defining the interface for the Groovy-Script. 
- */ - public abstract static class MigrationScriptFactory extends Script { - - /** - * Environment -> Store -> Key -> Value -> (Key, Value) - */ - @Override - public abstract Function4 run(); - } - - private void processEnvironment(File inStoreDirectory, long logSize, File outStoreDirectory, Function4 migrator, ObjectMapper mapper) { + private void processEnvironment(File inStoreDirectory, long logSize, File outStoreDirectory, Function4 migrator, ObjectMapper mapper, boolean inGzip, boolean outGzip) { final jetbrains.exodus.env.Environment inEnvironment = Environments.newInstance( inStoreDirectory, new EnvironmentConfig().setLogFileSize(logSize) @@ -175,7 +186,7 @@ private void processEnvironment(File inStoreDirectory, long logSize, File outSto continue; } - doMigrate(inStore, outStore, migrator, mapper); + migrateStore(inStore, outStore, migrator, mapper, inGzip, outGzip); log.info("Done writing {}.", store); } @@ -191,7 +202,7 @@ private void processEnvironment(File inStoreDirectory, long logSize, File outSto inEnvironment.close(); } - private void doMigrate(Store inStore, Store outStore, Function4 migrator, ObjectMapper mapper) { + private void migrateStore(Store inStore, Store outStore, Function4 migrator, ObjectMapper mapper, boolean inGzip, boolean outGzip) { final Environment inEnvironment = inStore.getEnvironment(); final Environment outEnvironment = outStore.getEnvironment(); @@ -211,13 +222,12 @@ private void doMigrate(Store inStore, Store outStore, Function4 migrated = - migrator.invoke(inEnvironment.getLocation(), inStore.getName(), key, node); + final Tuple migrated = migrator.invoke(inEnvironment.getLocation(), inStore.getName(), key, value); // => Effectively delete the object if (migrated == null) { @@ -226,18 +236,18 @@ private void doMigrate(Store inStore, Store outStore, Function4 Store -> Key -> Value -> (Key, Value) + */ + @Override + public abstract Function4 run(); + } } From 5b355607f50c758c16e90ffa2bf3bd3a40c03e55 Mon Sep 17 00:00:00 2001 From: Jonas Arnhold Date: Tue, 15 Aug 2023 12:52:16 +0200 Subject: [PATCH 51/96] Add support for multi column validity dates (#3129) Add support for multi column validity dates in SQL and Legacy queryengine * Replace findValidityDateColumn() with findValidityDate() Co-authored-by: Torben Meyer Co-authored-by: awildturtok <1553491+awildturtok@users.noreply.github.com> --- .../apiv1/query/TableExportQuery.java | 25 ++-- .../apiv1/query/concept/filter/CQTable.java | 13 +- .../query/concept/specific/CQConcept.java | 9 +- .../models/datasets/concepts/Concept.java | 5 +- .../datasets/concepts/ValidityDate.java | 103 ++++++++++++-- .../conquery/models/events/Bucket.java | 15 +- .../conquery/models/events/EmptyBucket.java | 3 +- .../models/query/QueryExecutionContext.java | 4 +- .../query/queryplan/TableExportQueryPlan.java | 33 +++-- .../specific/EventDateUnionAggregator.java | 17 +-- .../specific/EventDurationSumAggregator.java | 10 +- .../specific/QuarterAggregator.java | 14 +- .../specific/SpecialDateUnion.java | 18 ++- .../specific/value/FirstValueAggregator.java | 19 +-- .../specific/value/LastValueAggregator.java | 13 +- .../queryplan/specific/ValidityDateNode.java | 24 ++-- .../models/query/resultinfo/UniqueNamer.java | 7 +- .../context/selects/ConceptSelects.java | 38 +++-- .../context/selects/MergedSelects.java | 87 +++++++----- .../conversion/context/selects/Selects.java | 5 +- .../cqelement/CQConceptConverter.java | 45 +++--- .../ConceptPreprocessingService.java | 19 +-- .../dialect/PostgreSqlFunctionProvider.java | 132 
++++++++++++++---- .../dialect/SqlFunctionProvider.java | 43 +++--- .../conversion/filter/FilterConverter.java | 6 +- .../query/ConceptQueryConverter.java | 32 ++++- .../select/DateDistanceConverter.java | 26 ++-- .../select/FirstValueConverter.java | 5 +- .../conquery/sql/models/ColumnDateRange.java | 84 +++++++++++ .../integration/IntegrationTests.java | 6 +- .../sql/SqlIntegrationTestSpec.java | 12 +- .../frontend/FilterSearchItemTest.java | 27 +++- .../types/SerialisationObjectsUtil.java | 3 +- .../util/SerialisationObjectsUtil.java | 3 +- .../sql/date_restriction/only_max/content.csv | 9 ++ .../date_restriction/only_max/expected.csv | 3 + .../only_max/only_max.spec.json | 84 +++++++++++ .../sql/date_restriction/only_min/content.csv | 9 ++ .../date_restriction/only_min/expected.csv | 3 + .../only_min/only_min.spec.json | 84 +++++++++++ tutorial/mimic_iii_demo/age_gender.ipynb | 2 +- tutorial/mimic_iii_demo/icd9.ipynb | 2 +- 42 files changed, 813 insertions(+), 288 deletions(-) create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java create mode 100644 backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv create mode 100644 backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv create mode 100644 backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json create mode 100644 backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv create mode 100644 backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv create mode 100644 backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java index 993cee86fc..bd5116638f 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java @@ -34,6 +34,7 @@ import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeNode; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; @@ -148,6 +149,13 @@ public void resolve(QueryResolveContext context) { final Map secondaryIdPositions = calculateSecondaryIdPositions(currentPosition); + final Set validityDates = tables.stream() + .map(CQConcept::getTables) + .flatMap(Collection::stream) + .map(CQTable::findValidityDate) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + // We need to know if a column is a concept column so we can prioritize it if it is also a SecondaryId final Set conceptColumns = tables.stream() .map(CQConcept::getTables) @@ -157,7 +165,7 @@ public void resolve(QueryResolveContext context) { .filter(Objects::nonNull) .collect(Collectors.toSet()); - positions = calculateColumnPositions(currentPosition, tables, secondaryIdPositions, conceptColumns); + positions = calculateColumnPositions(currentPosition, tables, secondaryIdPositions, conceptColumns, validityDates); resultInfos = createResultInfos(secondaryIdPositions, conceptColumns); } @@ -179,22 +187,21 @@ private Map 
calculateSecondaryIdPositions(Atomi return secondaryIdPositions; } - private static Map calculateColumnPositions(AtomicInteger currentPosition, List tables, Map secondaryIdPositions, Set conceptColumns) { + private static Map calculateColumnPositions(AtomicInteger currentPosition, List tables, Map secondaryIdPositions, Set conceptColumns, Set validityDates) { final Map positions = new HashMap<>(); for (CQConcept concept : tables) { for (CQTable table : concept.getTables()) { - final Column validityDateColumn = table.findValidityDateColumn(); - - if (validityDateColumn != null) { - positions.putIfAbsent(validityDateColumn, 0); - } - // Set column positions, set SecondaryId positions to precomputed ones. for (Column column : table.getConnector().getTable().getColumns()) { + // ValidityDates are handled separately in column=0 + if (validityDates.stream().anyMatch(vd -> vd.containsColumn(column))) { + continue; + } + if (positions.containsKey(column)) { continue; } @@ -341,4 +348,4 @@ public void visit(Consumer visitor) { public RequiredEntities collectRequiredEntities(QueryExecutionContext context) { return query.collectRequiredEntities(context); } -} \ No newline at end of file +} diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java index bea2268426..b858c3741d 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java @@ -10,8 +10,8 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.query.QueryResolveContext; import com.fasterxml.jackson.annotation.JsonBackReference; @@ -75,18 +75,17 @@ public void resolve(QueryResolveContext context) { } @CheckForNull - public Column findValidityDateColumn() { + public ValidityDate findValidityDate() { - // if no dateColumn is provided, we use the default instead which is always the first one. - // Set to null if none-available in the connector. 
if (dateColumn != null) { - return dateColumn.getValue().getColumn(); + return dateColumn.getValue(); } if (!connector.getValidityDates().isEmpty()) { - return connector.getValidityDates().get(0).getColumn(); + return connector.getValidityDates().get(0); } return null; } -} \ No newline at end of file + +} diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java index f9ea072431..e425c8f106 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java @@ -25,6 +25,7 @@ import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.query.DateAggregationMode; @@ -226,7 +227,7 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { final QPNode conceptSpecificNode = - getConcept().createConceptQuery(context, filters, aggregators, eventDateUnionAggregators, selectValidityDateColumn(table)); + getConcept().createConceptQuery(context, filters, aggregators, eventDateUnionAggregators, selectValidityDate(table)); // Link up the ExistsAggregators to the node existsAggregators.forEach(agg -> agg.setReference(conceptSpecificNode)); @@ -272,14 +273,14 @@ private static List> createAggregators(ConceptQueryPlan plan, List .collect(Collectors.toList()); } - private Column selectValidityDateColumn(CQTable table) { + private ValidityDate selectValidityDate(CQTable table) { if (table.getDateColumn() != null) { - return table.getDateColumn().getValue().getColumn(); + return table.getDateColumn().getValue(); } //else use this first defined validity date column if (!table.getConnector().getValidityDates().isEmpty()) { - return table.getConnector().getValidityDates().get(0).getColumn(); + return table.getConnector().getValidityDates().get(0); } return null; diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java index d4c51d7c3e..368334b54c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java @@ -14,7 +14,6 @@ import com.bakdata.conquery.models.auth.permissions.ConceptPermission; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.exceptions.ConfigurationException; @@ -89,12 +88,12 @@ public int countElements() { /** * Allows concepts to create their own altered FiltersNode if necessary. 
*/ - public QPNode createConceptQuery(QueryPlanContext context, List> filters, List> aggregators, List> eventDateAggregators, Column validityDateColumn) { + public QPNode createConceptQuery(QueryPlanContext context, List> filters, List> aggregators, List> eventDateAggregators, ValidityDate validityDate) { final QPNode child = filters.isEmpty() && aggregators.isEmpty() ? new Leaf() : FiltersNode.create(filters, aggregators, eventDateAggregators); // Only if a validityDateColumn exists, capsule children in ValidityDateNode - return validityDateColumn != null ? new ValidityDateNode(validityDateColumn, child) : child; + return validityDate != null ? new ValidityDateNode(validityDate, child) : child; } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java index 5bdbe1c8bf..762d507209 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java @@ -1,10 +1,14 @@ package com.bakdata.conquery.models.datasets.concepts; -import javax.validation.constraints.NotNull; +import javax.annotation.CheckForNull; +import javax.annotation.Nullable; import com.bakdata.conquery.io.jackson.serializer.NsIdRef; +import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ValidityDateId; @@ -15,6 +19,7 @@ import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; +import lombok.ToString; import lombok.extern.slf4j.Slf4j; @Getter @@ -24,39 +29,108 @@ public class ValidityDate extends Labeled implements NamespacedIdentifiable { @NsIdRef - @NotNull + @Nullable private Column column; + @NsIdRef + @Nullable + private Column startColumn; + @NsIdRef + @Nullable + private Column endColumn; @JsonBackReference + @ToString.Exclude @EqualsAndHashCode.Exclude private Connector connector; + public static ValidityDate create(Column column) { + final ValidityDate validityDate = new ValidityDate(); + validityDate.setColumn(column); + return validityDate; + } + + public static ValidityDate create(Column startColumn, Column endColumn) { + final ValidityDate validityDate = new ValidityDate(); + validityDate.setColumn(startColumn); + validityDate.setColumn(endColumn); + return validityDate; + } + @Override public ValidityDateId createId() { return new ValidityDateId(connector.getId(), getName()); } - @JsonIgnore - @ValidationMethod(message = "Column is not of Date or DateRange.") - public boolean isValidValidityDates() { - if (getColumn().getType().isDateCompatible()) { - return true; + @CheckForNull + public CDateRange getValidityDate(int event, Bucket bucket) { + // I spent a lot of time trying to create two classes implementing single/multi-column valditiy dates separately. + // JsonCreator was not happy, and I could not figure out why. This is probably the most performant implementation that's not two classes. 
+ + if (getColumn() != null) { + if (bucket.has(event, getColumn())) { + return bucket.getAsDateRange(event, getColumn()); + } + + return null; + } + + final Column startColumn = getStartColumn(); + final Column endColumn = getEndColumn(); + + final boolean hasStart = bucket.has(event, startColumn); + final boolean hasEnd = bucket.has(event, endColumn); + + if (!hasStart && !hasEnd) { + return null; } - log.error("ValidityDate-Column[{}] is not of type DATE or DATERANGE", getColumn().getId()); - return false; + final int start = hasStart ? bucket.getDate(event, startColumn) : Integer.MIN_VALUE; + final int end = hasEnd ? bucket.getDate(event, endColumn) : Integer.MAX_VALUE; + + return CDateRange.of(start, end); + } + + public boolean containsColumn(Column column) { + return column.equals(getColumn()) || column.equals(getStartColumn()) || column.equals(getEndColumn()); } @JsonIgnore @ValidationMethod(message = "ValidityDate is not for Connectors' Table.") public boolean isForConnectorsTable() { - if (getColumn().getTable().equals(connector.getTable())) { - return true; + final boolean anyColumnNotForConnector = + (startColumn != null && !startColumn.getTable().equals(connector.getTable())) + || (endColumn != null && !endColumn.getTable().equals(connector.getTable())); + + final boolean columnNotForConnector = column != null && !column.getTable().equals(connector.getTable()); + + return !anyColumnNotForConnector && !columnNotForConnector; + } + + @JsonIgnore + @ValidationMethod(message = "Single column date range (set via column) and two column date range (set via startColumn and endColumn) are exclusive.") + public boolean isExclusiveValidityDates() { + if (column == null) { + return startColumn != null && endColumn != null; } + return startColumn == null && endColumn == null; + } - log.error("ValidityDate[{}](Column = `{}`) does not belong to Connector[{}]#Table[{}]", getId(), getColumn().getId(), getId(), connector.getTable().getId()); + @JsonIgnore + @ValidationMethod(message = "Both columns of a two-column validity date have to be of type DATE.") + public boolean isValidTwoColumnValidityDates() { + if (startColumn == null || endColumn == null) { + return true; + } + return startColumn.getType() == MajorTypeId.DATE && endColumn.getType() == MajorTypeId.DATE; + } - return false; + @JsonIgnore + @ValidationMethod(message = "Column is not of type DATE or DATE_RANGE.") + public boolean isValidValidityDatesSingleColumn() { + if (column == null) { + return true; + } + return column.getType().isDateCompatible(); } @JsonIgnore @@ -64,4 +138,5 @@ public boolean isForConnectorsTable() { public Dataset getDataset() { return connector.getDataset(); } -} \ No newline at end of file + +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java b/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java index f7df84d48c..618920194f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java @@ -17,6 +17,7 @@ import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Import; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.stores.root.BooleanStore; import com.bakdata.conquery.models.events.stores.root.ColumnStore; import com.bakdata.conquery.models.events.stores.root.DateRangeStore; @@ -164,8 +165,14 @@ public CDateRange 
getDateRange(int event, Column column) { return ((DateRangeStore) getStore(column)).getDateRange(event); } - public boolean eventIsContainedIn(int event, Column column, CDateSet dateRanges) { - return dateRanges.intersects(getAsDateRange(event, column)); + public boolean eventIsContainedIn(int event, ValidityDate validityDate, CDateSet dateRanges) { + final CDateRange dateRange = validityDate.getValidityDate(event, this); + + if (dateRange == null){ + return false; + } + + return dateRanges.intersects(dateRange); } public CDateRange getAsDateRange(int event, Column column) { @@ -181,10 +188,10 @@ public Object createScriptValue(int event, @NotNull Column column) { } public Map calculateMap(int event) { - Map out = new HashMap<>(stores.length); + final Map out = new HashMap<>(stores.length); for (int i = 0; i < stores.length; i++) { - ColumnStore store = stores[i]; + final ColumnStore store = stores[i]; if (!store.has(event)) { continue; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java index e31cee658a..d820c82fb2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java @@ -7,6 +7,7 @@ import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.stores.root.ColumnStore; import lombok.Getter; @@ -25,7 +26,7 @@ public EmptyBucket() { @Override - public boolean eventIsContainedIn(int event, Column column, CDateSet dateRanges) { + public boolean eventIsContainedIn(int event, ValidityDate column, CDateSet dateRanges) { return false; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java index 666962d7e0..f0cb08f30d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java @@ -7,10 +7,10 @@ import com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.BucketManager; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; @@ -36,7 +36,7 @@ public class QueryExecutionContext { private final BucketManager bucketManager; - private Column validityDateColumn; + private ValidityDate validityDateColumn; @NonNull private CDateSet dateRestriction = CDateSet.createFull(); private Connector connector; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java index 73551b7f07..9288871085 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java @@ -7,7 +7,9 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.models.common.CDateSet; +import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.query.QueryExecutionContext; @@ -80,7 +82,7 @@ public Optional execute(QueryExecutionContext ctx, Entity for (Map.Entry entry : tables.entrySet()) { final CQTable cqTable = entry.getKey(); - final Column validityDateColumn = cqTable.findValidityDateColumn(); + final ValidityDate validityDate = cqTable.findValidityDate(); final QPNode query = entry.getValue(); final Map cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, cqTable.getConnector()); @@ -95,8 +97,8 @@ public Optional execute(QueryExecutionContext ctx, Entity for (int event = start; event < end; event++) { - if (validityDateColumn != null - && !bucket.eventIsContainedIn(event, validityDateColumn, dateRange)) { + if (validityDate != null + && !bucket.eventIsContainedIn(event, validityDate, dateRange)) { continue; } @@ -104,7 +106,7 @@ public Optional execute(QueryExecutionContext ctx, Entity continue; } - final Object[] resultRow = collectRow(totalColumns, cqTable, bucket, event, validityDateColumn, cblocks.get(bucket)); + final Object[] resultRow = collectRow(totalColumns, cqTable, bucket, event, validityDate, cblocks.get(bucket)); results.add(resultRow); } @@ -146,29 +148,40 @@ private boolean isRowIncluded(QPNode query, Bucket bucket, Entity entity, int ev return query.isContained(); } - private Object[] collectRow(int totalColumns, CQTable exportDescription, Bucket bucket, int event, Column validityDateColumn, CBlock cblock) { + private Object[] collectRow(int totalColumns, CQTable exportDescription, Bucket bucket, int event, ValidityDate validityDate, CBlock cblock) { final Object[] entry = new Object[totalColumns]; + + final CDateRange date; + + if(validityDate != null && (date = validityDate.getValidityDate(event, bucket)) != null) { + entry[0] = List.of(date); + } + entry[1] = exportDescription.getConnector().getTable().getLabel(); for (Column column : exportDescription.getConnector().getTable().getColumns()) { - if (!bucket.has(event, column)) { + // ValidityDates are handled separately. 
+ if (validityDate != null && validityDate.containsColumn(column)){ continue; } - final int position = positions.get(column); + if (!positions.containsKey(column)) { + continue; + } - if (column.equals(validityDateColumn)) { - entry[position] = List.of(bucket.getAsDateRange(event, column)); + if (!bucket.has(event, column)) { continue; } + final int position = positions.get(column); + if (!rawConceptValues && column.equals(exportDescription.getConnector().getColumn())) { entry[position] = cblock.getMostSpecificChildLocalId(event); continue; } - + entry[position] = bucket.createScriptValue(event, column); } return entry; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDateUnionAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDateUnionAggregator.java index 7f10736554..0f3344f648 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDateUnionAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDateUnionAggregator.java @@ -3,8 +3,9 @@ import java.util.Set; import com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; @@ -24,7 +25,7 @@ public class EventDateUnionAggregator extends Aggregator { private final Set requiredTables; - private Column validityDateColumn; + private ValidityDate validityDateColumn; private CDateSet set = CDateSet.createEmpty(); private CDateSet dateRestriction; @@ -41,10 +42,7 @@ public void init(Entity entity, QueryExecutionContext context) { @Override public void nextTable(QueryExecutionContext ctx, Table currentTable) { validityDateColumn = ctx.getValidityDateColumn(); - if (validityDateColumn != null && !validityDateColumn.getType().isDateCompatible()) { - throw new IllegalStateException("The validityDateColumn " + validityDateColumn + " is not a DATE TYPE"); - } - + dateRestriction = ctx.getDateRestriction(); super.nextTable(ctx, currentTable); } @@ -61,10 +59,13 @@ public void acceptEvent(Bucket bucket, int event) { return; } - if (!bucket.has(event, validityDateColumn)) { + final CDateRange dateRange = validityDateColumn.getValidityDate(event, bucket); + + if (dateRange == null){ return; } - set.maskedAdd(bucket.getAsDateRange(event, validityDateColumn), dateRestriction); + + set.maskedAdd(dateRange, dateRestriction); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDurationSumAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDurationSumAggregator.java index 4a0119f910..5f563588cc 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDurationSumAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/EventDurationSumAggregator.java @@ -6,8 +6,8 @@ import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; -import com.bakdata.conquery.models.datasets.Column; import 
com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; @@ -26,7 +26,7 @@ public class EventDurationSumAggregator extends Aggregator { @CheckForNull private CDateSet dateRestriction; @CheckForNull - private Column validityDateColumn; + private ValidityDate validityDateColumn; private int realUpperBound; @Override @@ -48,12 +48,12 @@ public void acceptEvent(Bucket bucket, int event) { return; } - if (!bucket.has(event, validityDateColumn)) { + final CDateRange value = validityDateColumn.getValidityDate(event, bucket); + + if (value == null){ return; } - final CDateRange value = bucket.getAsDateRange(event, validityDateColumn); - set.maskedAdd(value, dateRestriction, realUpperBound); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/QuarterAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/QuarterAggregator.java index f75a71f8d7..4e09852192 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/QuarterAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/QuarterAggregator.java @@ -8,8 +8,8 @@ import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.QuarterUtils; import com.bakdata.conquery.models.common.daterange.CDateRange; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; @@ -31,7 +31,7 @@ public class QuarterAggregator extends Aggregator { private CDateSet set = CDateSet.createEmpty(); private CDateSet dateRestriction; - private Column column; + private ValidityDate validityDate; private int realUpperBound; @@ -48,19 +48,19 @@ public void init(Entity entity, QueryExecutionContext context) { @Override public void nextTable(QueryExecutionContext ctx, Table currentTable) { - column = ctx.getValidityDateColumn(); + validityDate = ctx.getValidityDateColumn(); dateRestriction = ctx.getDateRestriction(); } @Override public void acceptEvent(Bucket bucket, int event) { - if (getColumn() == null || !bucket.has(event, getColumn())) { + final CDateRange dateRange = validityDate.getValidityDate(event, bucket); + + if (dateRange == null){ return; } - final CDateRange value = bucket.getAsDateRange(event, getColumn()); - - set.maskedAdd(value, dateRestriction, realUpperBound); + set.maskedAdd(dateRange, dateRestriction, realUpperBound); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/SpecialDateUnion.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/SpecialDateUnion.java index 7723b7cd33..e2ad370f73 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/SpecialDateUnion.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/SpecialDateUnion.java @@ -1,8 +1,9 @@ package com.bakdata.conquery.models.query.queryplan.aggregators.specific; import 
com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; @@ -20,7 +21,7 @@ public class SpecialDateUnion extends Aggregator { private CDateSet set = CDateSet.createEmpty(); - private Column currentColumn; + private ValidityDate validityDate; private CDateSet dateRestriction; @@ -31,18 +32,25 @@ public void init(Entity entity, QueryExecutionContext context) { @Override public void nextTable(QueryExecutionContext ctx, Table table) { - currentColumn = ctx.getValidityDateColumn(); + validityDate = ctx.getValidityDateColumn(); dateRestriction = ctx.getDateRestriction(); } @Override public void acceptEvent(Bucket bucket, int event) { - if (currentColumn == null || !bucket.has(event, currentColumn)) { + if (validityDate == null) { set.addAll(dateRestriction); return; } - set.maskedAdd(bucket.getAsDateRange(event, currentColumn), dateRestriction); + final CDateRange dateRange = validityDate.getValidityDate(event, bucket); + + if (dateRange == null){ + set.addAll(dateRestriction); + return; + } + + set.maskedAdd(dateRange, dateRestriction); } /** diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/FirstValueAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/FirstValueAggregator.java index 1887a4fbd4..7e70495cc3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/FirstValueAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/FirstValueAggregator.java @@ -5,6 +5,7 @@ import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; @@ -26,7 +27,7 @@ public class FirstValueAggregator extends SingleColumnAggregator { private int date = CDateRange.POSITIVE_INFINITY; - private Column validityDateColumn; + private ValidityDate validityDate; public FirstValueAggregator(Column column) { super(column); @@ -41,7 +42,7 @@ public void init(Entity entity, QueryExecutionContext context) { @Override public void nextTable(QueryExecutionContext ctx, Table currentTable) { - validityDateColumn = ctx.getValidityDateColumn(); + validityDate = ctx.getValidityDateColumn(); } @Override @@ -50,23 +51,25 @@ public void acceptEvent(Bucket bucket, int event) { return; } - if (validityDateColumn == null) { + if (validityDate == null) { // If there is no validity date, take the first possible value if(selectedBucket == null) { selectedBucket = bucket; selectedEvent = OptionalInt.of(event); } else { - log.trace("There is more than one value for the {}. Choosing the very first one encountered", this.getClass().getSimpleName()); + log.trace("There is more than one value for the {}. Choosing the very first one encountered", getClass().getSimpleName()); } return; } - if(! 
bucket.has(event, validityDateColumn)) { - // TODO this might be an IllegalState + + final CDateRange dateRange = validityDate.getValidityDate(event, bucket); + + if (dateRange == null){ return; } - int next = bucket.getAsDateRange(event, validityDateColumn).getMinValue(); + final int next = dateRange.getMinValue(); if (next < date) { date = next; @@ -74,7 +77,7 @@ public void acceptEvent(Bucket bucket, int event) { selectedBucket = bucket; } else if (next == date) { - log.trace("There is more than one value for the {}. Choosing the very first one encountered", this.getClass().getSimpleName()); + log.trace("There is more than one value for the {}. Choosing the very first one encountered", getClass().getSimpleName()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/LastValueAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/LastValueAggregator.java index bee141cbab..0e359c3f98 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/LastValueAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/LastValueAggregator.java @@ -5,6 +5,7 @@ import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; @@ -26,7 +27,7 @@ public class LastValueAggregator extends SingleColumnAggregator { private Bucket selectedBucket; private int date; - private Column validityDateColumn; + private ValidityDate validityDateColumn; public LastValueAggregator(Column column) { super(column); @@ -60,14 +61,14 @@ public void acceptEvent(Bucket bucket, int event) { } return; } - - if(! 
bucket.has(event, validityDateColumn)) { - // TODO this might be an IllegalState + + final CDateRange dateRange = validityDateColumn.getValidityDate(event, bucket); + + if (dateRange == null){ return; } - - int next = bucket.getAsDateRange(event, validityDateColumn).getMaxValue(); + int next = dateRange.getMaxValue(); if (next > date) { date = next; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java index 940912753b..9d29a7f8dd 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java @@ -5,8 +5,8 @@ import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.query.QueryExecutionContext; @@ -15,32 +15,30 @@ import com.google.common.base.Preconditions; import lombok.ToString; -@ToString(of = "validityDateColumn", callSuper = true) +@ToString(of = "validityDate", callSuper = true) public class ValidityDateNode extends QPChainNode { - private final Column validityDateColumn; + private final ValidityDate validityDate; private transient CDateSet restriction; protected Map preCurrentRow; - public ValidityDateNode(Column validityDateColumn, QPNode child) { + public ValidityDateNode(ValidityDate validityDate, QPNode child) { super(child); - Preconditions.checkNotNull(validityDateColumn, this.getClass().getSimpleName() + " needs a validityDateColumn"); - this.validityDateColumn = validityDateColumn; + Preconditions.checkNotNull(validityDate, this.getClass().getSimpleName() + " needs a validityDate"); + this.validityDate = validityDate; } @Override public void acceptEvent(Bucket bucket, int event) { + //no dateRestriction or event is in date restriction + final boolean contained = bucket.eventIsContainedIn(event, validityDate, context.getDateRestriction()); - //if event has null validityDate cancel - if (!bucket.has(event, validityDateColumn)) { + if (!contained){ return; } - //no dateRestriction or event is in date restriction - if (restriction.isAll() || bucket.eventIsContainedIn(event, validityDateColumn, context.getDateRestriction())) { - getChild().acceptEvent(bucket, event); - } + getChild().acceptEvent(bucket, event); } @Override @@ -59,7 +57,7 @@ public boolean isContained() { @Override public void nextTable(QueryExecutionContext ctx, Table currentTable) { - super.nextTable(ctx.withValidityDateColumn(validityDateColumn), currentTable); + super.nextTable(ctx.withValidityDateColumn(validityDate), currentTable); restriction = ctx.getDateRestriction(); preCurrentRow = ctx.getBucketManager().getEntityCBlocksForConnector(getEntity(), context.getConnector()); diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java index 5c91e52b56..ce9502d9ea 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java @@ -1,15 +1,11 @@ package com.bakdata.conquery.models.query.resultinfo; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import java.util.Objects; import com.bakdata.conquery.models.query.PrintSettings; import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.collect.ConcurrentHashMultiset; import com.google.common.collect.Multiset; -import com.google.common.collect.Multisets; import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; @@ -38,7 +34,8 @@ public class UniqueNamer { @NonNull @JsonIgnore public final String getUniqueName(ResultInfo info) { - @NonNull String label = Objects.requireNonNullElse(info.userColumnName(settings), info.defaultColumnName(settings)); + @NonNull + String label = Objects.requireNonNullElse(info.userColumnName(settings), info.defaultColumnName(settings)); // lookup if prefix is needed and computed it if necessary String uniqueName = label; synchronized (ocurrenceCounter) { diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java index 8e42dde561..2d2d808632 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java @@ -1,11 +1,13 @@ package com.bakdata.conquery.sql.conversion.context.selects; +import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Optional; -import java.util.function.Function; import java.util.stream.Stream; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.sql.models.ColumnDateRange; import lombok.Builder; import lombok.Value; import lombok.With; @@ -20,19 +22,26 @@ public class ConceptSelects implements Selects { Field primaryColumn; - Optional> dateRestriction; - Optional> validityDate; + Optional dateRestrictionRange; + Optional validityDate; List> eventSelect; List> eventFilter; List> groupSelect; List> groupFilter; + @Override + public Selects withValidityDate(ColumnDateRange validityDate) { + return this.toBuilder() + .validityDate(Optional.of(validityDate)) + .build(); + } + @Override public ConceptSelects byName(String qualifier) { return builder() .primaryColumn(this.mapFieldToQualifier(qualifier, this.primaryColumn)) - .dateRestriction(this.mapFieldStreamToQualifier(qualifier, this.dateRestriction.stream()).findFirst()) - .validityDate(this.mapFieldStreamToQualifier(qualifier, this.validityDate.stream()).findFirst()) + .dateRestrictionRange(this.dateRestrictionRange.map(dateRestriction -> dateRestriction.qualify(qualifier))) + .validityDate(this.validityDate.map(validityDate -> validityDate.qualify(qualifier))) .eventSelect(this.mapFieldStreamToQualifier(qualifier, this.eventSelect.stream()).toList()) .eventFilter(this.mapFieldStreamToQualifier(qualifier, this.eventFilter.stream()).toList()) .groupSelect(this.mapFieldStreamToQualifier(qualifier, this.groupSelect.stream()).toList()) @@ -51,19 +60,22 @@ public List> all() { private Stream> primaryColumnAndValidityDate() { return Stream.concat( Stream.of(this.primaryColumn), - this.validityDate.stream() + this.validityDate.map(ColumnDateRange::toFields).stream().flatMap(Collection::stream) ); } @Override public List> explicitSelects() { - return 
Stream.of( - this.dateRestriction.stream(), - this.eventSelect.stream(), - this.eventFilter.stream(), - this.groupSelect.stream(), - this.groupFilter.stream() - ).flatMap(Function.identity()).toList(); + + List> explicitSelects = new ArrayList<>(); + + dateRestrictionRange.ifPresent(columnDateRange -> explicitSelects.addAll(columnDateRange.toFields())); + explicitSelects.addAll(eventSelect); + explicitSelects.addAll(eventFilter); + explicitSelects.addAll(groupSelect); + explicitSelects.addAll(groupFilter); + + return explicitSelects; } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java index 1ce36e8ae8..7d3547d5b2 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java @@ -1,10 +1,12 @@ package com.bakdata.conquery.sql.conversion.context.selects; +import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.Stream; import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.models.ColumnDateRange; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Value; @@ -20,11 +22,13 @@ @AllArgsConstructor(access = AccessLevel.PRIVATE) public class MergedSelects implements Selects { - String PRIMARY_COLUMN_ALIAS = "primary_column"; - + public static final String PRIMARY_COLUMN_NAME = "primary_column"; Field primaryColumn; - Optional> validityDate; + /** + * An aggregated validity date of all validity dates of each {@link QueryStep} passed to the {@link MergedSelects} constructor. + */ + Optional validityDate; /** * A merged list of all select fields, except the primary column and validity date, @@ -35,43 +39,25 @@ public class MergedSelects implements Selects { public MergedSelects(List querySteps) { this.primaryColumn = this.coalescePrimaryColumns(querySteps); - this.validityDate = this.extractValidityDate(querySteps); + this.validityDate = this.extractValidityDates(querySteps); this.mergedSelects = this.mergeSelects(querySteps); } - private Field coalescePrimaryColumns(List querySteps) { - List> primaryColumns = querySteps.stream() - .map(queryStep -> this.mapFieldToQualifier(queryStep.getCteName(), queryStep.getSelects().getPrimaryColumn())) - .toList(); - return DSL.coalesce((Object) primaryColumns.get(0), primaryColumns.subList(1, primaryColumns.size()).toArray()) - .as(PRIMARY_COLUMN_ALIAS); - } - - private Optional> extractValidityDate(List querySteps) { - // TODO: date aggregation... 
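The folding above is how MergedSelects keeps a single primary column across joined query steps. A standalone sketch under the same jOOQ imports this file already uses; the CTE names are illustrative only and not part of the patch:

	// Sketch: qualify each step's primary column by its CTE name and coalesce them
	// into the single "primary_column" the merged step exposes downstream.
	// Renders roughly as: COALESCE("concept_a"."primary_column", "concept_b"."primary_column") AS "primary_column"
	static Field<Object> coalescePrimaryColumns(List<String> cteNames) {
		List<Field<Object>> primaryColumns = cteNames.stream()
													  .map(cte -> DSL.field(DSL.name(cte, "primary_column")))
													  .toList();
		return DSL.coalesce((Object) primaryColumns.get(0), primaryColumns.subList(1, primaryColumns.size()).toArray())
				  .as("primary_column");
	}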
- if (querySteps.isEmpty()) { - return Optional.empty(); - } - QueryStep firstQueryStep = querySteps.get(0); - return this.mapFieldStreamToQualifier(firstQueryStep.getCteName(), firstQueryStep.getSelects().getValidityDate().stream()) - .findFirst(); - } - - private List> mergeSelects(List queriesToJoin) { - return queriesToJoin.stream() - .flatMap(queryStep -> queryStep.getSelects().explicitSelects().stream() - .map(field -> this.mapFieldToQualifier(queryStep.getCteName(), field))) - .toList(); + @Override + public Selects withValidityDate(ColumnDateRange validityDate) { + return new MergedSelects( + this.primaryColumn, + Optional.of(validityDate), + this.mergedSelects + ); } @Override public MergedSelects byName(String qualifier) { return new MergedSelects( this.mapFieldToQualifier(qualifier, this.primaryColumn), - this.mapFieldStreamToQualifier(qualifier, this.validityDate.stream()).findFirst(), - this.mergedSelects.stream() - .map(field -> this.mapFieldToQualifier(qualifier, field)) - .toList() + this.validityDate.map(columnDateRange -> columnDateRange.qualify(qualifier)), + this.mapFieldStreamToQualifier(qualifier, this.mergedSelects.stream()).toList() ); } @@ -83,16 +69,43 @@ public List> all() { ).toList(); } + @Override + public List> explicitSelects() { + return this.mergedSelects; + } + + private Field coalescePrimaryColumns(List querySteps) { + List> primaryColumns = querySteps.stream() + .map(queryStep -> this.mapFieldToQualifier(queryStep.getCteName(), queryStep.getSelects() + .getPrimaryColumn())) + .toList(); + return DSL.coalesce((Object) primaryColumns.get(0), primaryColumns.subList(1, primaryColumns.size()).toArray()) + .as(PRIMARY_COLUMN_NAME); + } + + private Optional extractValidityDates(List querySteps) { + // TODO: date aggregation... + return querySteps.stream() + .filter(queryStep -> queryStep.getSelects().getValidityDate().isPresent()) + .map(queryStep -> { + ColumnDateRange validityDate = queryStep.getSelects().getValidityDate().get(); + return validityDate.qualify(queryStep.getCteName()); + }) + .findFirst(); + } + + private List> mergeSelects(List queriesToJoin) { + return queriesToJoin.stream() + .flatMap(queryStep -> queryStep.getSelects().explicitSelects().stream() + .map(field -> this.mapFieldToQualifier(queryStep.getCteName(), field))) + .toList(); + } + private Stream> primaryColumnAndValidityDate() { return Stream.concat( Stream.of(this.primaryColumn), - this.validityDate.stream() + this.validityDate.map(ColumnDateRange::toFields).stream().flatMap(Collection::stream) ); } - @Override - public List> explicitSelects() { - return this.mergedSelects; - } - } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java index 6229c247a9..09d7259e25 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java @@ -4,6 +4,7 @@ import java.util.Optional; import java.util.stream.Stream; +import com.bakdata.conquery.sql.models.ColumnDateRange; import org.jooq.Field; import org.jooq.impl.DSL; @@ -11,7 +12,9 @@ public interface Selects { Field getPrimaryColumn(); - Optional> getValidityDate(); + Optional getValidityDate(); + + Selects withValidityDate(ColumnDateRange validityDate); /** * Returns the selected columns as fully qualified reference. 
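The two methods added to the Selects interface above carry the rest of this change: converters read the optional validity date and can replace it without knowing whether it is backed by one daterange column or a start/end pair. A minimal usage sketch; the helper name is made up and not part of this patch:

	// Usage sketch (helper name is illustrative): swap whatever validity date a Selects
	// carries for a single pre-built field, leaving the primary column and other selects untouched.
	static Selects replaceValidityDate(Selects selects, Field<String> datesField) {
		if (selects.getValidityDate().isEmpty()) {
			return selects;
		}
		return selects.withValidityDate(ColumnDateRange.of(datesField));
	}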
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java index db4e9b43b2..400dac447f 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java @@ -73,22 +73,38 @@ private QueryStep buildDateRestrictionQueryStep( String conceptLabel, QueryStep previous ) { - if (((ConceptSelects) previous.getSelects()).getDateRestriction().isEmpty()) { + if (((ConceptSelects) previous.getSelects()).getDateRestrictionRange().isEmpty()) { return previous; } ConceptSelects dateRestrictionSelects = this.prepareDateRestrictionSelects(node, previous); - List dateRestriction = this.buildDateRestriction(context, previous); + Condition dateRestriction = this.buildDateRestriction(context, previous); + String dateRestrictionCteName = "concept_%s_date_restriction".formatted(conceptLabel); return QueryStep.builder() - .cteName(createCteName(conceptLabel, "_date_restriction")) + .cteName(dateRestrictionCteName) .fromTable(QueryStep.toTableLike(previous.getCteName())) .selects(dateRestrictionSelects) - .conditions(dateRestriction) + .conditions(List.of(dateRestriction)) .predecessors(List.of(previous)) .build(); } + private ConceptSelects prepareDateRestrictionSelects(CQConcept conceptNode, QueryStep previous) { + ConceptSelects.ConceptSelectsBuilder selectsBuilder = ((ConceptSelects) previous.getQualifiedSelects()).toBuilder(); + selectsBuilder.dateRestrictionRange(Optional.empty()); + if (conceptNode.isExcludeFromTimeAggregation()) { + selectsBuilder.validityDate(Optional.empty()); + } + return selectsBuilder.build(); + } + + private Condition buildDateRestriction(ConversionContext context, QueryStep previous) { + ConceptSelects previousSelects = (ConceptSelects) previous.getSelects(); + return context.getSqlDialect().getFunction() + .dateRestriction(previousSelects.getDateRestrictionRange().get(), previousSelects.getValidityDate().get()); + } + /** * selects: * - all of previous steps @@ -141,27 +157,6 @@ private QueryStep buildEventFilterQueryStep( .build(); } - private ConceptSelects prepareDateRestrictionSelects(CQConcept node, QueryStep previous) { - ConceptSelects.ConceptSelectsBuilder selectsBuilder = ((ConceptSelects) previous.getQualifiedSelects()).toBuilder(); - selectsBuilder.dateRestriction(Optional.empty()); - if (node.isExcludeFromTimeAggregation()) { - selectsBuilder.validityDate(Optional.empty()); - } - return selectsBuilder.build(); - } - - private List buildDateRestriction(ConversionContext context, QueryStep previous) { - return ((ConceptSelects) previous.getSelects()).getDateRestriction() - .map(dateRestrictionColumn -> getDateRestrictionAsCondition(context, previous, dateRestrictionColumn)) - .orElseGet(Collections::emptyList); - } - - private static List getDateRestrictionAsCondition(ConversionContext context, QueryStep previous, Field dateRestrictionColumn) { - return previous.getSelects().getValidityDate().stream() - .map(validityDateColumn -> context.getSqlDialect().getFunction().dateRestriction(dateRestrictionColumn, validityDateColumn)) - .toList(); - } - private ConceptSelects prepareEventSelectSelects( ConversionContext context, CQTable table, diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java index 2800d8b30a..94970f4621 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java @@ -7,19 +7,17 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.sql.conversion.context.ConversionContext; import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; import com.bakdata.conquery.sql.conversion.context.step.QueryStep; import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.models.ColumnDateRange; import org.jooq.Field; import org.jooq.impl.DSL; public class ConceptPreprocessingService { - private static final String DATE_RESTRICTION_COLUMN_NAME = "date_restriction"; - private static final String VALIDITY_DATE_COLUMN_NAME_SUFFIX = "_validity_date"; private final CQConcept concept; private final ConversionContext context; private final SqlFunctionProvider sqlFunctionProvider; @@ -43,7 +41,7 @@ public QueryStep buildPreprocessingQueryStepForTable(String conceptLabel, CQTabl ConceptSelects.ConceptSelectsBuilder selectsBuilder = ConceptSelects.builder(); selectsBuilder.primaryColumn(DSL.field(context.getConfig().getPrimaryColumn())); - selectsBuilder.dateRestriction(this.getDateRestrictionSelect(table)); + selectsBuilder.dateRestrictionRange(this.getDateRestrictionSelect(table)); selectsBuilder.validityDate(this.getValidityDateSelect(table, conceptLabel)); List> conceptSelectFields = this.getColumnSelectReferences(table); @@ -71,23 +69,18 @@ public QueryStep buildPreprocessingQueryStepForTable(String conceptLabel, CQTabl .build(); } - private Optional> getDateRestrictionSelect(CQTable table) { + private Optional getDateRestrictionSelect(CQTable table) { if (!this.context.dateRestrictionActive() || !this.tableHasValidityDates(table)) { return Optional.empty(); } - CDateRange dateRestrictionRange = this.context.getDateRestrictionRange(); - Field dateRestriction = this.sqlFunctionProvider.daterange(dateRestrictionRange) - .as(DATE_RESTRICTION_COLUMN_NAME); - return Optional.of(dateRestriction); + return Optional.of(sqlFunctionProvider.daterange(context.getDateRestrictionRange())); } - private Optional> getValidityDateSelect(CQTable table, String conceptLabel) { + private Optional getValidityDateSelect(CQTable table, String conceptLabel) { if (!this.validityDateIsRequired(table)) { return Optional.empty(); } - Field validityDateRange = this.sqlFunctionProvider.daterange(table.findValidityDateColumn()) - .as(conceptLabel + VALIDITY_DATE_COLUMN_NAME_SUFFIX); - return Optional.of(validityDateRange); + return Optional.of(sqlFunctionProvider.daterange(table.findValidityDate(), conceptLabel)); } /** diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java index 5ee2069393..5fe2c9620c 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java +++ 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java @@ -1,8 +1,16 @@ package com.bakdata.conquery.sql.conversion.dialect; +import java.sql.Date; +import java.time.temporal.ChronoUnit; +import java.util.Map; + import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; +import com.bakdata.conquery.sql.models.ColumnDateRange; +import org.jetbrains.annotations.NotNull; import org.jooq.Condition; +import org.jooq.DatePart; import org.jooq.Field; import org.jooq.impl.DSL; @@ -13,40 +21,114 @@ */ public class PostgreSqlFunctionProvider implements SqlFunctionProvider { + private static final String INFINITY_DATE_VALUE = "infinity"; + private static final String MINUS_INFINITY_DATE_VALUE = "-infinity"; + + private static final Map DATE_CONVERSION = Map.of( + ChronoUnit.DECADES, DatePart.DECADE, + ChronoUnit.YEARS, DatePart.YEAR, + ChronoUnit.DAYS, DatePart.DAY, + ChronoUnit.MONTHS, DatePart.MONTH, + ChronoUnit.CENTURIES, DatePart.CENTURY + ); + @Override - public Condition dateRestriction(Field dateRestrictionColumn, Field validityDateColumn) { - // the && operator checks if two ranges overlap (see https://www.postgresql.org/docs/15/functions-range.html) - return DSL.condition( - "{0} && {1}", - dateRestrictionColumn, - validityDateColumn - ); + public Condition dateRestriction(ColumnDateRange dateRestriction, ColumnDateRange validityDate) { + if (!validityDate.isSingleColumnRange()) { + throw new UnsupportedOperationException("The validity date range has to be converted to a daterange field in the preprocessing step."); + } + else { + // the && operator checks if two ranges overlap (see https://www.postgresql.org/docs/15/functions-range.html) + return DSL.condition( + "{0} && {1}", + dateRestriction.getRange(), + validityDate.getRange() + ); + } } @Override - public Field daterange(CDateRange dateRestriction) { - return DSL.field( + public ColumnDateRange daterange(CDateRange dateRestriction) { + + String min = MINUS_INFINITY_DATE_VALUE; + String max = INFINITY_DATE_VALUE; + + if (dateRestriction.hasLowerBound()) { + min = dateRestriction.getMin().toString(); + } + if (dateRestriction.hasUpperBound()) { + max = dateRestriction.getMax().toString(); + } + + Field dateRestrictionRange = DSL.field( "daterange({0}::date, {1}::date, '[]')", - DSL.val(dateRestriction.getMin().toString()), - DSL.val(dateRestriction.getMax().toString()) + DSL.val(min), + DSL.val(max) ); + + return ColumnDateRange.of(dateRestrictionRange) + .asDateRestrictionRange(); } @Override - public Field daterange(Column column) { - return switch (column.getType()) { - // if validityDateColumn is a DATE_RANGE we can make use of Postgres' integrated daterange type. - case DATE_RANGE -> DSL.field(column.getName()); - // if the validity date column is not of daterange type, we construct it manually - case DATE -> DSL.field( - "daterange({0}, {0}, '[]')", - DSL.field(column.getName()) - ); - default -> throw new IllegalArgumentException( - "Given column type '%s' can't be converted to a proper date restriction." 
- .formatted(column.getType()) - ); - }; + public ColumnDateRange daterange(ValidityDate validityDate, String alias) { + + Field dateRange; + + if (validityDate.getEndColumn() != null) { + + Column startColumn = validityDate.getStartColumn(); + Column endColumn = validityDate.getEndColumn(); + + dateRange = daterange(startColumn, endColumn, "[]"); + } + else { + Column column = validityDate.getColumn(); + dateRange = switch (column.getType()) { + // if validityDateColumn is a DATE_RANGE we can make use of Postgres' integrated daterange type. + case DATE_RANGE -> DSL.field(DSL.name(column.getName())); + // if the validity date column is not of daterange type, we construct it manually + case DATE -> daterange(column, column, "[]"); + default -> throw new IllegalArgumentException( + "Given column type '%s' can't be converted to a proper date restriction.".formatted(column.getType()) + ); + }; + } + + return ColumnDateRange.of(dateRange) + .asValidityDateRange(alias); + } + + @Override + public Field daterangeString(ColumnDateRange columnDateRange) { + if (!columnDateRange.isSingleColumnRange()) { + throw new UnsupportedOperationException("All column date ranges should have been converted to single column ranges."); + } + return columnDateRange.getRange(); + } + + @Override + public Field dateDistance(ChronoUnit timeUnit, Column startDateColumn, Date endDateExpression) { + + DatePart datePart = DATE_CONVERSION.get(timeUnit); + if (datePart == null) { + throw new UnsupportedOperationException("Chrono unit %s is not supported".formatted(timeUnit)); + } + + // we can now safely cast to Field of type Date + Field startDate = DSL.field(DSL.name(startDateColumn.getName()), Date.class); + return DSL.dateDiff(datePart, startDate, endDateExpression); + } + + @NotNull + private static Field daterange(Column startColumn, Column endColumn, String bounds) { + return DSL.function( + "daterange", + Object.class, + DSL.field(DSL.name(startColumn.getName())), + DSL.field(DSL.name(endColumn.getName())), + DSL.val(bounds) + ); } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java index cdfde5aa06..9e5ebab7df 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java @@ -1,14 +1,16 @@ package com.bakdata.conquery.sql.conversion.dialect; import java.sql.Date; +import java.time.temporal.ChronoUnit; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.models.ColumnDateRange; import org.jooq.Condition; -import org.jooq.DatePart; import org.jooq.Field; +import org.jooq.Name; import org.jooq.Record; import org.jooq.Table; import org.jooq.TableOnConditionStep; @@ -21,39 +23,26 @@ public interface SqlFunctionProvider { String DEFAULT_DATE_FORMAT = "yyyy-mm-dd"; - - Condition dateRestriction(Field dateRestrictionColumn, Field validityDateColumn); - /** - * @return A daterange for a date restriction. 
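The Javadoc added just below pins down the overlap rule the dialects must implement. As an illustration only, here is that rule written out in jOOQ for a start/end representation; the column names are invented, and PostgreSQL itself keeps using the && range operator instead:

	// Illustration of the documented rule: the ranges overlap iff
	// restriction.start <= validity.end AND restriction.end >= validity.start.
	Field<Date> restrictionStart = DSL.field(DSL.name("date_restriction_start"), Date.class);
	Field<Date> restrictionEnd = DSL.field(DSL.name("date_restriction_end"), Date.class);
	Field<Date> validityStart = DSL.field(DSL.name("concept_validity_date_start"), Date.class);
	Field<Date> validityEnd = DSL.field(DSL.name("concept_validity_date_end"), Date.class);

	Condition overlaps = restrictionStart.le(validityEnd).and(restrictionEnd.ge(validityStart));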
+ * A date restriction condition is true if holds: + * dateRestrictionStart <= validityDateEnd and dateRestrictionEnd >= validityDateStart */ - Field daterange(CDateRange dateRestriction); + Condition dateRestriction(ColumnDateRange dateRestrictionRange, ColumnDateRange validityFieldRange); - /** - * @return A daterange for an existing column. - */ - Field daterange(Column column); + ColumnDateRange daterange(CDateRange dateRestriction); - default Field toDate(String dateColumn) { - return DSL.toDate(dateColumn, DEFAULT_DATE_FORMAT); - } + ColumnDateRange daterange(ValidityDate validityDate, String conceptLabel); - default Field dateDistance(DatePart timeUnit, Date endDate, Column startDateColumn) { - if (startDateColumn.getType() != MajorTypeId.DATE) { - throw new UnsupportedOperationException("Can't calculate date distance to column of type " - + startDateColumn.getType()); - } - // we can now safely cast to Field of type Date - Field startDate = DSL.field(startDateColumn.getName(), Date.class); - return DSL.dateDiff(timeUnit, startDate, endDate); - } + Field daterangeString(ColumnDateRange columnDateRange); + + Field dateDistance(ChronoUnit datePart, Column startDateColumn, Date endDateExpression); - default Condition in(String columnName, String[] values) { + default Condition in(Name columnName, String[] values) { return DSL.field(columnName) .in(values); } - default Field first(String columnName) { + default Field first(Name columnName) { // TODO: this is just a temporary placeholder return DSL.field(columnName); } @@ -80,4 +69,8 @@ default TableOnConditionStep fullOuterJoin( .on(leftPartPrimaryColumn.eq(rightPartPrimaryColumn)); } + default Field toDate(String dateExpression) { + return DSL.toDate(dateExpression, DEFAULT_DATE_FORMAT); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java index 1b645ba0fa..cdc40c3395 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java @@ -4,6 +4,8 @@ import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; import com.bakdata.conquery.sql.conversion.Converter; import org.jooq.Condition; +import org.jooq.Name; +import org.jooq.impl.DSL; /** * Converts a {@link com.bakdata.conquery.apiv1.query.concept.filter.FilterValue} @@ -13,9 +15,9 @@ */ public interface FilterConverter> extends Converter { - static String getColumnName(FilterValue filter) { + static Name getColumnName(FilterValue filter) { // works for now but we might have to distinguish later if we encounter non-SingleColumnFilters - return ((SingleColumnFilter) filter.getFilter()).getColumn().getName(); + return DSL.name(((SingleColumnFilter) filter.getFilter()).getColumn().getName()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java index 9dd3367ed8..7a71880816 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java @@ -5,19 +5,29 @@ import com.bakdata.conquery.apiv1.query.ConceptQuery; import com.bakdata.conquery.sql.conversion.NodeConverter; import 
com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.selects.Selects; import com.bakdata.conquery.sql.conversion.context.step.QueryStep; import com.bakdata.conquery.sql.conversion.context.step.QueryStepTransformer; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.models.ColumnDateRange; +import org.jooq.Field; import org.jooq.Record; import org.jooq.Select; public class ConceptQueryConverter implements NodeConverter { + public static final String FINAL_VALIDITY_DATE_COLUMN_NAME = "dates"; private final QueryStepTransformer queryStepTransformer; public ConceptQueryConverter(QueryStepTransformer queryStepTransformer) { this.queryStepTransformer = queryStepTransformer; } + @Override + public Class getConversionClass() { + return ConceptQuery.class; + } + @Override public ConversionContext convert(ConceptQuery node, ConversionContext context) { @@ -27,7 +37,7 @@ public ConversionContext convert(ConceptQuery node, ConversionContext context) { QueryStep preFinalStep = contextAfterConversion.getQuerySteps().iterator().next(); QueryStep finalStep = QueryStep.builder() .cteName(null) // the final QueryStep won't be converted to a CTE - .selects(preFinalStep.getQualifiedSelects()) + .selects(this.toFinalSelects(preFinalStep, context)) .fromTable(QueryStep.toTableLike(preFinalStep.getCteName())) .conditions(preFinalStep.getConditions()) .predecessors(List.of(preFinalStep)) @@ -37,8 +47,22 @@ public ConversionContext convert(ConceptQuery node, ConversionContext context) { return context.withFinalQuery(finalQuery); } - @Override - public Class getConversionClass() { - return ConceptQuery.class; + /** + * @return The final selects containing the final validity date, if present, as a string aggregation field. 
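For the PostgreSQL dialect the string rendering referred to here is just a rename of the single daterange column, which then prints in the [2012-01-01,2012-01-02) form visible in the expected CSVs further down. A sketch with an illustrative source column name:

	// Sketch: PostgreSQL already carries the validity date as one daterange column,
	// so the final step only re-exposes it under the fixed result name "dates".
	Field<Object> preFinalValidityDate = DSL.field(DSL.name("concept_validity_date"));
	Field<Object> finalDates = preFinalValidityDate.as("dates");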
+ */ + private Selects toFinalSelects(QueryStep preFinalStep, ConversionContext context) { + + Selects finalSelects = preFinalStep.getQualifiedSelects(); + + if (finalSelects.getValidityDate().isEmpty()) { + return finalSelects; + } + + SqlFunctionProvider functionProvider = context.getSqlDialect().getFunction(); + Field finalValidityDateSelect = functionProvider.daterangeString(finalSelects.getValidityDate().get()) + .as(FINAL_VALIDITY_DATE_COLUMN_NAME); + + return finalSelects.withValidityDate(ColumnDateRange.of(finalValidityDateSelect)); } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java index d70800c39f..52339db897 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java @@ -2,27 +2,18 @@ import java.sql.Date; import java.time.LocalDate; -import java.time.temporal.ChronoUnit; -import java.util.Map; import java.util.Objects; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.DateDistanceSelect; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; -import org.jooq.DatePart; import org.jooq.Field; public class DateDistanceConverter implements SelectConverter { - private static final Map DATE_CONVERSION = Map.of( - ChronoUnit.DECADES, DatePart.DECADE, - ChronoUnit.YEARS, DatePart.YEAR, - ChronoUnit.DAYS, DatePart.DAY, - ChronoUnit.MONTHS, DatePart.MONTH, - ChronoUnit.CENTURIES, DatePart.CENTURY - ); private final DateNowSupplier dateNowSupplier; public DateDistanceConverter(DateNowSupplier dateNowSupplier) { @@ -31,19 +22,18 @@ public DateDistanceConverter(DateNowSupplier dateNowSupplier) { @Override public Field convert(DateDistanceSelect select, ConversionContext context) { - DatePart timeUnit = DATE_CONVERSION.get(select.getTimeUnit()); - if (timeUnit == null) { - throw new UnsupportedOperationException("Chrono unit %s is not supported".formatted(select.getTimeUnit())); - } - Column startDateColumn = select.getColumn(); - Date endDate = getEndDate(context); + Column startDateColumn = select.getColumn(); if (startDateColumn.getType() != MajorTypeId.DATE) { throw new UnsupportedOperationException("Can't calculate date distance to column of type " + startDateColumn.getType()); } - return context.getSqlDialect().getFunction().dateDistance(timeUnit, endDate, startDateColumn) - .as(select.getLabel()); + + SqlFunctionProvider functionProvider = context.getSqlDialect().getFunction(); + Date endDate = getEndDate(context); + + return functionProvider.dateDistance(select.getTimeUnit(), startDateColumn, endDate) + .as(select.getLabel()); } private Date getEndDate(ConversionContext context) { diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java index 50a66ddb03..af1593bf91 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java @@ -4,12 
+4,15 @@ import com.bakdata.conquery.sql.conversion.context.ConversionContext; import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; import org.jooq.Field; +import org.jooq.Name; +import org.jooq.impl.DSL; public class FirstValueConverter implements SelectConverter { public Field convert(FirstValueSelect select, ConversionContext context) { SqlFunctionProvider fn = context.getSqlDialect().getFunction(); - return fn.first(select.getColumn().getName()); + Name columnName = DSL.name(select.getColumn().getName()); + return fn.first(columnName); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java b/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java new file mode 100644 index 0000000000..e0faabdf4f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java @@ -0,0 +1,84 @@ +package com.bakdata.conquery.sql.models; + +import java.util.List; + +import lombok.Getter; +import org.jooq.Field; +import org.jooq.impl.DSL; + +@Getter +public class ColumnDateRange { + + private static final String DATE_RESTRICTION_COLUMN_NAME = "date_restriction"; + private static final String VALIDITY_DATE_COLUMN_NAME_SUFFIX = "_validity_date"; + private static final String START_SUFFIX = "_start"; + private static final String END_SUFFIX = "_end"; + + private final boolean isEmpty; + private final Field range; + private final Field start; + private final Field end; + + private ColumnDateRange(boolean isEmpty, Field range, Field startColumn, Field endColumn) { + this.isEmpty = isEmpty; + this.range = range; + this.start = startColumn; + this.end = endColumn; + } + + public static ColumnDateRange of(Field rangeColumn) { + return new ColumnDateRange(false, rangeColumn, null, null); + } + + public static ColumnDateRange of(Field startColumn, Field endColumn) { + return new ColumnDateRange(true, null, startColumn, endColumn); + } + + public ColumnDateRange asDateRestrictionRange() { + return this.as(DATE_RESTRICTION_COLUMN_NAME); + } + + public ColumnDateRange asValidityDateRange(String alias) { + return this.as(alias + VALIDITY_DATE_COLUMN_NAME_SUFFIX); + } + + /** + * @return True if this {@link ColumnDateRange} consists of only 1 column. + * False if it consists of a start and end field. 
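To make the aliasing and qualification rules of this class concrete, a short usage sketch; the concept label matches the test specs below, while the CTE name is made up:

	// Usage sketch: a start/end pair first gets aliased as a validity date ...
	ColumnDateRange validityDate = ColumnDateRange.of(
			DSL.field(DSL.name("datum_start")),
			DSL.field(DSL.name("datum_end"))
	).asValidityDateRange("geschlecht_select");
	// -> aliased as geschlecht_select_validity_date_start / geschlecht_select_validity_date_end

	// ... and can later be re-qualified with the CTE that exposes it.
	ColumnDateRange qualified = validityDate.qualify("concept_geschlecht_select_preprocessing");
	// -> "concept_geschlecht_select_preprocessing"."geschlecht_select_validity_date_start", ...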
+ */ + public boolean isSingleColumnRange() { + return this.range != null; + } + + public List> toFields() { + if (isSingleColumnRange()) { + return List.of(this.range); + } + return List.of(this.start, this.end); + } + + public ColumnDateRange qualify(String qualifier) { + if (isSingleColumnRange()) { + return ColumnDateRange.of(mapFieldOntoQualifier(getRange(), qualifier)); + } + return ColumnDateRange.of( + mapFieldOntoQualifier(getStart(), qualifier), + mapFieldOntoQualifier(getEnd(), qualifier) + ); + } + + private ColumnDateRange as(String alias) { + if (isSingleColumnRange()) { + return ColumnDateRange.of(this.range.as(alias)); + } + return ColumnDateRange.of( + this.start.as(alias + START_SUFFIX), + this.end.as(alias + END_SUFFIX) + ); + } + + private Field mapFieldOntoQualifier(Field field, String qualifier) { + return DSL.field(DSL.name(qualifier, field.getName())); + } + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java index bd2fd0f5bc..d40a970169 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java @@ -143,8 +143,10 @@ public Stream sqlTests(SqlDialect sqlDialect, SqlConnectorConfig sq Stream paths = Files.walk(testRootDir); List dynamicTestStream = paths.filter(path -> !Files.isDirectory(path) && path.toString().endsWith(".json")) - .map(path -> SqlIntegrationTest.fromPath(path, sqlDialect, sqlConfig)) - .map(test -> DynamicTest.dynamicTest(test.getTestSpec().getLabel(), test)).toList(); + .map(path -> SqlIntegrationTest.fromPath(path, sqlDialect, sqlConfig)) + .filter(sqlIntegrationTest -> sqlIntegrationTest.getTestSpec() + .supportsDialects(sqlConfig.getDialect())) + .map(test -> DynamicTest.dynamicTest(test.getTestSpec().getLabel(), test)).toList(); return dynamicTestStream.stream(); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java index 83bc210090..ba61fe0173 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java @@ -14,13 +14,12 @@ import com.bakdata.conquery.integration.json.ConqueryTestSpec; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.models.config.Dialect; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.exceptions.JSONException; import com.bakdata.conquery.models.query.results.EntityResult; -import com.bakdata.conquery.models.query.results.SinglelineEntityResult; import com.bakdata.conquery.sql.conquery.SqlManagedQuery; -import com.bakdata.conquery.sql.execution.SqlEntityResult; import com.bakdata.conquery.sql.execution.SqlExecutionResult; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; @@ -41,6 +40,8 @@ public class SqlIntegrationTestSpec extends ConqueryTestSpec supportedDialects; + @NotNull @JsonProperty("query") private JsonNode rawQuery; @@ -65,6 +66,13 @@ public class SqlIntegrationTestSpec extends ConqueryTestSpec validityDates = List.of(val0, val1, val2); connector.setColumn(column); connector.setConcept(concept); 
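The aggregator and query-plan hunks earlier in this patch all rely on the same contract: a ValidityDate resolves the event's CDateRange itself and signals a missing date with null, so callers no longer probe the raw Column. The class body is not part of the hunks shown here, so the following only sketches that contract; the start/end handling in particular is an assumption:

	// Contract sketch, not the actual implementation.
	public CDateRange getValidityDate(int event, Bucket bucket) {
		if (getColumn() != null) {
			// single DATE or DATE_RANGE column: a missing value means "no validity date"
			return bucket.has(event, getColumn()) ? bucket.getAsDateRange(event, getColumn()) : null;
		}
		// start/end column pair (see getStartColumn()/getEndColumn() in the dialect code above):
		// the real implementation combines both columns into one closed range and
		// returns null if either side is missing; the details here are assumed.
		return null;
	}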
diff --git a/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java b/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java index a27126bd39..d2e9409287 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java @@ -77,8 +77,7 @@ public static TreeConcept createConcept(CentralRegistry registry, Dataset datase concept.setConnectors(List.of(connector)); - ValidityDate valDate = new ValidityDate(); - valDate.setColumn(dateColumn); + ValidityDate valDate = ValidityDate.create(dateColumn); valDate.setConnector(connector); valDate.setLabel("valLabel"); valDate.setName("valName"); diff --git a/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java b/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java index 60a23aebb8..de4137ac1f 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java @@ -77,8 +77,7 @@ public static TreeConcept createConcept(CentralRegistry registry, Dataset datase concept.setConnectors(List.of(connector)); - ValidityDate valDate = new ValidityDate(); - valDate.setColumn(dateColumn); + ValidityDate valDate = ValidityDate.create(dateColumn); valDate.setConnector(connector); valDate.setLabel("valLabel"); valDate.setName("valName"); diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv b/backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv new file mode 100644 index 0000000000..212025dec4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2012-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11,"" +6,2012-11-11,"" +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv new file mode 100644 index 0000000000..c34539ab81 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv @@ -0,0 +1,3 @@ +pid,datum +1,"[2012-01-01,2012-01-02)" +3,"[2012-11-10,2012-11-11)" diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json b/backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json new file mode 100644 index 0000000000..fbb6d2e1d0 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json @@ -0,0 +1,84 @@ +{ + "label": "Date restriction query with simple date validity date and only max date restriction set", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "max": "2012-12-31" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", 
+ "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/date_restriction/simple_date/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv b/backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv new file mode 100644 index 0000000000..212025dec4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv @@ -0,0 +1,9 @@ +pid,datum,geschlecht +1,2012-01-01,"f" +2,2010-07-15,"m" +3,2012-11-10,"f" +4,2012-11-11,"m" +5,2007-11-11,"" +6,2012-11-11,"" +7,2012-11-11,"mf" +8,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv new file mode 100644 index 0000000000..c34539ab81 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv @@ -0,0 +1,3 @@ +pid,datum +1,"[2012-01-01,2012-01-02)" +3,"[2012-11-10,2012-11-11)" diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json b/backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json new file mode 100644 index 0000000000..76a93dafde --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json @@ -0,0 +1,84 @@ +{ + "label": "Date restriction query with simple date validity date and only min date restriction set", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2012-01-01" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/date_restriction/simple_date/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/tutorial/mimic_iii_demo/age_gender.ipynb b/tutorial/mimic_iii_demo/age_gender.ipynb index f1a2f40929..2824cfb129 100644 --- a/tutorial/mimic_iii_demo/age_gender.ipynb +++ b/tutorial/mimic_iii_demo/age_gender.ipynb @@ -346,7 +346,7 @@ "source": [ "## Preprocessing and Upload\n", "\n", - "The next tutorial is to [Preprocess and 
Upload](./preprocess_and_upload.ipynb) all data and meta data produced from this notebook." + "The next tutorial is to [Preprocess and Upload](./preprocess_and_upload.ipynb) all data and meta data produced from this notebook.\n" ] } ], diff --git a/tutorial/mimic_iii_demo/icd9.ipynb b/tutorial/mimic_iii_demo/icd9.ipynb index 547875081c..69656e305e 100644 --- a/tutorial/mimic_iii_demo/icd9.ipynb +++ b/tutorial/mimic_iii_demo/icd9.ipynb @@ -470,7 +470,7 @@ "source": [ "## Preprocessing and Upload\n", "\n", - "The next tutorial is to [Preprocess and Upload](./preprocess_and_upload.ipynb) all data and meta data produced from this notebook." + "The next tutorial is to [Preprocess and Upload](./preprocess_and_upload.ipynb) all data and meta data produced from this notebook.\n" ] } ], From 16150e0952a342949cbfa651d9c733cfdd779518 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 15 Aug 2023 15:57:23 +0200 Subject: [PATCH 52/96] create a RejectionHandler that blocks the caller --- .../xodus/stores/SerializingStore.java | 9 ++--- .../util/CallerBlocksRejectionHandler.java | 33 +++++++++++++++++++ 2 files changed, 38 insertions(+), 4 deletions(-) create mode 100644 backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 46c85356c8..b86de46226 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -21,6 +21,7 @@ import com.bakdata.conquery.io.storage.Store; import com.bakdata.conquery.models.config.XodusStoreFactory; import com.bakdata.conquery.models.exceptions.ValidatorHelper; +import com.bakdata.conquery.util.CallerBlocksRejectionHandler; import com.bakdata.conquery.util.io.FileUtil; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -332,11 +333,11 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final ThreadPoolExecutor executorService = new ThreadPoolExecutor(5, 5, - 60L, TimeUnit.SECONDS, - new ArrayBlockingQueue<>(5), + final ThreadPoolExecutor executorService = new ThreadPoolExecutor(10, 10, + 0, TimeUnit.SECONDS, + new ArrayBlockingQueue<>(50), Executors.defaultThreadFactory(), - new ThreadPoolExecutor.CallerRunsPolicy() + new CallerBlocksRejectionHandler(TimeUnit.MINUTES.toMillis(5)) ); store.forEach((k, v) -> { diff --git a/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java b/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java new file mode 100644 index 0000000000..b5c8a3355e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java @@ -0,0 +1,33 @@ +package com.bakdata.conquery.util; + +import java.util.concurrent.RejectedExecutionException; +import java.util.concurrent.RejectedExecutionHandler; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import lombok.Data; + +@Data +public class CallerBlocksRejectionHandler implements RejectedExecutionHandler { + + private final long timeoutMillis; + + @Override + public void rejectedExecution(Runnable r, 
ThreadPoolExecutor executor) { + if (executor.isShutdown()){ + return; + } + + try { + final boolean success = executor.getQueue().offer(r, getTimeoutMillis(), TimeUnit.MILLISECONDS); + + if(!success){ + throw new RejectedExecutionException("Could not submit within specified timeout."); + } + } + catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RejectedExecutionException("Thread was interrupted."); + } + } +} From 9b26190aa15f53d630f5e19334f9fe84949170b3 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Tue, 15 Aug 2023 16:24:14 +0200 Subject: [PATCH 53/96] reduce core pool size and queue size, also measure time spent waiting --- .../io/storage/xodus/stores/SerializingStore.java | 9 ++++++--- .../conquery/util/CallerBlocksRejectionHandler.java | 10 ++++++++-- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index b86de46226..9adf10fd14 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -36,6 +36,7 @@ import lombok.SneakyThrows; import lombok.ToString; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.time.DurationFormatUtils; import org.jetbrains.annotations.NotNull; /** @@ -332,12 +333,13 @@ private static String sanitiseFileName(@NotNull String name) { public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); + final CallerBlocksRejectionHandler rejectionHandler = new CallerBlocksRejectionHandler(TimeUnit.MINUTES.toMillis(5)); - final ThreadPoolExecutor executorService = new ThreadPoolExecutor(10, 10, + final ThreadPoolExecutor executorService = new ThreadPoolExecutor(2, 2, 0, TimeUnit.SECONDS, - new ArrayBlockingQueue<>(50), + new ArrayBlockingQueue<>(10), Executors.defaultThreadFactory(), - new CallerBlocksRejectionHandler(TimeUnit.MINUTES.toMillis(5)) + rejectionHandler ); store.forEach((k, v) -> { @@ -390,6 +392,7 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { log.debug("Still waiting for {} to load.", this); } + log.debug("Waited {} on workers.", DurationFormatUtils.formatDurationHMS(rejectionHandler.getWaitedMillis().sum())); // Print some statistics final int total = result.getTotalProcessed(); log.debug( diff --git a/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java b/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java index b5c8a3355e..b6ce4b4556 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java @@ -4,6 +4,7 @@ import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; import lombok.Data; @@ -11,17 +12,22 @@ public class CallerBlocksRejectionHandler implements RejectedExecutionHandler { private final long timeoutMillis; + private final LongAdder waitedMillis = new LongAdder(); @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { - if (executor.isShutdown()){ + if 
(executor.isShutdown()) { return; } try { + long before = System.currentTimeMillis(); final boolean success = executor.getQueue().offer(r, getTimeoutMillis(), TimeUnit.MILLISECONDS); + long after = System.currentTimeMillis(); - if(!success){ + waitedMillis.add(after - before); + + if (!success) { throw new RejectedExecutionException("Could not submit within specified timeout."); } } From 89ad58463f9a3fe21c7fb3c7b14f39cdf93c5190 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 12:02:08 +0200 Subject: [PATCH 54/96] use absolute position, fix bug with laying a concept within another --- .../DropzoneBetweenElements.tsx | 22 ++++----- .../form-components/DropzoneList.tsx | 46 ++++++++++++------- .../form-concept-group/FormConceptGroup.tsx | 32 ++++++------- 3 files changed, 52 insertions(+), 48 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index d6d9a0511a..4d54e98d08 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -7,6 +7,7 @@ interface Props { onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; acceptedDropTypes: string[]; lastElement?: boolean; + top?: number; } const RootHeightBase = 30; @@ -15,13 +16,14 @@ const Root = styled("div")` width: 100%; left: 0; right: 0; - position: relative; + position: absolute; border-radius: ${({ theme }) => theme.borderRadius}; `; -const Line = styled("div")` +const Line = styled("div")<{show:boolean}>` overflow: hidden; display: block; + visibility: ${({show}) => show ? "visible" : "hidden"}; background-color: ${({ theme }) => theme.col.blueGrayDark}; margin: 1px 0; height: ${LineHeight}px; @@ -34,6 +36,7 @@ const DropzoneBetweenElements = < acceptedDropTypes, onDrop, lastElement, + top }: Props) => { const [{ isOver }, addZoneRef] = useDrop({ accept: acceptedDropTypes, @@ -44,24 +47,17 @@ const DropzoneBetweenElements = < }), }); - const rootHeightMultiplier = lastElement ? 0.5 : 1; - const rootDefaultMarginTop = (lastElement ? -15 : -5) - LineHeight; - const rootOverMarginTop = lastElement ? -23 : -10; - const rootDefaultTop = lastElement ? -5 : -10; - const rootOverTop = (lastElement ? -2 : -15) - LineHeight; + const rootHeightMultiplier = lastElement ? 0.7 : 1; return ( <> - {isOver && } + diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 09e653a93b..2e890ef35e 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -19,7 +19,7 @@ import DropzoneBetweenElements from "./DropzoneBetweenElements"; const ListItem = styled("div")` position: relative; - padding: 0 5px; + padding: 5px; box-shadow: 0 0 3px 0 rgba(0, 0, 0, 0.1); background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; @@ -36,6 +36,10 @@ const Row = styled("div")` align-items: center; `; +const ConceptContainer = styled("div")` + position: relative; +`; + interface PropsT { className?: string; label?: ReactNode; @@ -78,6 +82,24 @@ const DropzoneList = ( const showDropzone = (items && items.length === 0) || !disallowMultipleColumns; + function genItems(){ + return items.map((item, i) => ( + + {!disallowMultipleColumns && ( + + )} + + onDelete(i)} /> + {item} + + + )) + } + return (
@@ -90,30 +112,20 @@ const DropzoneList = ( {tooltip && } {items && items.length > 0 && ( -
- {items.map((item, i) => ( -
- {!disallowMultipleColumns && ( - - )} - - onDelete(i)} /> - {item} - -
- ))} + <> + { genItems()} + {!disallowMultipleColumns && ( )} -
+ + )}
{showDropzone && onImportLines && ( diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index bf381b6db3..9ad9dfb435 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -87,9 +87,7 @@ interface Props { }) => ReactNode; } -const DropzoneListItem = styled("div")` - margin-top: -20px; -`; +const DropzoneListItem = styled("div")``; const Row = styled("div")` display: flex; @@ -448,29 +446,27 @@ const FormConceptGroup = (props: Props) => { }} onDropConcept={(concept) => { let { valueIdx, conceptIdx } = editedFormQueryNodePosition; + let updatedValue = props.value; if (isMovedObject(concept)) { + const { movedFromFieldName, movedFromAndIdx, movedFromOrIdx } = concept.dragContext; valueIdx = - valueIdx > concept.dragContext.movedFromAndIdx + valueIdx > movedFromAndIdx ? valueIdx - 1 : valueIdx; - if (concept.dragContext.movedFromFieldName === props.fieldName) { - const updatedValue = - props.value[concept.dragContext.movedFromAndIdx].concepts + if (movedFromFieldName === props.fieldName) { + updatedValue = + updatedValue[movedFromAndIdx].concepts .length === 1 ? removeValue( - props.value, - concept.dragContext.movedFromAndIdx, + updatedValue, + movedFromAndIdx, ) : removeConcept( - props.value, - concept.dragContext.movedFromAndIdx, - concept.dragContext.movedFromOrIdx, + updatedValue, + movedFromAndIdx, + movedFromOrIdx, ); - return props.onChange( - setConceptProperties(updatedValue, valueIdx, conceptIdx, { - ids: [...concept.ids, ...editedNode.ids], - }), - ); + setEditedFormQueryNodePosition({ valueIdx, conceptIdx }); } else { if (exists(concept.dragContext.deleteFromOtherField)) { concept.dragContext.deleteFromOtherField(); @@ -478,7 +474,7 @@ const FormConceptGroup = (props: Props) => { } } props.onChange( - setConceptProperties(props.value, valueIdx, conceptIdx, { + setConceptProperties(updatedValue, valueIdx, conceptIdx, { ids: [...concept.ids, ...editedNode.ids], }), ); From 98e19d94e16ae141fdc8119250d4a46778273c8a Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 12:15:04 +0200 Subject: [PATCH 55/96] formatting --- .../DropzoneBetweenElements.tsx | 11 ++++---- .../form-components/DropzoneList.tsx | 26 +++++++++---------- .../form-concept-group/FormConceptGroup.tsx | 18 +++++-------- 3 files changed, 24 insertions(+), 31 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 4d54e98d08..7d46cd7cb7 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -20,10 +20,10 @@ const Root = styled("div")` border-radius: ${({ theme }) => theme.borderRadius}; `; -const Line = styled("div")<{show:boolean}>` +const Line = styled("div")<{ show: boolean }>` overflow: hidden; display: block; - visibility: ${({show}) => show ? "visible" : "hidden"}; + visibility: ${({ show }) => (show ? 
"visible" : "hidden")}; background-color: ${({ theme }) => theme.col.blueGrayDark}; margin: 1px 0; height: ${LineHeight}px; @@ -36,7 +36,7 @@ const DropzoneBetweenElements = < acceptedDropTypes, onDrop, lastElement, - top + top, }: Props) => { const [{ isOver }, addZoneRef] = useDrop({ accept: acceptedDropTypes, @@ -51,12 +51,11 @@ const DropzoneBetweenElements = < return ( <> - + diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 2e890ef35e..2222525592 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -82,7 +82,7 @@ const DropzoneList = ( const showDropzone = (items && items.length === 0) || !disallowMultipleColumns; - function genItems(){ + function genItems() { return items.map((item, i) => ( {!disallowMultipleColumns && ( @@ -97,7 +97,7 @@ const DropzoneList = ( {item} - )) + )); } return ( @@ -113,18 +113,18 @@ const DropzoneList = ( {items && items.length > 0 && ( <> - { genItems()} + {genItems()} - - {!disallowMultipleColumns && ( - - )} - + + {!disallowMultipleColumns && ( + + )} + )}
diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 9ad9dfb435..1ef51b1073 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -448,21 +448,15 @@ const FormConceptGroup = (props: Props) => { let { valueIdx, conceptIdx } = editedFormQueryNodePosition; let updatedValue = props.value; if (isMovedObject(concept)) { - const { movedFromFieldName, movedFromAndIdx, movedFromOrIdx } = concept.dragContext; - valueIdx = - valueIdx > movedFromAndIdx - ? valueIdx - 1 - : valueIdx; + const { movedFromFieldName, movedFromAndIdx, movedFromOrIdx } = + concept.dragContext; + valueIdx = valueIdx > movedFromAndIdx ? valueIdx - 1 : valueIdx; if (movedFromFieldName === props.fieldName) { updatedValue = - updatedValue[movedFromAndIdx].concepts - .length === 1 - ? removeValue( - updatedValue, - movedFromAndIdx, - ) + updatedValue[movedFromAndIdx].concepts.length === 1 + ? removeValue(updatedValue, movedFromAndIdx) : removeConcept( - updatedValue, + updatedValue, movedFromAndIdx, movedFromOrIdx, ); From 4de97258140e9b41c988cf0819ec4f6ec4b93a6f Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 12:17:01 +0200 Subject: [PATCH 56/96] inline --- .../form-components/DropzoneList.tsx | 34 ++++++++----------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 2222525592..7af7f1bf0c 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -82,24 +82,6 @@ const DropzoneList = ( const showDropzone = (items && items.length === 0) || !disallowMultipleColumns; - function genItems() { - return items.map((item, i) => ( - - {!disallowMultipleColumns && ( - - )} - - onDelete(i)} /> - {item} - - - )); - } - return (
@@ -113,7 +95,21 @@ const DropzoneList = ( {items && items.length > 0 && ( <> - {genItems()} + {items.map((item, i) => ( + + {!disallowMultipleColumns && ( + + )} + + onDelete(i)} /> + {item} + + + ))} {!disallowMultipleColumns && ( From 6b5e7e9b2e07fc5348af24ab436b36b21af1c178 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 16 Aug 2023 12:25:40 +0200 Subject: [PATCH 57/96] log attrs for debugging --- .../java/com/bakdata/conquery/commands/MigrateCommand.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java b/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java index f63c78daf9..e465501fc9 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java @@ -105,11 +105,13 @@ public void configure(Subparser subparser) { @Override protected void run(io.dropwizard.setup.Environment environment, Namespace namespace, ConqueryConfig configuration) throws Exception { + log.debug("Attrs: {}", namespace.getAttrs()); + final File inStoreDirectory = namespace.get("in"); final File outStoreDirectory = namespace.get("out"); - final boolean inGzip = namespace.getBoolean("in-gzip"); - final boolean outGzip = namespace.getBoolean("out-gzip"); + final boolean inGzip = namespace.get("in-gzip"); + final boolean outGzip = namespace.get("out-gzip"); final long logsize = ((XodusStoreFactory) configuration.getStorage()).getXodus().getLogFileSize().toKilobytes(); From a0c263b7c36262cb216fc7661ea235fa39b48ab8 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 16 Aug 2023 12:59:12 +0200 Subject: [PATCH 58/96] fixes access to attrs --- .../java/com/bakdata/conquery/commands/MigrateCommand.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java b/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java index e465501fc9..5a5484184d 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/MigrateCommand.java @@ -105,13 +105,11 @@ public void configure(Subparser subparser) { @Override protected void run(io.dropwizard.setup.Environment environment, Namespace namespace, ConqueryConfig configuration) throws Exception { - log.debug("Attrs: {}", namespace.getAttrs()); - final File inStoreDirectory = namespace.get("in"); final File outStoreDirectory = namespace.get("out"); - final boolean inGzip = namespace.get("in-gzip"); - final boolean outGzip = namespace.get("out-gzip"); + final boolean inGzip = namespace.get("in_gzip"); + final boolean outGzip = namespace.get("out_gzip"); final long logsize = ((XodusStoreFactory) configuration.getStorage()).getXodus().getLogFileSize().toKilobytes(); From 848adeeed788b6b9c706cb31fc78527999f6aaef Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 16 Aug 2023 13:02:56 +0200 Subject: [PATCH 59/96] simplify code in TreeConcept#findMostSpecificChild and improve logging --- .../datasets/concepts/tree/TreeConcept.java | 42 ++++++++++--------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java 
b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java index d500616780..92640e3213 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java @@ -152,35 +152,39 @@ private ConceptTreeChild findMostSpecificChild(String stringValue, CalculatedVal continue; } - if (match == null) { - match = n; + if (match != null) { + failed = true; + log.error("Value '{}' matches the two nodes {} and {} in the tree {} (row={}))" + , stringValue, match.getId(), n.getId(), n.getConcept().getId(), rowMap.getValue()); + continue; + } - if (n.getChildIndex() != null) { - ConceptTreeChild specificChild = n.getChildIndex().findMostSpecificChild(stringValue); + match = n; - if (specificChild != null) { - match = specificChild; - } - } + if (n.getChildIndex() == null) { + continue; } - else { - failed = true; - log.error("Value '{}' matches the two nodes {} and {} in the tree {} (row={}))" - , stringValue, match.getLabel(), n.getLabel(), n.getConcept().getLabel(), rowMap.getValue()); - // TODO Why don't we return null here and drop the `failed`-flag? + + final ConceptTreeChild specificChild = n.getChildIndex().findMostSpecificChild(stringValue); + + if (specificChild == null) { + continue; } + + match = specificChild; } if (failed) { return null; } - else if (match != null) { - best = match; - currentList = match.getChildren(); - } - else { - break; + + // Nothing better found below, so return best-so far match + if (match == null) { + return best; } + + best = match; + currentList = match.getChildren(); } return best; } From 8ea002c88457b55c9d36e0b2fbfcea44b96ddc7d Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 14:11:41 +0200 Subject: [PATCH 60/96] fix highlighting in tooltip --- frontend/src/js/tooltip/Tooltip.tsx | 74 +++++++++++++++++++++++++---- 1 file changed, 64 insertions(+), 10 deletions(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index 49bd6f8621..7c1e1b0188 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -1,9 +1,17 @@ import styled from "@emotion/styled"; import { faThumbtack, IconDefinition } from "@fortawesome/free-solid-svg-icons"; -import { ReactNode } from "react"; +import { + Children, + DetailedHTMLProps, + ElementType, + HTMLAttributes, + ReactElement, + ReactNode, +} from "react"; import Highlighter from "react-highlight-words"; import { useTranslation } from "react-i18next"; import Markdown from "react-markdown"; +import { ReactMarkdownProps } from "react-markdown/lib/complex-types"; import { useDispatch, useSelector } from "react-redux"; import remarkGfm from "remark-gfm"; @@ -158,6 +166,52 @@ const ConceptLabel = ({ ); }; +function isReactElement(element: any): element is ReactElement { + return ( + element && + typeof element === "object" && + element.hasOwnProperty("type") && + element.hasOwnProperty("props") + ); +} + +function highlight( + words: string[], + Element: Omit< + DetailedHTMLProps, HTMLElement>, + "ref" + > & + ReactMarkdownProps, +): ReactElement | null { + let children = Children.map(Element.children, (child): ReactElement => { + if (!child) return <>; + if (typeof child === "string") { + return HighlightedText({ words, text: child }); + } + if (typeof child === "number" || typeof child === "boolean") { + return <>{child}; + } + if (isReactElement(child)) { + if (Array.isArray(Element)) { + 
return child; + } + let TagName = child.type as ElementType; + return ( + + {highlight(words, child.props.children)} + + ); + } + return <>{child}; + }); + + if (Array.isArray(Element) || !Element.node) { + return <>{children}; + } + let TagName = Element.node?.tagName as ElementType; + return {children}; +} + const Tooltip = () => { const words = useSelector( (state) => state.conceptTrees.search.words || [], @@ -236,15 +290,15 @@ const Tooltip = () => { searchHighlight(node) - } - } + components={{ + // TODO: Won't work anymore with the latest react-markdown, because + // Try to use another package for highlighting that doesn't depend on a string + // or just highlight ourselves + p: (a) => highlight(words, a), + td: (a) => highlight(words, a), + b: (a) => highlight(words, a), + th: (a) => highlight(words, a), + }} > {info.value} From da7c6094b07a6864cf1730edc60627eb6fc87a99 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 14:27:12 +0200 Subject: [PATCH 61/96] Simplify code --- frontend/src/js/tooltip/Tooltip.tsx | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index 7c1e1b0188..d117729f56 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -6,7 +6,9 @@ import { ElementType, HTMLAttributes, ReactElement, + ReactFragment, ReactNode, + ReactPortal, } from "react"; import Highlighter from "react-highlight-words"; import { useTranslation } from "react-i18next"; @@ -166,9 +168,10 @@ const ConceptLabel = ({ ); }; -function isReactElement(element: any): element is ReactElement { +function isReactElement( + element: ReactFragment | ReactElement | ReactPortal | boolean | number, +): element is ReactElement { return ( - element && typeof element === "object" && element.hasOwnProperty("type") && element.hasOwnProperty("props") @@ -188,13 +191,7 @@ function highlight( if (typeof child === "string") { return HighlightedText({ words, text: child }); } - if (typeof child === "number" || typeof child === "boolean") { - return <>{child}; - } if (isReactElement(child)) { - if (Array.isArray(Element)) { - return child; - } let TagName = child.type as ElementType; return ( From 8dfed4fbe8211cc9def7d55e7b400b2c5a0cf176 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:27:24 +0200 Subject: [PATCH 62/96] move multithreaded reading into separate class, hopefully avoiding a lot of references --- .../xodus/stores/SerializingStore.java | 186 ++++++++++++------ 1 file changed, 123 insertions(+), 63 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 9adf10fd14..24880ff5a9 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -7,10 +7,14 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collection; +import java.util.List; import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import 
java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; @@ -21,7 +25,6 @@ import com.bakdata.conquery.io.storage.Store; import com.bakdata.conquery.models.config.XodusStoreFactory; import com.bakdata.conquery.models.exceptions.ValidatorHelper; -import com.bakdata.conquery.util.CallerBlocksRejectionHandler; import com.bakdata.conquery.util.io.FileUtil; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -32,11 +35,11 @@ import jetbrains.exodus.ArrayByteIterable; import jetbrains.exodus.ByteIterable; import lombok.Data; +import lombok.NoArgsConstructor; import lombok.NonNull; import lombok.SneakyThrows; import lombok.ToString; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.time.DurationFormatUtils; import org.jetbrains.annotations.NotNull; /** @@ -333,66 +336,28 @@ private static String sanitiseFileName(@NotNull String name) { public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final CallerBlocksRejectionHandler rejectionHandler = new CallerBlocksRejectionHandler(TimeUnit.MINUTES.toMillis(5)); - - final ThreadPoolExecutor executorService = new ThreadPoolExecutor(2, 2, - 0, TimeUnit.SECONDS, - new ArrayBlockingQueue<>(10), - Executors.defaultThreadFactory(), - rejectionHandler - ); - - store.forEach((k, v) -> { - executorService.submit(() -> { - - result.incrTotalProcessed(); - - // Try to read the key first - final KEY key = getDeserializedAndDumpFailed( - k, - this::readKey, - () -> new String(k.getBytesUnsafe()), - v, - "Could not parse key [{}]" - ); - if (key == null) { - unreadables.add(k); - result.incrFailedKeys(); - return; - } - // Try to read the value - final VALUE value = getDeserializedAndDumpFailed( - v, - this::readValue, - key::toString, - v, - "Could not parse value for key [{}]" - ); - - if (value == null) { - unreadables.add(k); - result.incrFailedValues(); - return; - } + final int nWorkers = 5; - // Apply the consumer to key and value - try { - consumer.accept(key, value, v.getLength()); - } - catch (Exception e) { - log.warn("Unable to apply for-each consumer on key[{}]", key, e); - } - }); - }); + final BlockingQueue workQueue = new ArrayBlockingQueue<>(nWorkers * 5); + final AtomicBoolean done = new AtomicBoolean(false); + + final ExecutorService executorService = Executors.newFixedThreadPool(nWorkers); + + for (int ignored = 0; ignored < nWorkers; ignored++) { + final Reader reader = new Reader(workQueue, done, consumer, result, unreadables); + executorService.submit(reader::run); + } + + store.forEach((k, v) -> workQueue.add(new Pair(k, v))); executorService.shutdown(); + done.set(true); - while (!executorService.awaitTermination(1, TimeUnit.MINUTES)) { - log.debug("Still waiting for {} to load.", this); + while(!executorService.awaitTermination(30, TimeUnit.SECONDS)){ + log.debug("Still waiting for {} jobs.", workQueue.size()); } - log.debug("Waited {} on workers.", DurationFormatUtils.formatDurationHMS(rejectionHandler.getWaitedMillis().sum())); // Print some statistics final int total = result.getTotalProcessed(); log.debug( @@ -494,22 +459,117 @@ public void close() { store.close(); } - @Data + @NoArgsConstructor public static class IterationStatistic { - private int totalProcessed; - private int failedKeys; - private int failedValues; + private final AtomicInteger totalProcessed = new AtomicInteger(); + 
private final AtomicInteger failedKeys = new AtomicInteger(); + private final AtomicInteger failedValues = new AtomicInteger(); public void incrTotalProcessed() { - totalProcessed++; + totalProcessed.incrementAndGet(); } public void incrFailedKeys() { - failedKeys++; + failedKeys.incrementAndGet(); } public void incrFailedValues() { - failedValues++; + failedValues.incrementAndGet(); + } + + public void setTotalProcessed(int totalProcessed) { + this.totalProcessed.set(totalProcessed); + } + + public void setFailedKeys(int failedKeys) { + this.failedKeys.set(failedKeys); + } + + public void setFailedValues(int failedValues) { + this.failedValues.set(failedValues); + } + + public int getFailedKeys() { + return failedKeys.get(); + } + + public int getFailedValues() { + return failedValues.get(); + } + + public int getTotalProcessed() { + return totalProcessed.get(); + } + } + + private record Pair(ByteIterable key, ByteIterable value) { + } + + @Data + private class Reader { + private final BlockingQueue queue; + private final AtomicBoolean done; + private final StoreEntryConsumer consumer; + private final IterationStatistic result; + private final List unreadables; + + public void run() { + + while (!done.get() || !queue.isEmpty()) { + try { + final Pair next = queue.poll(100, TimeUnit.MILLISECONDS); + + if (next == null) { + continue; + } + + handle(consumer, result, unreadables, next.key, next.value); + } + catch (Exception exception) { + log.warn("", exception); + } + } + } + + private void handle(StoreEntryConsumer consumer, IterationStatistic result, List unreadables, ByteIterable k, ByteIterable v) { + result.incrTotalProcessed(); + + // Try to read the key first + final KEY key = getDeserializedAndDumpFailed( + k, + SerializingStore.this::readKey, + () -> new String(k.getBytesUnsafe()), + v, + "Could not parse key [{}]" + ); + if (key == null) { + unreadables.add(k); + result.incrFailedKeys(); + return; + } + + // Try to read the value + final VALUE value = getDeserializedAndDumpFailed( + v, + SerializingStore.this::readValue, + key::toString, + v, + "Could not parse value for key [{}]" + ); + + if (value == null) { + unreadables.add(k); + result.incrFailedValues(); + return; + } + + // Apply the consumer to key and value + try { + consumer.accept(key, value, v.getLength()); + } + catch (Exception e) { + log.warn("Unable to apply for-each consumer on key[{}]", key, e); + } } } } From 3230f8c2644164f0f72cfdf92107cdc005fe7469 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:30:29 +0200 Subject: [PATCH 63/96] fixes using wrong insertion method --- .../io/storage/xodus/stores/SerializingStore.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 24880ff5a9..dac11b19b2 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -349,7 +349,14 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { executorService.submit(reader::run); } - store.forEach((k, v) -> workQueue.add(new Pair(k, v))); + store.forEach((k, v) -> { + try { + workQueue.put(new Pair(k, v)); + } + catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); executorService.shutdown(); 
done.set(true); From 62d7385527066f3fb2858c66d54ad0be5c4b772a Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:36:03 +0200 Subject: [PATCH 64/96] increase both workers and buffers per worker --- .../conquery/io/storage/xodus/stores/SerializingStore.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index dac11b19b2..90236e6ac1 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -337,9 +337,9 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final ArrayList unreadables = new ArrayList<>(); - final int nWorkers = 5; + final int nWorkers = 10; - final BlockingQueue workQueue = new ArrayBlockingQueue<>(nWorkers * 5); + final BlockingQueue workQueue = new ArrayBlockingQueue<>(nWorkers * 20); final AtomicBoolean done = new AtomicBoolean(false); final ExecutorService executorService = Executors.newFixedThreadPool(nWorkers); From 102f03225af871fceeac23cbf8272d7b7311ebaf Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 16:42:46 +0200 Subject: [PATCH 65/96] Allow for li elements --- frontend/src/js/tooltip/Tooltip.tsx | 41 ++++++++++++++++++++++------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index d117729f56..7c782f4229 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -178,6 +178,16 @@ function isReactElement( ); } +type MarkdownElement = + | ReactFragment + | ReactElement + | ReactPortal + | boolean + | number + | boolean + | string + | null + | undefined; function highlight( words: string[], Element: Omit< @@ -186,7 +196,10 @@ function highlight( > & ReactMarkdownProps, ): ReactElement | null { - let children = Children.map(Element.children, (child): ReactElement => { + if (!Element) { + return Element; + } + const mappingFunction = (child: MarkdownElement): ReactElement => { if (!child) return <>; if (typeof child === "string") { return HighlightedText({ words, text: child }); @@ -200,13 +213,18 @@ function highlight( ); } return <>{child}; - }); + }; + + if (Array.isArray(Element)) { + return <>{Children.map(Element, mappingFunction)}; + } - if (Array.isArray(Element) || !Element.node) { - return <>{children}; + if (typeof Element === "object" && Element.hasOwnProperty("children")) { + let children = Children.map(Element.children, mappingFunction); + let TagName = Element.node?.tagName as ElementType; + return {children}; } - let TagName = Element.node?.tagName as ElementType; - return {children}; + return <>{Element}; } const Tooltip = () => { @@ -291,10 +309,13 @@ const Tooltip = () => { // TODO: Won't work anymore with the latest react-markdown, because // Try to use another package for highlighting that doesn't depend on a string // or just highlight ourselves - p: (a) => highlight(words, a), - td: (a) => highlight(words, a), - b: (a) => highlight(words, a), - th: (a) => highlight(words, a), + p: (el) => highlight(words, el), + td: (el) => highlight(words, el), + b: (el) => highlight(words, el), + th: (el) => highlight(words, el), + i: (el) => 
highlight(words, el), + ul: (el) => highlight(words, el), + ol: (el) => highlight(words, el), }} > {info.value} From b11ab637245b4eadbf2e18fa2d7c088c3fd0c908 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 16:48:11 +0200 Subject: [PATCH 66/96] add headers --- frontend/src/js/tooltip/Tooltip.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index 7c782f4229..15fbe0fb9b 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -316,6 +316,8 @@ const Tooltip = () => { i: (el) => highlight(words, el), ul: (el) => highlight(words, el), ol: (el) => highlight(words, el), + h1: (el) => highlight(words, el), + h2: (el) => highlight(words, el), }} > {info.value} From 3139b38e35bfb96b95823c1e079999485cd9f9f5 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Wed, 16 Aug 2023 17:04:59 +0200 Subject: [PATCH 67/96] fix regression for tables --- frontend/src/js/tooltip/Tooltip.tsx | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index 15fbe0fb9b..28c29c0e7c 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -219,12 +219,13 @@ function highlight( return <>{Children.map(Element, mappingFunction)}; } - if (typeof Element === "object" && Element.hasOwnProperty("children")) { - let children = Children.map(Element.children, mappingFunction); - let TagName = Element.node?.tagName as ElementType; - return {children}; - } - return <>{Element}; + let children = + typeof Element === "object" && Element.hasOwnProperty("children") + ? Children.map(Element.children, mappingFunction) + : Element.children; + + let TagName = Element.node?.tagName as ElementType; + return {children}; } const Tooltip = () => { @@ -306,9 +307,6 @@ const Tooltip = () => { highlight(words, el), td: (el) => highlight(words, el), b: (el) => highlight(words, el), From 2f51ab2e4520f0db2d96404e505217b935042e69 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 16 Aug 2023 18:03:26 +0200 Subject: [PATCH 68/96] properly parametrize nWorkers and buffer size, and cleanup --- .../io/storage/xodus/stores/BigStore.java | 9 +-- .../xodus/stores/KeyIncludingStore.java | 9 +-- .../xodus/stores/SerializingStore.java | 52 ++++++++++------ .../models/auth/apitoken/TokenStorage.java | 4 +- .../auth/basic/LocalAuthenticationRealm.java | 2 +- .../models/config/XodusStoreFactory.java | 61 ++++++++++++------- .../io/storage/xodus/stores/BigStoreTest.java | 4 +- .../stores/SerializingStoreDumpTest.java | 2 +- 8 files changed, 85 insertions(+), 58 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java index 556c561b6d..8fbc3b38b1 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java @@ -13,6 +13,7 @@ import java.util.Iterator; import java.util.List; import java.util.UUID; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.stream.Stream; @@ -62,7 +63,7 @@ public BigStore(XodusStoreFactory config, StoreInfo storeInfo, Consumer storeCloseHook, Consumer storeRemoveHook, - ObjectMapper 
mapper) { + ObjectMapper mapper, int nWorkers, int bufferPerWorker) { this.storeInfo = storeInfo; // Recommendation by the author of Xodus is to have logFileSize at least be 4 times the biggest file size. @@ -77,7 +78,7 @@ public BigStore(XodusStoreFactory config, BigStoreMetaKeys.class, config.isValidateOnWrite(), config.isRemoveUnreadableFromStore(), - config.getUnreadableDataDumpDirectory() + config.getUnreadableDataDumpDirectory(), nWorkers, bufferPerWorker ); @@ -90,7 +91,7 @@ public BigStore(XodusStoreFactory config, byte[].class, config.isValidateOnWrite(), config.isRemoveUnreadableFromStore(), - config.getUnreadableDataDumpDirectory() + config.getUnreadableDataDumpDirectory(), nWorkers, bufferPerWorker ); @@ -158,7 +159,7 @@ public Collection getAll() { @Override public Collection getAllKeys() { - List out = new ArrayList<>(); + Collection out = new ConcurrentLinkedQueue<>(); // has to be concurrent because forEach is concurrent. metaStore.forEach((key, value, size) -> out.add(key)); return out; } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java index 9d1dbdf6ca..585eb6460d 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.io.storage.xodus.stores; -import com.bakdata.conquery.io.storage.Store; - import java.io.Closeable; import java.io.IOException; import java.util.Collection; -import java.util.function.Consumer; + +import com.bakdata.conquery.io.storage.Store; public abstract class KeyIncludingStore implements Closeable { @@ -26,9 +25,7 @@ public VALUE get(KEY key) { return store.get(key); } - public void forEach(Consumer consumer) { - store.forEach((key, value, size) -> consumer.accept(value)); - } + public void update(VALUE value) { updated(value); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 90236e6ac1..6de77f12ce 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -5,18 +5,18 @@ import java.io.PrintStream; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.IntStream; import javax.validation.Validator; @@ -106,6 +106,8 @@ public class SerializingStore implements Store { private final boolean removeUnreadablesFromUnderlyingStore; private final ObjectMapper objectMapper; + private final int nWorkers; + private final int bufferPerWorker; public , CLASS_V extends Class> SerializingStore(XodusStore store, Validator validator, @@ -114,7 +116,7 @@ public , CLASS_V extends Class> SerializingSto 
CLASS_V valueType, boolean validateOnWrite, boolean removeUnreadableFromStore, - File unreadableDataDumpDirectory) { + File unreadableDataDumpDirectory, int nWorkers, int bufferPerWorker) { this.store = store; this.validator = validator; this.validateOnWrite = validateOnWrite; @@ -135,6 +137,9 @@ public , CLASS_V extends Class> SerializingSto unreadableValuesDumpDir = unreadableDataDumpDirectory; + this.nWorkers = nWorkers; + this.bufferPerWorker = bufferPerWorker; + if (shouldDumpUnreadables()) { if (!unreadableValuesDumpDir.exists() && !unreadableValuesDumpDir.mkdirs()) { throw new IllegalStateException("Could not create dump directory: " + unreadableValuesDumpDir); @@ -247,8 +252,6 @@ private static void dumpToFile(@NonNull ByteIterable obj, @NonNull String keyOfD throw new IllegalStateException("Could not create `%s`.".formatted(dumpfile.getParentFile())); } - //TODO FK: dump in a separate thread so we are not blocking the reader thread. - // Write json try { log.info("Dumping value of key {} to {} (because it cannot be deserialized anymore).", keyOfDump, dumpfile.getCanonicalPath()); @@ -330,36 +333,38 @@ private static String sanitiseFileName(@NotNull String name) { * Iterates a given consumer over the entries of this store. * Depending on the {@link XodusStoreFactory} corrupt entries may be dump to a file and/or removed from the store. * These entries are not submitted to the consumer. + * + * @implNote This method is concurrent! */ @SneakyThrows @Override public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); - final ArrayList unreadables = new ArrayList<>(); - - final int nWorkers = 10; + final Collection unreadables = new ConcurrentLinkedQueue<>(); - final BlockingQueue workQueue = new ArrayBlockingQueue<>(nWorkers * 20); - final AtomicBoolean done = new AtomicBoolean(false); + // Some magic number of buffering per worker, that isn't so high, that we fill up RAM with useless stuff, but have enough data to keep the workers occupied. + final BlockingQueue workQueue = new ArrayBlockingQueue<>(nWorkers * bufferPerWorker); final ExecutorService executorService = Executors.newFixedThreadPool(nWorkers); - for (int ignored = 0; ignored < nWorkers; ignored++) { - final Reader reader = new Reader(workQueue, done, consumer, result, unreadables); - executorService.submit(reader::run); - } + final List readers = IntStream.range(0, nWorkers) + .mapToObj(ignored -> new Reader(workQueue, consumer, result, unreadables)) + .peek(reader -> executorService.submit(reader::run)) + .toList(); + // We read in single thread, and deserialise and dispatch in multiple threads. store.forEach((k, v) -> { try { workQueue.put(new Pair(k, v)); } catch (InterruptedException e) { + //TODO wat do? throw new RuntimeException(e); } }); executorService.shutdown(); - done.set(true); + readers.forEach(Reader::finish); while(!executorService.awaitTermination(30, TimeUnit.SECONDS)){ log.debug("Still waiting for {} jobs.", workQueue.size()); @@ -466,8 +471,10 @@ public void close() { store.close(); } + @NoArgsConstructor public static class IterationStatistic { + //TODO move into reader? 
private final AtomicInteger totalProcessed = new AtomicInteger(); private final AtomicInteger failedKeys = new AtomicInteger(); private final AtomicInteger failedValues = new AtomicInteger(); @@ -515,16 +522,16 @@ private record Pair(ByteIterable key, ByteIterable value) { @Data private class Reader { private final BlockingQueue queue; - private final AtomicBoolean done; + private boolean done = false; private final StoreEntryConsumer consumer; private final IterationStatistic result; - private final List unreadables; + private final Collection unreadables; public void run() { - while (!done.get() || !queue.isEmpty()) { + while (!done || !queue.isEmpty()) { try { - final Pair next = queue.poll(100, TimeUnit.MILLISECONDS); + final Pair next = queue.poll(1, TimeUnit.SECONDS); if (next == null) { continue; @@ -533,12 +540,17 @@ public void run() { handle(consumer, result, unreadables, next.key, next.value); } catch (Exception exception) { + //TODO probably split for InterrupedException? No idea how to handle that though log.warn("", exception); } } } - private void handle(StoreEntryConsumer consumer, IterationStatistic result, List unreadables, ByteIterable k, ByteIterable v) { + public void finish() { + done = true; + } + + private void handle(StoreEntryConsumer consumer, IterationStatistic result, Collection unreadables, ByteIterable k, ByteIterable v) { result.incrTotalProcessed(); // Try to read the key first diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java b/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java index c42b24ec0c..9d4ec129d7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java @@ -72,7 +72,7 @@ public void start(){ ApiTokenData.class, true, false, - null + null, 1, 100 )); openStoresInEnv.add(data); @@ -90,7 +90,7 @@ public void start(){ ApiTokenData.MetaData.class, true, false, - null + null, 1, 100 )); openStoresInEnv.add(meta); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java index 2632931fc8..6fca172e9b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java @@ -108,7 +108,7 @@ protected void onInit() { PasswordHasher.HashedEntry.class, false, true, - null + null, 1, 100 )); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java index 437ba9c2b9..068ca1363d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java @@ -16,6 +16,7 @@ import javax.annotation.Nullable; import javax.validation.Valid; import javax.validation.Validator; +import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; @@ -125,32 +126,45 @@ public class XodusStoreFactory implements StoreFactory { private Path directory = Path.of("storage"); - private boolean validateOnWrite = false; + private boolean validateOnWrite; @NotNull @Valid private XodusConfig xodus = new XodusConfig(); - private boolean 
useWeakDictionaryCaching = false; + /** + * Number of threads reading from XoduStore. + * @implNote it's always only one thread reading from disk, dispatching to multiple reader threads. + */ + @Min(1) + private int readerWorkers = 10; + + /** + * How many slots of buffering to use before the IO thread is put to sleep. + */ + @Min(1) + private int bufferPerWorker = 20; + + private boolean useWeakDictionaryCaching; @NotNull private Duration weakCacheDuration = Duration.hours(48); /** * Flag for the {@link SerializingStore} whether to delete values from the underlying store, that cannot be mapped to an object anymore. */ - private boolean removeUnreadableFromStore = false; + private boolean removeUnreadableFromStore; /** * When set, all values that could not be deserialized from the persistent store, are dump into individual files. */ @Nullable - private File unreadableDataDumpDirectory = null; + private File unreadableDataDumpDirectory; /** * If set, an environment will not be loaded if it misses a required store. * If not set, the environment is loaded and the application needs to create the store. * This is useful if a new version introduces a new store, but will also alter the environment upon reading. */ - private boolean loadEnvironmentWithMissingStores = false; + private boolean loadEnvironmentWithMissingStores; @JsonIgnore private transient Validator validator; @@ -175,13 +189,13 @@ public Collection discoverWorkerStorages() { private List loadNamespacedStores(String prefix, Function creator, Set storesToTest) { - File baseDir = getDirectory().toFile(); + final File baseDir = getDirectory().toFile(); if (baseDir.mkdirs()) { log.warn("Had to create Storage Dir at `{}`", baseDir); } - List storages = new ArrayList<>(); + final List storages = new ArrayList<>(); for (File directory : Objects.requireNonNull(baseDir.listFiles((file, name) -> file.isDirectory() && name.startsWith(prefix)))) { @@ -194,7 +208,7 @@ private List loadNamespacedStores(String prefix continue; } - T namespacedStorage = creator.apply(name); + final T namespacedStorage = creator.apply(name); storages.add(namespacedStorage); } @@ -203,8 +217,8 @@ private List loadNamespacedStores(String prefix } private boolean environmentHasStores(File pathName, Set storesToTest) { - Environment env = findEnvironment(pathName); - boolean exists = env.computeInTransaction(t -> { + final Environment env = findEnvironment(pathName); + final boolean exists = env.computeInTransaction(t -> { final List allStoreNames = env.getAllStoreNames(t); final boolean complete = new HashSet<>(allStoreNames).containsAll(storesToTest); if (complete) { @@ -273,7 +287,7 @@ public IdentifiableStore createDictionaryStore(CentralRegistry centr DICTIONARIES.storeInfo(), this::closeStore, this::removeStore, - centralRegistry.injectIntoNew(objectMapper) + centralRegistry.injectIntoNew(objectMapper), getReaderWorkers(), getBufferPerWorker() ); openStoresInEnv.put(bigStore.getDataXodusStore().getEnvironment(), bigStore.getDataXodusStore()); openStoresInEnv.put(bigStore.getMetaXodusStore().getEnvironment(), bigStore.getMetaXodusStore()); @@ -316,7 +330,7 @@ public SingletonStore createIdMappingStore(String pathName, ObjectM synchronized (openStoresInEnv) { final BigStore bigStore = - new BigStore<>(this, validator, environment, ID_MAPPING.storeInfo(), this::closeStore, this::removeStore, objectMapper); + new BigStore<>(this, validator, environment, ID_MAPPING.storeInfo(), this::closeStore, this::removeStore, objectMapper, 10, 20); 
openStoresInEnv.put(bigStore.getDataXodusStore().getEnvironment(), bigStore.getDataXodusStore()); openStoresInEnv.put(bigStore.getMetaXodusStore().getEnvironment(), bigStore.getMetaXodusStore()); @@ -396,14 +410,14 @@ private Environment findEnvironment(@NonNull File path) { } private Environment findEnvironment(String pathName) { - File path = getStorageDir(pathName); + final File path = getStorageDir(pathName); return findEnvironment(path); } private void closeStore(XodusStore store) { - Environment env = store.getEnvironment(); + final Environment env = store.getEnvironment(); synchronized (openStoresInEnv) { - Collection stores = openStoresInEnv.get(env); + final Collection stores = openStoresInEnv.get(env); stores.remove(store); log.info("Closed XodusStore: {}", store); @@ -427,9 +441,9 @@ private void closeEnvironment(Environment env) { } private void removeStore(XodusStore store) { - Environment env = store.getEnvironment(); + final Environment env = store.getEnvironment(); synchronized (openStoresInEnv){ - Collection stores = openStoresInEnv.get(env); + final Collection stores = openStoresInEnv.get(env); stores.remove(store); @@ -467,9 +481,11 @@ public Store createStore(Environment environment, Valid if(openStoresInEnv.get(environment).stream().map(XodusStore::getName).anyMatch(name -> storeInfo.getName().equals(name))){ throw new IllegalStateException("Attempted to open an already opened store:" + storeInfo.getName()); } - final XodusStore store = - new XodusStore(environment, storeInfo.getName(), this::closeStore, this::removeStore); + + final XodusStore store = new XodusStore(environment, storeInfo.getName(), this::closeStore, this::removeStore); + openStoresInEnv.put(environment, store); + return new CachedStore<>( new SerializingStore<>( store, @@ -477,9 +493,10 @@ public Store createStore(Environment environment, Valid objectMapper, storeInfo.getKeyType(), storeInfo.getValueType(), - this.isValidateOnWrite(), - this.isRemoveUnreadableFromStore(), - this.getUnreadableDataDumpDirectory() + isValidateOnWrite(), + isRemoveUnreadableFromStore(), + getUnreadableDataDumpDirectory(), + getReaderWorkers(), getBufferPerWorker() )); } } diff --git a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java index f27b1a1aa6..be92d2e953 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java @@ -64,7 +64,7 @@ public void destroy() throws IOException { public void testFull() throws JSONException, IOException { BigStore store = new BigStore<>(new XodusStoreFactory(), Validators.newValidator(), env, - StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER + StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER, 10, 20 ); @@ -106,7 +106,7 @@ public void testFull() throws JSONException, IOException { @Test public void testEmpty() throws JSONException, IOException { BigStore store = new BigStore<>(new XodusStoreFactory(), Validators.newValidator(), env, - StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER + StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER, 10, 20 ); store.setChunkByteSize(Ints.checkedCast(DataSize.megabytes(1).toBytes())); diff --git a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java 
b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java index e06b241ebb..5d92970875 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java @@ -72,7 +72,7 @@ private SerializingStore createSerializedStore(XodusSto storeId.getValueType(), config.isValidateOnWrite(), config.isRemoveUnreadableFromStore(), - config.getUnreadableDataDumpDirectory() + config.getUnreadableDataDumpDirectory(), 1, 1 ); } From 079e9c2eccb5b856fa1b199e8a62dd243ec0c05f Mon Sep 17 00:00:00 2001 From: Kai Rollmann Date: Wed, 16 Aug 2023 18:27:12 +0200 Subject: [PATCH 69/96] Improve select validation in concept list fields --- frontend/src/js/external-forms/form/Field.tsx | 20 +++++-- frontend/src/js/external-forms/validators.ts | 53 ++++++++++++++++--- frontend/src/js/model/select.ts | 14 ++++- .../src/js/query-node-editor/NodeSelects.tsx | 5 +- .../src/js/query-node-editor/TableSelects.tsx | 11 ++-- .../InputSelect/SelectListOption.tsx | 10 ++-- frontend/src/localization/de.json | 3 +- frontend/src/localization/en.json | 3 +- 8 files changed, 89 insertions(+), 30 deletions(-) diff --git a/frontend/src/js/external-forms/form/Field.tsx b/frontend/src/js/external-forms/form/Field.tsx index c39a051775..48776d37c1 100644 --- a/frontend/src/js/external-forms/form/Field.tsx +++ b/frontend/src/js/external-forms/form/Field.tsx @@ -73,12 +73,23 @@ type Props = T & { noContainer?: boolean; noLabel?: boolean; }; -const FieldContainer = styled("div")<{ noLabel?: boolean }>` +const FieldContainer = styled("div")<{ noLabel?: boolean; hasError?: boolean }>` + display: flex; + flex-direction: column; + gap: 5px; padding: ${({ noLabel }) => (noLabel ? "7px 10px" : "2px 10px 7px")}; background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; - border: 1px solid ${({ theme }) => theme.col.grayLight}; + border: 1px solid + ${({ theme, hasError }) => (hasError ? theme.col.red : theme.col.grayLight)}; `; + +const ErrorContainer = styled("div")` + color: ${({ theme }) => theme.col.red}; + font-weight: 700; + font-size: ${({ theme }) => theme.font.sm}; +`; + const ConnectedField = ({ children, control, @@ -89,7 +100,7 @@ const ConnectedField = ({ ...props }: Props) => { const { t } = useTranslation(); - const { field } = useController({ + const { field, fieldState } = useController({ name: formField.name, defaultValue, control, @@ -105,8 +116,9 @@ const ConnectedField = ({ return noContainer ? (
{children({ ...field, ...props })}
) : ( - + {children({ ...field, ...props })} + {fieldState.error?.message} ); }; diff --git a/frontend/src/js/external-forms/validators.ts b/frontend/src/js/external-forms/validators.ts index 50abbdc670..b70169a8ce 100644 --- a/frontend/src/js/external-forms/validators.ts +++ b/frontend/src/js/external-forms/validators.ts @@ -1,8 +1,16 @@ import { TFunction } from "i18next"; import { isEmpty } from "../common/helpers/commonHelper"; +import { exists } from "../common/helpers/exists"; +import { isValidSelect } from "../model/select"; -import { CheckboxField, Field, FormField } from "./config-types"; +import { + CheckboxField, + ConceptListField, + Field, + FormField, +} from "./config-types"; +import { FormConceptGroupT } from "./form-concept-group/formConceptGroupState"; export const validateRequired = (t: TFunction, value: any): string | null => { return isEmpty(value) ? t("externalForms.formValidation.isRequired") : null; @@ -57,15 +65,48 @@ export const validateConceptGroupFilled = ( : null; }; +const validateRestrictedSelects = ( + t: TFunction, + value: FormConceptGroupT[], + field: ConceptListField, +) => { + if (!value || value.length === 0) return null; + + const { allowlistedSelects, blocklistedSelects } = field; + + const hasAllowlistedSelects = (allowlistedSelects?.length || 0) > 0; + const hasBlocklistedSelects = (blocklistedSelects?.length || 0) > 0; + + if (hasAllowlistedSelects || hasBlocklistedSelects) { + const validSelects = value + .flatMap((v) => v.concepts) + .filter(exists) + .flatMap((c) => { + const tableSelects = c.tables.flatMap((t) => t.selects); + + return [...c.selects, ...tableSelects].filter( + isValidSelect({ allowlistedSelects, blocklistedSelects }), + ); + }); + + if (validSelects.length === 0) { + return t("externalForms.formValidation.validSelectRequired"); + } + } + + return null; +}; + +// TODO: Refactor using generics to try and tie the `field` to its `value` const DEFAULT_VALIDATION_BY_TYPE: Record< FormField["type"], - null | ((t: TFunction, value: any) => string | null) + null | ((t: TFunction, value: any, field: any) => string | null) > = { STRING: null, TEXTAREA: null, NUMBER: null, CHECKBOX: null, - CONCEPT_LIST: null, + CONCEPT_LIST: validateRestrictedSelects, RESULT_GROUP: null, SELECT: null, TABS: null, @@ -86,7 +127,7 @@ function getNotEmptyValidation(fieldType: string) { } } -function getPossibleValidations(fieldType: string) { +function getConfigurableValidations(fieldType: string) { return { NOT_EMPTY: getNotEmptyValidation(fieldType), GREATER_THAN_ZERO: validatePositive, @@ -108,7 +149,7 @@ export function getErrorForField( ) { const defaultValidation = DEFAULT_VALIDATION_BY_TYPE[field.type]; - let error = defaultValidation ? defaultValidation(t, value) : null; + let error = defaultValidation ? 
defaultValidation(t, value, field) : null; if ( isFieldWithValidations(field) && @@ -116,7 +157,7 @@ export function getErrorForField( field.validations.length > 0 ) { for (let validation of field.validations) { - const validateFn = getPossibleValidations(field.type)[validation]; + const validateFn = getConfigurableValidations(field.type)[validation]; if (validateFn) { error = error || validateFn(t, value); diff --git a/frontend/src/js/model/select.ts b/frontend/src/js/model/select.ts index b8029a1820..3030d8432b 100644 --- a/frontend/src/js/model/select.ts +++ b/frontend/src/js/model/select.ts @@ -63,10 +63,20 @@ export function selectIsWithinTypes( ); } +interface AllowBlocklistedSelects { + blocklistedSelects?: SelectorResultType[]; + allowlistedSelects?: SelectorResultType[]; +} + export const isSelectDisabled = ( select: SelectorT, - blocklistedSelects?: SelectorResultType[], - allowlistedSelects?: SelectorResultType[], + { blocklistedSelects, allowlistedSelects }: AllowBlocklistedSelects, ) => (!!allowlistedSelects && !selectIsWithinTypes(select, allowlistedSelects)) || (!!blocklistedSelects && selectIsWithinTypes(select, blocklistedSelects)); + +export const isValidSelect = + ({ blocklistedSelects, allowlistedSelects }: AllowBlocklistedSelects) => + (select: SelectedSelectorT) => + !!select.selected && + !isSelectDisabled(select, { blocklistedSelects, allowlistedSelects }); diff --git a/frontend/src/js/query-node-editor/NodeSelects.tsx b/frontend/src/js/query-node-editor/NodeSelects.tsx index f117d16369..a5a8f880b9 100644 --- a/frontend/src/js/query-node-editor/NodeSelects.tsx +++ b/frontend/src/js/query-node-editor/NodeSelects.tsx @@ -27,11 +27,10 @@ const NodeSelects = ({ sortSelects(selects).map((select) => ({ value: select.id, label: select.label, - disabled: isSelectDisabled( - select, + disabled: isSelectDisabled(select, { blocklistedSelects, allowlistedSelects, - ), + }), })), [selects, allowlistedSelects, blocklistedSelects], ); diff --git a/frontend/src/js/query-node-editor/TableSelects.tsx b/frontend/src/js/query-node-editor/TableSelects.tsx index b7fd814e5c..89a2a09f67 100644 --- a/frontend/src/js/query-node-editor/TableSelects.tsx +++ b/frontend/src/js/query-node-editor/TableSelects.tsx @@ -1,7 +1,7 @@ import { useMemo } from "react"; import type { SelectOptionT, SelectorResultType } from "../api/types"; -import { isSelectDisabled, sortSelects } from "../model/select"; +import { isSelectDisabled, isValidSelect, sortSelects } from "../model/select"; import { SelectedSelectorT } from "../standard-query-editor/types"; import InputMultiSelect from "../ui-components/InputMultiSelect/InputMultiSelect"; @@ -24,19 +24,18 @@ const TableSelects = ({ return sortSelects(selects).map((select) => ({ value: select.id, label: select.label, - disabled: isSelectDisabled( - select, + disabled: isSelectDisabled(select, { blocklistedSelects, allowlistedSelects, - ), + }), })); }, [selects, allowlistedSelects, blocklistedSelects]); const value = useMemo(() => { return selects - .filter(({ selected }) => !!selected) + .filter(isValidSelect({ blocklistedSelects, allowlistedSelects })) .map(({ id, label }) => ({ value: id, label: label })); - }, [selects]); + }, [selects, allowlistedSelects, blocklistedSelects]); return (
diff --git a/frontend/src/js/ui-components/InputSelect/SelectListOption.tsx b/frontend/src/js/ui-components/InputSelect/SelectListOption.tsx index 0d975ee031..c884228a0c 100644 --- a/frontend/src/js/ui-components/InputSelect/SelectListOption.tsx +++ b/frontend/src/js/ui-components/InputSelect/SelectListOption.tsx @@ -20,12 +20,8 @@ const Container = styled("div")` background-color: ${theme.col.blueGrayVeryLight}; `}; - ${({ disabled }) => - disabled && - css` - opacity: 0.5; - cursor: not-allowed; - `}; + opacity: ${({ disabled }) => (disabled ? 0.4 : 1)}; + cursor: ${({ disabled }) => (disabled ? "not-allowed" : "pointer")}; /* to style react-markdown */ p { @@ -47,7 +43,7 @@ const SelectListOption = forwardRef( const label = option.label || String(option.value); return ( - + {option.displayLabel ? ( option.displayLabel ) : ( diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json index 82e79e2581..e5bbafb6ba 100644 --- a/frontend/src/localization/de.json +++ b/frontend/src/localization/de.json @@ -269,7 +269,8 @@ "formValidation": { "mustBePositiveNumber": "Muss positive Zahl sein", "isRequired": "Erforderlich", - "invalidDateRange": "Enddatum liegt vor Startdatum" + "invalidDateRange": "Enddatum liegt vor Startdatum", + "validSelectRequired": "Kompatibler Ausgabewert erforderlich" }, "default": { "conceptDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", diff --git a/frontend/src/localization/en.json b/frontend/src/localization/en.json index 0c207597fe..be8312517c 100644 --- a/frontend/src/localization/en.json +++ b/frontend/src/localization/en.json @@ -269,7 +269,8 @@ "formValidation": { "mustBePositiveNumber": "Must be a positive number", "isRequired": "Required", - "invalidDateRange": "End date is smaller than start date" + "invalidDateRange": "End date is smaller than start date", + "validSelectRequired": "Valid select required" }, "default": { "conceptDropzoneLabel": "Add a concept or a concept list", From 71b8aa78ff745c37123fc82efa459c1592d94aa1 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Thu, 17 Aug 2023 09:37:56 +0200 Subject: [PATCH 70/96] use ExecutorService like a grownup --- .../xodus/stores/SerializingStore.java | 137 ++++++------------ 1 file changed, 43 insertions(+), 94 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 6de77f12ce..b6acc83351 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -6,17 +6,13 @@ import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.Collection; -import java.util.List; import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; +import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; -import java.util.stream.IntStream; import javax.validation.Validator; @@ -34,7 +30,6 @@ import com.google.common.base.Throwables; import jetbrains.exodus.ArrayByteIterable; import jetbrains.exodus.ByteIterable; 
-import lombok.Data; import lombok.NoArgsConstructor; import lombok.NonNull; import lombok.SneakyThrows; @@ -343,31 +338,21 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final Collection unreadables = new ConcurrentLinkedQueue<>(); // Some magic number of buffering per worker, that isn't so high, that we fill up RAM with useless stuff, but have enough data to keep the workers occupied. - final BlockingQueue workQueue = new ArrayBlockingQueue<>(nWorkers * bufferPerWorker); - final ExecutorService executorService = Executors.newFixedThreadPool(nWorkers); - - final List readers = IntStream.range(0, nWorkers) - .mapToObj(ignored -> new Reader(workQueue, consumer, result, unreadables)) - .peek(reader -> executorService.submit(reader::run)) - .toList(); + final ThreadPoolExecutor executorService = new ThreadPoolExecutor( + nWorkers, nWorkers, + 0, TimeUnit.SECONDS, + new ArrayBlockingQueue<>(nWorkers * bufferPerWorker), + new ThreadPoolExecutor.CallerRunsPolicy() + ); // We read in single thread, and deserialise and dispatch in multiple threads. - store.forEach((k, v) -> { - try { - workQueue.put(new Pair(k, v)); - } - catch (InterruptedException e) { - //TODO wat do? - throw new RuntimeException(e); - } - }); + store.forEach((k, v) -> executorService.submit(() -> handle(consumer, result, unreadables, k, v))); executorService.shutdown(); - readers.forEach(Reader::finish); while(!executorService.awaitTermination(30, TimeUnit.SECONDS)){ - log.debug("Still waiting for {} jobs.", workQueue.size()); + log.debug("Still waiting for {} jobs.", executorService.getQueue().size()); } // Print some statistics @@ -474,7 +459,6 @@ public void close() { @NoArgsConstructor public static class IterationStatistic { - //TODO move into reader? private final AtomicInteger totalProcessed = new AtomicInteger(); private final AtomicInteger failedKeys = new AtomicInteger(); private final AtomicInteger failedValues = new AtomicInteger(); @@ -516,79 +500,44 @@ public int getTotalProcessed() { } } - private record Pair(ByteIterable key, ByteIterable value) { - } - - @Data - private class Reader { - private final BlockingQueue queue; - private boolean done = false; - private final StoreEntryConsumer consumer; - private final IterationStatistic result; - private final Collection unreadables; - - public void run() { - - while (!done || !queue.isEmpty()) { - try { - final Pair next = queue.poll(1, TimeUnit.SECONDS); - - if (next == null) { - continue; - } - - handle(consumer, result, unreadables, next.key, next.value); - } - catch (Exception exception) { - //TODO probably split for InterrupedException? 
No idea how to handle that though - log.warn("", exception); - } - } - } - - public void finish() { - done = true; + private void handle(StoreEntryConsumer consumer, IterationStatistic result, Collection unreadables, ByteIterable k, ByteIterable v) { + result.incrTotalProcessed(); + + // Try to read the key first + final KEY key = getDeserializedAndDumpFailed( + k, + SerializingStore.this::readKey, + () -> new String(k.getBytesUnsafe()), + v, + "Could not parse key [{}]" + ); + if (key == null) { + unreadables.add(k); + result.incrFailedKeys(); + return; } - private void handle(StoreEntryConsumer consumer, IterationStatistic result, Collection unreadables, ByteIterable k, ByteIterable v) { - result.incrTotalProcessed(); + // Try to read the value + final VALUE value = getDeserializedAndDumpFailed( + v, + SerializingStore.this::readValue, + key::toString, + v, + "Could not parse value for key [{}]" + ); - // Try to read the key first - final KEY key = getDeserializedAndDumpFailed( - k, - SerializingStore.this::readKey, - () -> new String(k.getBytesUnsafe()), - v, - "Could not parse key [{}]" - ); - if (key == null) { - unreadables.add(k); - result.incrFailedKeys(); - return; - } - - // Try to read the value - final VALUE value = getDeserializedAndDumpFailed( - v, - SerializingStore.this::readValue, - key::toString, - v, - "Could not parse value for key [{}]" - ); - - if (value == null) { - unreadables.add(k); - result.incrFailedValues(); - return; - } + if (value == null) { + unreadables.add(k); + result.incrFailedValues(); + return; + } - // Apply the consumer to key and value - try { - consumer.accept(key, value, v.getLength()); - } - catch (Exception e) { - log.warn("Unable to apply for-each consumer on key[{}]", key, e); - } + // Apply the consumer to key and value + try { + consumer.accept(key, value, v.getLength()); + } + catch (Exception e) { + log.warn("Unable to apply for-each consumer on key[{}]", key, e); } } } From c9582cbad8fb563bbdef71f00a4bea00642c122a Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Thu, 17 Aug 2023 10:29:53 +0200 Subject: [PATCH 71/96] cleanup --- .../xodus/stores/SerializingStore.java | 21 +++++----- .../util/CallerBlocksRejectionHandler.java | 39 ------------------- 2 files changed, 10 insertions(+), 50 deletions(-) delete mode 100644 backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index b6acc83351..5ef1333e7a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -337,8 +337,6 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); final Collection unreadables = new ConcurrentLinkedQueue<>(); - // Some magic number of buffering per worker, that isn't so high, that we fill up RAM with useless stuff, but have enough data to keep the workers occupied. 
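For illustration only — a minimal, self-contained sketch of the backpressure pattern this series moves to: a fixed pool fed through a bounded queue, where CallerRunsPolicy makes the submitting reader thread execute a task itself whenever the queue is full, so reading from disk can never outrun deserialization by more than the buffer. The class name, the process() stand-in and the 10_000-entry loop are illustrative assumptions, not project code.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BoundedDeserializationSketch {

    public static void main(String[] args) throws InterruptedException {
        final int nWorkers = Runtime.getRuntime().availableProcessors();
        final int bufferPerWorker = 20; // illustrative buffer size, mirrors the config default

        // Bounded queue: at most nWorkers * bufferPerWorker tasks wait in memory.
        // CallerRunsPolicy: when the queue is full, the submitting (reader) thread
        // runs the task itself, which naturally throttles how fast it can read.
        final ThreadPoolExecutor executor = new ThreadPoolExecutor(
                nWorkers, nWorkers,
                0, TimeUnit.SECONDS,
                new ArrayBlockingQueue<>(nWorkers * bufferPerWorker),
                new ThreadPoolExecutor.CallerRunsPolicy()
        );

        // Single producer submits work; the pool processes entries in parallel.
        for (int entry = 0; entry < 10_000; entry++) {
            final int current = entry;
            executor.submit(() -> process(current));
        }

        executor.shutdown();
        while (!executor.awaitTermination(30, TimeUnit.SECONDS)) {
            System.out.println("Still waiting for " + executor.getQueue().size() + " queued tasks.");
        }
    }

    private static void process(int entry) {
        // Stand-in for deserializing and consuming a store entry.
    }
}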
- final ThreadPoolExecutor executorService = new ThreadPoolExecutor( nWorkers, nWorkers, 0, TimeUnit.SECONDS, @@ -357,16 +355,17 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { // Print some statistics final int total = result.getTotalProcessed(); + log.debug( - String.format( - "While processing store %s:\n\tEntries processed:\t%d\n\tKey read failure:\t%d (%.2f%%)\n\tValue read failure:\t%d (%.2f%%)", - store.getName(), - total, - result.getFailedKeys(), - total > 0 ? (float) result.getFailedKeys() / total * 100 : 0, - result.getFailedValues(), - total > 0 ? (float) result.getFailedValues() / total * 100 : 0 - )); + "While processing store %s:\n\tEntries processed:\t%d\n\tKey read failure:\t%d (%.2f%%)\n\tValue read failure:\t%d (%.2f%%)" + .formatted( + store.getName(), + total, + result.getFailedKeys(), + total > 0 ? (float) result.getFailedKeys() / total * 100 : 0, + result.getFailedValues(), + total > 0 ? (float) result.getFailedValues() / total * 100 : 0 + )); // Remove corrupted entries from the store if configured so if (removeUnreadablesFromUnderlyingStore) { diff --git a/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java b/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java deleted file mode 100644 index b6ce4b4556..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/util/CallerBlocksRejectionHandler.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.bakdata.conquery.util; - -import java.util.concurrent.RejectedExecutionException; -import java.util.concurrent.RejectedExecutionHandler; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.LongAdder; - -import lombok.Data; - -@Data -public class CallerBlocksRejectionHandler implements RejectedExecutionHandler { - - private final long timeoutMillis; - private final LongAdder waitedMillis = new LongAdder(); - - @Override - public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { - if (executor.isShutdown()) { - return; - } - - try { - long before = System.currentTimeMillis(); - final boolean success = executor.getQueue().offer(r, getTimeoutMillis(), TimeUnit.MILLISECONDS); - long after = System.currentTimeMillis(); - - waitedMillis.add(after - before); - - if (!success) { - throw new RejectedExecutionException("Could not submit within specified timeout."); - } - } - catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new RejectedExecutionException("Thread was interrupted."); - } - } -} From 1ccddc3946f01fc88622c94159f56cc778ea3777 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Thu, 17 Aug 2023 10:46:12 +0200 Subject: [PATCH 72/96] fixes comparator of IterationStatistic --- .../io/storage/xodus/stores/SerializingStore.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 5ef1333e7a..a788bef874 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -30,6 +30,8 @@ import com.google.common.base.Throwables; import jetbrains.exodus.ArrayByteIterable; import jetbrains.exodus.ByteIterable; +import lombok.Data; +import lombok.EqualsAndHashCode; import 
lombok.NoArgsConstructor; import lombok.NonNull; import lombok.SneakyThrows; @@ -457,9 +459,14 @@ public void close() { @NoArgsConstructor + @EqualsAndHashCode + @Data public static class IterationStatistic { + @EqualsAndHashCode.Exclude private final AtomicInteger totalProcessed = new AtomicInteger(); + @EqualsAndHashCode.Exclude private final AtomicInteger failedKeys = new AtomicInteger(); + @EqualsAndHashCode.Exclude private final AtomicInteger failedValues = new AtomicInteger(); public void incrTotalProcessed() { @@ -486,14 +493,17 @@ public void setFailedValues(int failedValues) { this.failedValues.set(failedValues); } + @EqualsAndHashCode.Include public int getFailedKeys() { return failedKeys.get(); } + @EqualsAndHashCode.Include public int getFailedValues() { return failedValues.get(); } + @EqualsAndHashCode.Include public int getTotalProcessed() { return totalProcessed.get(); } From 7df676153c24283495db5128efe015f3b7d7300b Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Thu, 17 Aug 2023 11:16:13 +0200 Subject: [PATCH 73/96] change root to Dropzone component -> extend dropzone component to allow for hovering --- .../DropzoneBetweenElements.tsx | 65 +++++++++---------- .../form-components/DropzoneList.tsx | 5 +- .../form-concept-group/FormConceptGroup.tsx | 6 +- frontend/src/js/ui-components/Dropzone.tsx | 8 ++- 4 files changed, 43 insertions(+), 41 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 7d46cd7cb7..937d2ac68c 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -1,24 +1,19 @@ import styled from "@emotion/styled"; -import { DropTargetMonitor, useDrop } from "react-dnd"; +import { useState } from "react"; +import { DropTargetMonitor } from "react-dnd"; -import { PossibleDroppableObject } from "../../ui-components/Dropzone"; +import Dropzone, { + PossibleDroppableObject, +} from "../../ui-components/Dropzone"; -interface Props { - onDrop: (item: DroppableObject, monitor: DropTargetMonitor) => void; +interface Props { + onDrop: (props: PossibleDroppableObject, monitor: DropTargetMonitor) => void; acceptedDropTypes: string[]; - lastElement?: boolean; - top?: number; + top: number; + height: number; } -const RootHeightBase = 30; const LineHeight = 3; -const Root = styled("div")` - width: 100%; - left: 0; - right: 0; - position: absolute; - border-radius: ${({ theme }) => theme.borderRadius}; -`; const Line = styled("div")<{ show: boolean }>` overflow: hidden; @@ -30,35 +25,33 @@ const Line = styled("div")<{ show: boolean }>` border-radius: 2px; `; -const DropzoneBetweenElements = < - DroppableObject extends PossibleDroppableObject, ->({ +const SxDropzone = styled(Dropzone)<{ height: number; top: number }>` + height: ${({ height }) => height}px; + top: ${({ top }) => top}px; + position: absolute; + background-color: transparent; +`; + +const DropzoneBetweenElements = ({ acceptedDropTypes, onDrop, - lastElement, + height, top, -}: Props) => { - const [{ isOver }, addZoneRef] = useDrop({ - accept: acceptedDropTypes, - drop: onDrop, - collect: (monitor) => ({ - isOver: monitor.isOver(), - isDroppable: monitor.canDrop(), - }), - }); - - const rootHeightMultiplier = lastElement ? 
0.7 : 1; +}: Props) => { + let [isOver, setIsOver] = useState(false); return ( <> - + ); }; diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 7af7f1bf0c..8bf451dc93 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -58,7 +58,7 @@ interface PropsT { onImportLines: (lines: string[]) => void; dropBetween: ( i: number, - ) => (item: DroppableObject, monitor: DropTargetMonitor) => void; + ) => (item: PossibleDroppableObject, monitor: DropTargetMonitor) => void; } const DropzoneList = ( @@ -102,6 +102,7 @@ const DropzoneList = ( acceptedDropTypes={acceptedDropTypes} onDrop={dropBetween(i)} top={-15} + height={30} /> )} @@ -117,7 +118,7 @@ const DropzoneList = ( acceptedDropTypes={acceptedDropTypes} onDrop={dropBetween(items.length)} top={-20} - lastElement + height={15} /> )} diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 1ef51b1073..7bbb28ba88 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -16,7 +16,7 @@ import { nodeHasNonDefaultSettings, } from "../../model/node"; import type { DragItemConceptTreeNode } from "../../standard-query-editor/types"; -import { isMovedObject } from "../../ui-components/Dropzone"; +import { PossibleDroppableObject, isMovedObject } from "../../ui-components/Dropzone"; import DropzoneWithFileInput, { DragItemFile, } from "../../ui-components/DropzoneWithFileInput"; @@ -206,7 +206,9 @@ const FormConceptGroup = (props: Props) => { : props.attributeDropzoneText } dropBetween={(i: number) => { - return (item: DragItemConceptTreeNode) => { + return (item: PossibleDroppableObject) => { + if (item.type !== DNDType.CONCEPT_TREE_NODE)return; + if (props.isValidConcept && !props.isValidConcept(item)) return null; diff --git a/frontend/src/js/ui-components/Dropzone.tsx b/frontend/src/js/ui-components/Dropzone.tsx index 9662a49891..c83df6cd46 100644 --- a/frontend/src/js/ui-components/Dropzone.tsx +++ b/frontend/src/js/ui-components/Dropzone.tsx @@ -1,5 +1,5 @@ import styled from "@emotion/styled"; -import { ForwardedRef, forwardRef, ReactNode } from "react"; +import { ForwardedRef, forwardRef, ReactNode, useEffect } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; import { DNDType } from "../common/constants/dndTypes"; @@ -67,6 +67,7 @@ export interface DropzoneProps { canDrop?: (props: DroppableObject, monitor: DropTargetMonitor) => boolean; onClick?: () => void; children?: (args: ChildArgs) => ReactNode; + setIsOver?: (state: boolean) => void; } export type PossibleDroppableObject = @@ -107,6 +108,7 @@ const Dropzone = ( onClick, invisible, children, + setIsOver, }: DropzoneProps, ref?: ForwardedRef, ) => { @@ -126,6 +128,10 @@ const Dropzone = ( }), }); + useEffect(() => { + if (setIsOver) setIsOver(isOver); + }, [isOver, setIsOver]); + return ( { From 0beaeb3f511259382dcc8c264f25800e6b92be27 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Thu, 17 Aug 2023 11:18:31 +0200 Subject: [PATCH 74/96] formatting --- .../form-components/DropzoneBetweenElements.tsx | 2 +- .../form-concept-group/FormConceptGroup.tsx | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git 
a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 937d2ac68c..5ddbc9cd01 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -6,7 +6,7 @@ import Dropzone, { PossibleDroppableObject, } from "../../ui-components/Dropzone"; -interface Props { +interface Props { onDrop: (props: PossibleDroppableObject, monitor: DropTargetMonitor) => void; acceptedDropTypes: string[]; top: number; diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 7bbb28ba88..82de595e0e 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -16,7 +16,10 @@ import { nodeHasNonDefaultSettings, } from "../../model/node"; import type { DragItemConceptTreeNode } from "../../standard-query-editor/types"; -import { PossibleDroppableObject, isMovedObject } from "../../ui-components/Dropzone"; +import { + PossibleDroppableObject, + isMovedObject, +} from "../../ui-components/Dropzone"; import DropzoneWithFileInput, { DragItemFile, } from "../../ui-components/DropzoneWithFileInput"; @@ -207,8 +210,8 @@ const FormConceptGroup = (props: Props) => { } dropBetween={(i: number) => { return (item: PossibleDroppableObject) => { - if (item.type !== DNDType.CONCEPT_TREE_NODE)return; - + if (item.type !== DNDType.CONCEPT_TREE_NODE) return; + if (props.isValidConcept && !props.isValidConcept(item)) return null; From b08dc1e684fb9fce8fa730acfa6995f03fda59c6 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Thu, 17 Aug 2023 14:03:47 +0200 Subject: [PATCH 75/96] Update backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java Co-authored-by: Torben Meyer --- .../com/bakdata/conquery/models/config/XodusStoreFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java index 068ca1363d..480918b7f2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java @@ -330,7 +330,7 @@ public SingletonStore createIdMappingStore(String pathName, ObjectM synchronized (openStoresInEnv) { final BigStore bigStore = - new BigStore<>(this, validator, environment, ID_MAPPING.storeInfo(), this::closeStore, this::removeStore, objectMapper, 10, 20); + new BigStore<>(this, validator, environment, ID_MAPPING.storeInfo(), this::closeStore, this::removeStore, objectMapper, getReaderWorkers(), getBufferPerWorker()); openStoresInEnv.put(bigStore.getDataXodusStore().getEnvironment(), bigStore.getDataXodusStore()); openStoresInEnv.put(bigStore.getMetaXodusStore().getEnvironment(), bigStore.getMetaXodusStore()); From ad3add127e6eb3e55567d23c0657b9873d2dd55f Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Thu, 17 Aug 2023 16:43:44 +0200 Subject: [PATCH 76/96] Move to a global ExecutorService in XodusStoreFactory --- .../io/storage/xodus/stores/BigStore.java | 7 +- .../xodus/stores/SerializingStore.java | 89 ++++++++++--------- 
.../models/auth/apitoken/TokenStorage.java | 5 +- .../auth/basic/LocalAuthenticationRealm.java | 3 +- .../models/config/XodusStoreFactory.java | 28 +++++- .../io/storage/xodus/stores/BigStoreTest.java | 5 +- .../stores/SerializingStoreDumpTest.java | 3 +- 7 files changed, 87 insertions(+), 53 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java index 8fbc3b38b1..4beb3633c0 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java @@ -14,6 +14,7 @@ import java.util.List; import java.util.UUID; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.stream.Stream; @@ -63,7 +64,7 @@ public BigStore(XodusStoreFactory config, StoreInfo storeInfo, Consumer storeCloseHook, Consumer storeRemoveHook, - ObjectMapper mapper, int nWorkers, int bufferPerWorker) { + ObjectMapper mapper, ExecutorService executorService) { this.storeInfo = storeInfo; // Recommendation by the author of Xodus is to have logFileSize at least be 4 times the biggest file size. @@ -78,7 +79,7 @@ public BigStore(XodusStoreFactory config, BigStoreMetaKeys.class, config.isValidateOnWrite(), config.isRemoveUnreadableFromStore(), - config.getUnreadableDataDumpDirectory(), nWorkers, bufferPerWorker + config.getUnreadableDataDumpDirectory(), executorService ); @@ -91,7 +92,7 @@ public BigStore(XodusStoreFactory config, byte[].class, config.isValidateOnWrite(), config.isRemoveUnreadableFromStore(), - config.getUnreadableDataDumpDirectory(), nWorkers, bufferPerWorker + config.getUnreadableDataDumpDirectory(), executorService ); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index a788bef874..82ebdd7efc 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -6,12 +6,16 @@ import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.Collection; -import java.util.concurrent.ArrayBlockingQueue; +import java.util.List; +import java.util.Objects; +import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; +import java.util.function.Predicate; import java.util.function.Supplier; import javax.validation.Validator; @@ -28,6 +32,10 @@ import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import com.google.common.base.Throwables; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; import jetbrains.exodus.ArrayByteIterable; import jetbrains.exodus.ByteIterable; import lombok.Data; @@ -38,6 +46,7 @@ import lombok.ToString; import 
lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.TestOnly; /** * Key-value-store from {@link KEY} type values to {@link VALUE} values. ACID consistent, stored on disk using {@link jetbrains.exodus.env.Store} via {@link XodusStore}. @@ -62,7 +71,7 @@ public class SerializingStore implements Store { /** * Deserializer for keys */ - private final ThreadLocal keyReader; + private final ObjectReader keyReader; /** * Serializer for values @@ -72,7 +81,7 @@ public class SerializingStore implements Store { /** * Deserializer for values */ - private final ThreadLocal valueReader; + private final ObjectReader valueReader; /** * Optional validator used for serialization. @@ -103,8 +112,7 @@ public class SerializingStore implements Store { private final boolean removeUnreadablesFromUnderlyingStore; private final ObjectMapper objectMapper; - private final int nWorkers; - private final int bufferPerWorker; + private final ExecutorService executor; public , CLASS_V extends Class> SerializingStore(XodusStore store, Validator validator, @@ -113,7 +121,7 @@ public , CLASS_V extends Class> SerializingSto CLASS_V valueType, boolean validateOnWrite, boolean removeUnreadableFromStore, - File unreadableDataDumpDirectory, int nWorkers, int bufferPerWorker) { + File unreadableDataDumpDirectory, ExecutorService executorService) { this.store = store; this.validator = validator; this.validateOnWrite = validateOnWrite; @@ -124,22 +132,21 @@ public , CLASS_V extends Class> SerializingSto valueWriter = objectMapper.writerFor(this.valueType); - valueReader = ThreadLocal.withInitial(() -> objectMapper.readerFor(this.valueType)); + valueReader = objectMapper.readerFor(this.valueType); keyWriter = objectMapper.writerFor(keyType); - keyReader = ThreadLocal.withInitial(() -> objectMapper.readerFor(keyType)); + keyReader = objectMapper.readerFor(keyType); removeUnreadablesFromUnderlyingStore = removeUnreadableFromStore; unreadableValuesDumpDir = unreadableDataDumpDirectory; - this.nWorkers = nWorkers; - this.bufferPerWorker = bufferPerWorker; + executor = executorService; if (shouldDumpUnreadables()) { if (!unreadableValuesDumpDir.exists() && !unreadableValuesDumpDir.mkdirs()) { - throw new IllegalStateException("Could not create dump directory: " + unreadableValuesDumpDir); + throw new IllegalStateException("Could not create dump directory: %s".formatted(unreadableValuesDumpDir)); } else if (!unreadableValuesDumpDir.isDirectory()) { throw new IllegalArgumentException(String.format("The provided path points to an existing file which is not a directory. Was: %s", unreadableValuesDumpDir.getAbsolutePath())); @@ -154,7 +161,7 @@ private boolean shouldDumpUnreadables() { @Override public void add(KEY key, VALUE value) { if (!valueType.isInstance(value)) { - throw new IllegalStateException("The element " + value + " is not of the required type " + valueType); + throw new IllegalStateException("The element %s is not of the required type %s".formatted(value, valueType)); } if (validateOnWrite) { ValidatorHelper.failOnError(log, validator.validate(value)); @@ -190,7 +197,7 @@ private ByteIterable write(Object obj, ObjectWriter writer) { return new ArrayByteIterable(bytes); } catch (JsonProcessingException e) { - throw new RuntimeException("Failed to write " + obj, e); + throw new RuntimeException("Failed to write %s".formatted(obj), e); } } @@ -223,7 +230,7 @@ public VALUE get(KEY key) { * Deserialize value with {@code valueReader}. 
*/ private VALUE readValue(ByteIterable value) { - return read(valueReader.get(), value); + return read(valueReader, value); } /** @@ -286,7 +293,7 @@ private T read(ObjectReader reader, ByteIterable obj) { return reader.readValue(obj.getBytesUnsafe(), 0, obj.getLength()); } catch (IOException e) { - throw new RuntimeException("Failed to read " + JacksonUtil.toJsonDebug(obj.getBytesUnsafe()), e); + throw new RuntimeException("Failed to read %s".formatted(JacksonUtil.toJsonDebug(obj.getBytesUnsafe())), e); } } @@ -337,24 +344,25 @@ private static String sanitiseFileName(@NotNull String name) { @Override public IterationStatistic forEach(StoreEntryConsumer consumer) { final IterationStatistic result = new IterationStatistic(); - final Collection unreadables = new ConcurrentLinkedQueue<>(); - final ThreadPoolExecutor executorService = new ThreadPoolExecutor( - nWorkers, nWorkers, - 0, TimeUnit.SECONDS, - new ArrayBlockingQueue<>(nWorkers * bufferPerWorker), - new ThreadPoolExecutor.CallerRunsPolicy() - ); + final ListeningExecutorService executorService = MoreExecutors.listeningDecorator(executor); + + final Queue> jobs = new ConcurrentLinkedQueue<>(); - // We read in single thread, and deserialise and dispatch in multiple threads. - store.forEach((k, v) -> executorService.submit(() -> handle(consumer, result, unreadables, k, v))); + // We read in single thread, and deserialize and dispatch in multiple threads. + store.forEach((k, v) -> jobs.add(executorService.submit(() -> handle(consumer, result, k, v)))); - executorService.shutdown(); + final ListenableFuture> allJobs = Futures.allAsList(jobs); - while(!executorService.awaitTermination(30, TimeUnit.SECONDS)){ - log.debug("Still waiting for {} jobs.", executorService.getQueue().size()); + while(allJobs.get(30, TimeUnit.SECONDS) == null){ + log.debug("Still waiting for {} jobs.", jobs.stream().filter(Predicate.not(Future::isDone)).count()); } + final List unreadables = allJobs.get() + .stream() + .filter(Objects::nonNull) + .toList(); + // Print some statistics final int total = result.getTotalProcessed(); @@ -407,13 +415,13 @@ private TYPE getDeserializedAndDumpFailed(ByteIterable serial, Function consumer, IterationStatistic result, Collection unreadables, ByteIterable k, ByteIterable v) { + private ByteIterable handle(StoreEntryConsumer consumer, IterationStatistic result, ByteIterable keyRaw, ByteIterable v) { result.incrTotalProcessed(); // Try to read the key first final KEY key = getDeserializedAndDumpFailed( - k, + keyRaw, SerializingStore.this::readKey, - () -> new String(k.getBytesUnsafe()), + () -> new String(keyRaw.getBytesUnsafe()), v, "Could not parse key [{}]" ); if (key == null) { - unreadables.add(k); result.incrFailedKeys(); - return; + return keyRaw; } // Try to read the value @@ -536,9 +544,8 @@ private void handle(StoreEntryConsumer consumer, IterationStatistic ); if (value == null) { - unreadables.add(k); result.incrFailedValues(); - return; + return keyRaw; } // Apply the consumer to key and value @@ -548,5 +555,7 @@ private void handle(StoreEntryConsumer consumer, IterationStatistic catch (Exception e) { log.warn("Unable to apply for-each consumer on key[{}]", key, e); } + + return null; } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java b/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java index 9d4ec129d7..86b34d5267 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/auth/apitoken/TokenStorage.java @@ -8,6 +8,7 @@ import java.util.List; import java.util.Optional; import java.util.UUID; +import java.util.concurrent.Executors; import javax.validation.Validator; @@ -72,7 +73,7 @@ public void start(){ ApiTokenData.class, true, false, - null, 1, 100 + null, Executors.newSingleThreadExecutor() )); openStoresInEnv.add(data); @@ -90,7 +91,7 @@ public void start(){ ApiTokenData.MetaData.class, true, false, - null, 1, 100 + null, Executors.newSingleThreadExecutor() )); openStoresInEnv.add(meta); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java index 6fca172e9b..ed2e0eb49b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java @@ -4,6 +4,7 @@ import java.io.IOException; import java.util.List; import java.util.Optional; +import java.util.concurrent.Executors; import javax.validation.Validator; @@ -108,7 +109,7 @@ protected void onInit() { PasswordHasher.HashedEntry.class, false, true, - null, 1, 100 + null, Executors.newSingleThreadExecutor() )); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java index 480918b7f2..4e28a84f8c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java @@ -11,6 +11,10 @@ import java.util.List; import java.util.Objects; import java.util.Set; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.function.Function; import javax.annotation.Nullable; @@ -136,7 +140,7 @@ public class XodusStoreFactory implements StoreFactory { * @implNote it's always only one thread reading from disk, dispatching to multiple reader threads. */ @Min(1) - private int readerWorkers = 10; + private int readerWorkers = Runtime.getRuntime().availableProcessors(); /** * How many slots of buffering to use before the IO thread is put to sleep. 
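For illustration only — a sketch of the Guava listenable-future pattern that the reworked forEach in this patch relies on together with the shared reader executor configured here: each submitted task reports its own failure (by returning the offending key, null on success), Futures.allAsList waits for all of them, and the non-null results are collected afterwards. CollectUnreadableSketch, tryParse and the "key-N" naming are invented placeholders, not the store's real types.

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

public class CollectUnreadableSketch {

    public static void main(String[] args) throws Exception {
        final ExecutorService raw = Executors.newFixedThreadPool(4);
        final ListeningExecutorService pool = MoreExecutors.listeningDecorator(raw);

        final List<ListenableFuture<String>> jobs = new ArrayList<>();

        for (int i = 0; i < 100; i++) {
            final int entry = i;
            // Each task returns null on success and the offending key on failure.
            jobs.add(pool.submit(() -> tryParse(entry) ? null : "key-" + entry));
        }

        // allAsList completes once every job has finished, preserving submission order.
        final List<String> unreadables = Futures.allAsList(jobs).get()
                                                .stream()
                                                .filter(Objects::nonNull)
                                                .toList();

        System.out.println("Unreadable entries: " + unreadables);
        pool.shutdown();
    }

    private static boolean tryParse(int entry) {
        // Stand-in for deserializing an entry; pretend every 10th one is broken.
        return entry % 10 != 0;
    }
}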
@@ -144,6 +148,22 @@ public class XodusStoreFactory implements StoreFactory { @Min(1) private int bufferPerWorker = 20; + @JsonIgnore + private ExecutorService readerExecutorService; + + public ExecutorService getReaderExecutorService() { + if (readerExecutorService == null){ + readerExecutorService = new ThreadPoolExecutor( + 1, getReaderWorkers(), + 5, TimeUnit.MINUTES, + new ArrayBlockingQueue<>(getReaderWorkers() * getBufferPerWorker()), + new ThreadPoolExecutor.CallerRunsPolicy() + ); + } + + return readerExecutorService; + } + private boolean useWeakDictionaryCaching; @NotNull private Duration weakCacheDuration = Duration.hours(48); @@ -287,7 +307,7 @@ public IdentifiableStore createDictionaryStore(CentralRegistry centr DICTIONARIES.storeInfo(), this::closeStore, this::removeStore, - centralRegistry.injectIntoNew(objectMapper), getReaderWorkers(), getBufferPerWorker() + centralRegistry.injectIntoNew(objectMapper), getReaderExecutorService() ); openStoresInEnv.put(bigStore.getDataXodusStore().getEnvironment(), bigStore.getDataXodusStore()); openStoresInEnv.put(bigStore.getMetaXodusStore().getEnvironment(), bigStore.getMetaXodusStore()); @@ -330,7 +350,7 @@ public SingletonStore createIdMappingStore(String pathName, ObjectM synchronized (openStoresInEnv) { final BigStore bigStore = - new BigStore<>(this, validator, environment, ID_MAPPING.storeInfo(), this::closeStore, this::removeStore, objectMapper, getReaderWorkers(), getBufferPerWorker()); + new BigStore<>(this, validator, environment, ID_MAPPING.storeInfo(), this::closeStore, this::removeStore, objectMapper, getReaderExecutorService()); openStoresInEnv.put(bigStore.getDataXodusStore().getEnvironment(), bigStore.getDataXodusStore()); openStoresInEnv.put(bigStore.getMetaXodusStore().getEnvironment(), bigStore.getMetaXodusStore()); @@ -496,7 +516,7 @@ public Store createStore(Environment environment, Valid isValidateOnWrite(), isRemoveUnreadableFromStore(), getUnreadableDataDumpDirectory(), - getReaderWorkers(), getBufferPerWorker() + getReaderExecutorService() )); } } diff --git a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java index be92d2e953..4ce7bb25d7 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/BigStoreTest.java @@ -7,6 +7,7 @@ import java.io.IOException; import java.io.SequenceInputStream; import java.nio.file.Files; +import java.util.concurrent.Executors; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.storage.StoreMappings; @@ -64,7 +65,7 @@ public void destroy() throws IOException { public void testFull() throws JSONException, IOException { BigStore store = new BigStore<>(new XodusStoreFactory(), Validators.newValidator(), env, - StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER, 10, 20 + StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER, Executors.newSingleThreadExecutor() ); @@ -106,7 +107,7 @@ public void testFull() throws JSONException, IOException { @Test public void testEmpty() throws JSONException, IOException { BigStore store = new BigStore<>(new XodusStoreFactory(), Validators.newValidator(), env, - StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER, 10, 20 + StoreMappings.DICTIONARIES.storeInfo(), (e) -> {}, (e) -> {}, MAPPER, Executors.newSingleThreadExecutor() ); 
store.setChunkByteSize(Ints.checkedCast(DataSize.megabytes(1).toBytes())); diff --git a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java index 5d92970875..d55eb144ed 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java @@ -4,6 +4,7 @@ import java.io.File; import java.io.IOException; +import java.util.concurrent.Executors; import javax.validation.Validator; @@ -72,7 +73,7 @@ private SerializingStore createSerializedStore(XodusSto storeId.getValueType(), config.isValidateOnWrite(), config.isRemoveUnreadableFromStore(), - config.getUnreadableDataDumpDirectory(), 1, 1 + config.getUnreadableDataDumpDirectory(), Executors.newSingleThreadExecutor() ); } From ab753486a1ecc2aaac3b65db55f519829db4af98 Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Thu, 17 Aug 2023 17:06:09 +0200 Subject: [PATCH 77/96] cleanup --- .../io/storage/xodus/stores/SerializingStore.java | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 82ebdd7efc..ec11f0fa6b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -310,7 +310,6 @@ public static File makeDumpFileName(@NotNull String keyOfDump, @NotNull File unr .resolve(storeName) .resolve(sanitiseFileName(keyOfDump) + "." + DUMP_FILE_EXTENSION) .toFile(); - } /** @@ -326,7 +325,6 @@ public static File makeExceptionFileName(@NotNull String keyOfDump, @NotNull Fil .resolve(storeName) .resolve(sanitiseFileName(keyOfDump) + "." 
+ EXCEPTION_FILE_EXTENSION) .toFile(); - } private static String sanitiseFileName(@NotNull String name) { @@ -518,7 +516,7 @@ public int getTotalProcessed() { } } - private ByteIterable handle(StoreEntryConsumer consumer, IterationStatistic result, ByteIterable keyRaw, ByteIterable v) { + private ByteIterable handle(StoreEntryConsumer consumer, IterationStatistic result, ByteIterable keyRaw, ByteIterable valueRaw) { result.incrTotalProcessed(); // Try to read the key first @@ -526,7 +524,7 @@ private ByteIterable handle(StoreEntryConsumer consumer, IterationSt keyRaw, SerializingStore.this::readKey, () -> new String(keyRaw.getBytesUnsafe()), - v, + valueRaw, "Could not parse key [{}]" ); if (key == null) { @@ -536,10 +534,10 @@ private ByteIterable handle(StoreEntryConsumer consumer, IterationSt // Try to read the value final VALUE value = getDeserializedAndDumpFailed( - v, + valueRaw, SerializingStore.this::readValue, key::toString, - v, + valueRaw, "Could not parse value for key [{}]" ); @@ -550,7 +548,7 @@ private ByteIterable handle(StoreEntryConsumer consumer, IterationSt // Apply the consumer to key and value try { - consumer.accept(key, value, v.getLength()); + consumer.accept(key, value, valueRaw.getLength()); } catch (Exception e) { log.warn("Unable to apply for-each consumer on key[{}]", key, e); From 09b039ff1c002b44c31cf3aeff02c0814b82e79c Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Thu, 17 Aug 2023 17:57:18 +0200 Subject: [PATCH 78/96] fixes duplicate and faulty implementation of dumpToFile --- .../xodus/stores/SerializingStore.java | 57 +++---------------- 1 file changed, 8 insertions(+), 49 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 6b1a1095d1..c4d2591d71 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -192,48 +192,6 @@ private static String sanitiseFileName(@NotNull String name) { return FileUtil.SAVE_FILENAME_REPLACEMENT_MATCHER.matcher(name).replaceAll("_"); } - /** - * Dumps the content of an unreadable value to a file as a json (it tries to parse it as an object and than tries to dump it as a json). - * - * @param obj The object to dump. - * @param keyOfDump The key under which the unreadable value is accessible. It is used for the file name. - * @param reason The exception causing us to dump the file - * @param unreadableDumpDir The director to dump to. The method assumes that the directory exists and is okay to write to. - * @param storeName The name of the store which is also used in the dump file name. 
- */ - private static void dumpToFile(@NonNull ByteIterable obj, @NonNull String keyOfDump, Exception reason, @NonNull File unreadableDumpDir, String storeName, ObjectMapper objectMapper) { - // Create dump filehandle - final File dumpfile = makeDumpFileName(keyOfDump, unreadableDumpDir, storeName); - final File exceptionFileName = makeExceptionFileName(keyOfDump, unreadableDumpDir, storeName); - - if (dumpfile.exists() || exceptionFileName.exists()) { - log.trace("Abort dumping of file {} because it already exists.", dumpfile); - return; - } - - if (!dumpfile.getParentFile().exists() && !dumpfile.getParentFile().mkdirs()) { - throw new IllegalStateException("Could not create `%s`.".formatted(dumpfile.getParentFile())); - } - - // Write json - try { - log.info("Dumping value of key {} to {} (because it cannot be deserialized anymore).", keyOfDump, dumpfile.getCanonicalPath()); - - final JsonNode dump = objectMapper.readerFor(JsonNode.class).readValue(obj.getBytesUnsafe(), 0, obj.getLength()); - Jackson.MAPPER.writer().writeValue(dumpfile, dump); - } - catch (IOException e) { - log.error("Failed to dump unreadable value of key `{}` to file `{}`", keyOfDump, dumpfile, e); - } - - try (PrintStream out = new PrintStream(exceptionFileName)) { - reason.printStackTrace(out); - } - catch (IOException e) { - log.error("Failed to dump exception for `{}` to file `{}`.", keyOfDump, exceptionFileName, e); - } - - } @Override public void add(KEY key, VALUE value) { @@ -324,7 +282,7 @@ private VALUE readValue(ByteIterable value) { * @param unreadableDumpDir The director to dump to. The method assumes that the directory exists and is okay to write to. * @param storeName The name of the store which is also used in the dump file name. */ - private static void dumpToFile(@NonNull byte[] gzippedObj, @NonNull String keyOfDump, Exception reason, @NonNull File unreadableDumpDir, String storeName, ObjectMapper objectMapper) { + private static void dumpToFile(byte[] gzippedObj, @NonNull String keyOfDump, Exception reason, @NonNull File unreadableDumpDir, String storeName, ObjectMapper objectMapper) { // Create dump filehandle final File dumpfile = makeDumpFileName(keyOfDump, unreadableDumpDir, storeName); final File exceptionFileName = makeExceptionFileName(keyOfDump, unreadableDumpDir, storeName); @@ -338,13 +296,11 @@ private static void dumpToFile(@NonNull byte[] gzippedObj, @NonNull String keyOf throw new IllegalStateException("Could not create `%s`.".formatted(dumpfile.getParentFile())); } - //TODO FK: dump in a separate thread so we are not blocking the reader thread. 
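For illustration only — a sketch of the gzip-aware dumping idea behind the consolidated dumpToFile: the stored value is decompressed, parsed as JSON and written out pretty-printed, so a value that can no longer be deserialized into its model class can still be inspected by hand. The file name, sample payload and the gzip() helper are assumptions for the sketch, not the store's actual dump layout.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class GzipJsonDumpSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static void main(String[] args) throws IOException {
        final byte[] gzipped = gzip("{\"id\":42,\"name\":\"example\"}");

        final File dumpFile = new File("unreadable-dump.json");

        // Decompress the raw bytes and re-serialize them as pretty-printed JSON,
        // so a corrupted-but-decompressible entry remains readable for debugging.
        try (GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(gzipped))) {
            final JsonNode dump = MAPPER.readTree(in);
            MAPPER.writerWithDefaultPrettyPrinter().writeValue(dumpFile, dump);
        }
    }

    private static byte[] gzip(String json) throws IOException {
        final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (GZIPOutputStream out = new GZIPOutputStream(bytes)) {
            out.write(json.getBytes(StandardCharsets.UTF_8));
        }
        return bytes.toByteArray();
    }
}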
- // Write json try { log.info("Dumping value of key {} to {} (because it cannot be deserialized anymore).", keyOfDump, dumpfile.getCanonicalPath()); - final JsonNode dump = objectMapper.readerFor(JsonNode.class).readValue(debugUnGzip(gzippedObj)); + final JsonNode dump = objectMapper.readerFor(JsonNode.class).readValue(new GZIPInputStream(new ByteArrayInputStream(gzippedObj))); Jackson.MAPPER.writer().writeValue(dumpfile, dump); } catch (IOException e) { @@ -411,11 +367,14 @@ public IterationStatistic forEach(StoreEntryConsumer consumer) { final ListenableFuture> allJobs = Futures.allAsList(jobs); - while (allJobs.get(30, TimeUnit.SECONDS) == null) { + + List maybeFailed; + + while ((maybeFailed = allJobs.get(30, TimeUnit.SECONDS)) == null) { log.debug("Still waiting for {} jobs.", jobs.stream().filter(Predicate.not(Future::isDone)).count()); } - final List unreadables = allJobs.get().stream().filter(Objects::nonNull).toList(); + final List unreadables = maybeFailed.stream().filter(Objects::nonNull).toList(); // Print some statistics final int total = result.getTotalProcessed(); @@ -491,7 +450,7 @@ private TYPE getDeserializedAndDumpFailed(ByteIterable serial, Function Date: Mon, 21 Aug 2023 15:28:15 +0200 Subject: [PATCH 79/96] test if the query result contains dates --- .../apiv1/execution/FullExecutionStatus.java | 2 ++ .../models/execution/ManagedExecution.java | 33 ++++++++++++++++--- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java index 17c058884d..32d9d45c38 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java @@ -46,6 +46,8 @@ public class FullExecutionStatus extends ExecutionStatus { */ private boolean canExpand; + private boolean containsDates; + /** * Is set to the query description if the user can expand all included concepts. 
*/ diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java index 3dae68245c..fb709ad704 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java @@ -5,6 +5,7 @@ import java.time.ZoneId; import java.time.temporal.ChronoUnit; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.UUID; @@ -20,6 +21,9 @@ import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; import com.bakdata.conquery.apiv1.execution.OverviewExecutionStatus; import com.bakdata.conquery.apiv1.query.QueryDescription; +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; +import com.bakdata.conquery.apiv1.query.concept.specific.external.DateFormat; import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.jackson.serializer.MetaIdRef; import com.bakdata.conquery.io.jackson.serializer.NsIdRef; @@ -41,7 +45,6 @@ import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.QueryUtils; import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector; @@ -347,6 +350,30 @@ protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject */ protected void setAdditionalFieldsForStatusWithSource(Subject subject, FullExecutionStatus status) { QueryDescription query = getSubmitted(); + + status.setCanExpand(canSubjectExpand(subject, query)); + + status.setContainsDates(containsDates(query)); + + status.setQuery(canSubjectExpand(subject, query) ? getSubmitted() : null); + } + + private boolean containsDates(QueryDescription query) { + return Visitable.stream(query) + .anyMatch(visitable -> { + if (visitable instanceof CQConcept cqConcept) { + return cqConcept.isAggregateEventDates(); + } + + if (visitable instanceof CQExternal external) { + return Arrays.stream(DateFormat.values()).anyMatch(external.getFormat()::contains); + } + + return false; + }); + } + + private boolean canSubjectExpand(Subject subject, QueryDescription query) { NamespacedIdentifiableCollector namespacesIdCollector = new NamespacedIdentifiableCollector(); query.visit(namespacesIdCollector); @@ -358,9 +385,7 @@ protected void setAdditionalFieldsForStatusWithSource(Subject subject, FullExecu .collect(Collectors.toSet()); boolean canExpand = subject.isPermittedAll(concepts, Ability.READ); - - status.setCanExpand(canExpand); - status.setQuery(canExpand ? 
getSubmitted() : null); + return canExpand; } @JsonIgnore From 1ec0dcb0e2c4d58c67de3ba0c7b7fc2f15d13b77 Mon Sep 17 00:00:00 2001 From: Kai Rollmann Date: Tue, 22 Aug 2023 16:06:51 +0200 Subject: [PATCH 80/96] Validate form right away, show no red --- frontend/src/js/external-forms/FormsTab.tsx | 13 +++++++++++++ frontend/src/js/external-forms/form/Field.tsx | 5 +++-- frontend/src/js/query-runner/QueryRunner.tsx | 19 ++++++++----------- 3 files changed, 24 insertions(+), 13 deletions(-) diff --git a/frontend/src/js/external-forms/FormsTab.tsx b/frontend/src/js/external-forms/FormsTab.tsx index 1a7f93aa9c..5ffd4cdb17 100644 --- a/frontend/src/js/external-forms/FormsTab.tsx +++ b/frontend/src/js/external-forms/FormsTab.tsx @@ -94,8 +94,18 @@ const useInitializeForm = ({ mode: "onChange", }); + useEffect( + function triggerValidationInitially() { + methods.trigger(); + }, + [methods, config], + ); + const onReset = useCallback(() => { methods.reset(defaultValues); + // Because for some reason, running this in the same tick doesn't work + // Asked about it: https://github.com/orgs/react-hook-form/discussions/10823 + setTimeout(() => methods.trigger(), 0); }, [methods, defaultValues]); const onResetActiveForm = useCallback(() => { @@ -103,6 +113,9 @@ const useInitializeForm = ({ ...methods.getValues(), ...defaultValues, }); + // Because for some reason, running this in the same tick doesn't work + // Asked about it: https://github.com/orgs/react-hook-form/discussions/10823 + setTimeout(() => methods.trigger(), 0); }, [methods, defaultValues]); return { methods, config, datasetOptions, onReset, onResetActiveForm }; diff --git a/frontend/src/js/external-forms/form/Field.tsx b/frontend/src/js/external-forms/form/Field.tsx index 48776d37c1..ad657d4e2f 100644 --- a/frontend/src/js/external-forms/form/Field.tsx +++ b/frontend/src/js/external-forms/form/Field.tsx @@ -81,11 +81,12 @@ const FieldContainer = styled("div")<{ noLabel?: boolean; hasError?: boolean }>` background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; border: 1px solid - ${({ theme, hasError }) => (hasError ? theme.col.red : theme.col.grayLight)}; + ${({ theme, hasError }) => + hasError ? theme.col.blueGrayDark : theme.col.grayLight}; `; const ErrorContainer = styled("div")` - color: ${({ theme }) => theme.col.red}; + color: ${({ theme }) => theme.col.blueGrayDark}; font-weight: 700; font-size: ${({ theme }) => theme.font.sm}; `; diff --git a/frontend/src/js/query-runner/QueryRunner.tsx b/frontend/src/js/query-runner/QueryRunner.tsx index 7b01ef1918..296b46be41 100644 --- a/frontend/src/js/query-runner/QueryRunner.tsx +++ b/frontend/src/js/query-runner/QueryRunner.tsx @@ -1,5 +1,4 @@ import styled from "@emotion/styled"; -import { FC } from "react"; import { useHotkeys } from "react-hotkeys-hook"; import { exists } from "../common/helpers/exists"; @@ -37,22 +36,20 @@ const LoadingGroup = styled("div")` justify-content: flex-end; `; -interface PropsT { - queryRunner?: QueryRunnerStateT; - isQueryRunning: boolean; - disabled: boolean; - buttonTooltip?: string; - startQuery: () => void; - stopQuery: () => void; -} - -const QueryRunner: FC = ({ +const QueryRunner = ({ queryRunner, startQuery, stopQuery, buttonTooltip, isQueryRunning, disabled, +}: { + queryRunner?: QueryRunnerStateT; + isQueryRunning: boolean; + disabled: boolean; + buttonTooltip?: string; + startQuery: () => void; + stopQuery: () => void; }) => { const btnAction = isQueryRunning ? 
stopQuery : startQuery; const isStartStopLoading = From e55f0262fcf1fd8582f298cad3d281e9d516fb73 Mon Sep 17 00:00:00 2001 From: Kai Rollmann Date: Wed, 23 Aug 2023 12:03:02 +0200 Subject: [PATCH 81/96] Upgrade react-hook-form to fix async trigger --- frontend/package.json | 2 +- frontend/src/js/external-forms/FormsTab.tsx | 8 ++------ frontend/yarn.lock | 8 ++++---- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index d886016692..7a622bc734 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -64,7 +64,7 @@ "react-dom": "^18.1.0", "react-error-boundary": "^3.1.4", "react-highlight-words": "^0.20.0", - "react-hook-form": "^7.43.5", + "react-hook-form": "^7.45.4", "react-hotkeys-hook": "^4.3.8", "react-i18next": "^12.2.0", "react-list": "^0.8.16", diff --git a/frontend/src/js/external-forms/FormsTab.tsx b/frontend/src/js/external-forms/FormsTab.tsx index 5ffd4cdb17..b7ffa118aa 100644 --- a/frontend/src/js/external-forms/FormsTab.tsx +++ b/frontend/src/js/external-forms/FormsTab.tsx @@ -103,9 +103,7 @@ const useInitializeForm = ({ const onReset = useCallback(() => { methods.reset(defaultValues); - // Because for some reason, running this in the same tick doesn't work - // Asked about it: https://github.com/orgs/react-hook-form/discussions/10823 - setTimeout(() => methods.trigger(), 0); + methods.trigger(); }, [methods, defaultValues]); const onResetActiveForm = useCallback(() => { @@ -113,9 +111,7 @@ const useInitializeForm = ({ ...methods.getValues(), ...defaultValues, }); - // Because for some reason, running this in the same tick doesn't work - // Asked about it: https://github.com/orgs/react-hook-form/discussions/10823 - setTimeout(() => methods.trigger(), 0); + methods.trigger(); }, [methods, defaultValues]); return { methods, config, datasetOptions, onReset, onResetActiveForm }; diff --git a/frontend/yarn.lock b/frontend/yarn.lock index ae9546cbe7..825ff7e73f 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -8534,10 +8534,10 @@ react-highlight-words@^0.20.0: memoize-one "^4.0.0" prop-types "^15.5.8" -react-hook-form@^7.43.5: - version "7.43.5" - resolved "https://registry.yarnpkg.com/react-hook-form/-/react-hook-form-7.43.5.tgz#b320405594f1506d8d57b954383166d4ff563778" - integrity sha512-YcaXhuFHoOPipu5pC7ckxrLrialiOcU91pKu8P+isAcXZyMgByUK9PkI9j5fENO4+6XU5PwWXRGMIFlk9u9UBQ== +react-hook-form@^7.45.4: + version "7.45.4" + resolved "https://registry.yarnpkg.com/react-hook-form/-/react-hook-form-7.45.4.tgz#73d228b704026ae95d7e5f7b207a681b173ec62a" + integrity sha512-HGDV1JOOBPZj10LB3+OZgfDBTn+IeEsNOKiq/cxbQAIbKaiJUe/KV8DBUzsx0Gx/7IG/orWqRRm736JwOfUSWQ== react-hotkeys-hook@^4.3.8: version "4.3.8" From 9422cd5fd27bc12ce8ee94b6e04f24cf7f463bcf Mon Sep 17 00:00:00 2001 From: Jonas Arnhold Date: Wed, 23 Aug 2023 16:54:31 +0200 Subject: [PATCH 82/96] Add HANA support (#109) (#3154) --- .github/workflows/test_backend.yml | 2 +- backend/pom.xml | 5 + .../mode/local/LocalManagerProvider.java | 16 +- .../conquery/models/config/Dialect.java | 3 +- .../models/config/SqlConnectorConfig.java | 1 + .../conquery/sql/DslContextFactory.java | 9 +- .../conversion/context/ConversionContext.java | 17 +- .../context/selects/ConceptSelects.java | 2 +- .../context/selects/MergedSelects.java | 19 +- .../conversion/context/selects/Selects.java | 5 +- .../conversion/context/step/QueryStep.java | 6 +- .../cqelement/CQConceptConverter.java | 203 ------------------ .../cqelement/concept/CQConceptConverter.java | 76 +++++++ 
.../cqelement/concept/ConceptQueryStep.java | 40 ++++ .../concept/DateRestrictionQueryStep.java | 44 ++++ .../concept/EventFilterQueryStep.java | 39 ++++ .../cqelement/concept/EventSelectStep.java | 39 ++++ .../concept/FinalConceptQueryStep.java | 23 ++ .../PreprocessingQueryStep.java} | 80 +++---- .../cqelement/concept/StepContext.java | 22 ++ .../conversion/dialect/HanaSqlDialect.java | 46 ++++ .../dialect/HanaSqlFunctionProvider.java | 134 ++++++++++++ .../dialect/PostgreSqlFunctionProvider.java | 57 ++--- .../sql/conversion/dialect/SqlDialect.java | 16 +- .../dialect/SqlFunctionProvider.java | 5 +- .../select/DateDistanceConverter.java | 20 +- .../select/FirstValueConverter.java | 4 +- .../sql/execution/SqlExecutionService.java | 5 +- .../conquery/sql/models/ColumnDateRange.java | 24 ++- .../integration/IntegrationTests.java | 4 +- .../integration/sql/CsvTableImporter.java | 65 ++++-- .../sql/SqlIntegrationTestSpec.java | 8 +- .../sql/TestPostgreSqlDialect.java | 35 --- .../sql/dialect/HanaSqlIntegrationTests.java | 168 +++++++++++++++ .../sql/dialect/MockDateNowSupplier.java | 14 ++ .../PostgreSqlIntegrationTests.java | 47 ++-- .../sql/dialect/TestContextProvider.java | 11 + .../sql/testcontainer/hana/HanaContainer.java | 71 ++++++ .../tests/sql/and/different_concept/and.json | 13 +- .../sql/and/different_concept/content_1.csv | 26 +-- .../sql/and/different_concept/expected.csv | 8 +- .../{and.json => and_same_concept.json} | 0 .../date_restriction_date_column/content.csv | 18 +- .../date_restriction_date_column.json | 11 +- .../date_restriction_date_range.json | 85 -------- .../content.csv | 0 .../daterange_column.spec.json | 83 +++++++ .../expected.csv | 2 +- .../tests/sql/filter/number/content.csv | 26 +-- .../tests/sql/filter/number/number.spec.json | 4 - .../sql/filter/number_only_max/content.csv | 26 +-- .../number_only_max/number_only_max.spec.json | 4 - .../sql/filter/number_only_min/content.csv | 26 +-- .../number_only_min/number_only_min.spec.json | 4 - .../tests/sql/filter/select/content.csv | 18 +- .../tests/sql/filter/select/select.spec.json | 4 - .../sql/or/different_concept/content_1.csv | 26 +-- .../sql/or/different_concept/expected.csv | 2 +- .../sql/or/different_concept/or.spec.json | 11 +- ...or.spec.json => or_same_concept.spec.json} | 0 .../selects/date_distance/months/content.csv | 2 +- .../selects/date_distance/months/expected.csv | 2 +- .../selects/date_distance/years/expected.csv | 2 +- executable/pom.xml | 9 +- 64 files changed, 1169 insertions(+), 628 deletions(-) delete mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptQueryStep.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/DateRestrictionQueryStep.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterQueryStep.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventSelectStep.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptQueryStep.java rename backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/{ConceptPreprocessingService.java => concept/PreprocessingQueryStep.java} (50%) create mode 100644 
backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/StepContext.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java create mode 100644 backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java delete mode 100644 backend/src/test/java/com/bakdata/conquery/integration/sql/TestPostgreSqlDialect.java create mode 100644 backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java create mode 100644 backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/MockDateNowSupplier.java rename backend/src/test/java/com/bakdata/conquery/integration/sql/{ => dialect}/PostgreSqlIntegrationTests.java (67%) create mode 100644 backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java create mode 100644 backend/src/test/java/com/bakdata/conquery/integration/sql/testcontainer/hana/HanaContainer.java rename backend/src/test/resources/tests/sql/and/same_concept/{and.json => and_same_concept.json} (100%) delete mode 100644 backend/src/test/resources/tests/sql/date_restriction/daterange/date_restriction_date_range.json rename backend/src/test/resources/tests/sql/date_restriction/{daterange => postgres_daterange}/content.csv (100%) create mode 100644 backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json rename backend/src/test/resources/tests/sql/date_restriction/{daterange => postgres_daterange}/expected.csv (68%) rename backend/src/test/resources/tests/sql/or/same_concept/{or.spec.json => or_same_concept.spec.json} (100%) diff --git a/.github/workflows/test_backend.yml b/.github/workflows/test_backend.yml index bcb2ce27c5..f250dfcc49 100644 --- a/.github/workflows/test_backend.yml +++ b/.github/workflows/test_backend.yml @@ -14,7 +14,7 @@ on: jobs: test: runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 20 steps: - name: Cache local Maven repository uses: actions/cache@v2 diff --git a/backend/pom.xml b/backend/pom.xml index c2cdef06ad..5f4b400d24 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -379,5 +379,10 @@ 1.17.6 test + + com.sap.cloud.db.jdbc + ngdbc + 2.17.10 + diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalManagerProvider.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalManagerProvider.java index 794df98a87..eb76f7ff68 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalManagerProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalManagerProvider.java @@ -10,22 +10,34 @@ import com.bakdata.conquery.mode.ManagerProvider; import com.bakdata.conquery.mode.NamespaceHandler; import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.config.SqlConnectorConfig; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.ShardNodeInformation; import com.bakdata.conquery.sql.DslContextFactory; import com.bakdata.conquery.sql.SqlContext; +import com.bakdata.conquery.sql.conversion.dialect.HanaSqlDialect; import com.bakdata.conquery.sql.conversion.dialect.PostgreSqlDialect; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; import io.dropwizard.setup.Environment; +import org.jooq.DSLContext; public class LocalManagerProvider implements ManagerProvider { private static final Supplier> EMPTY_NODE_PROVIDER = Collections::emptyList; 
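+	/**
+	 * Wires the SQL execution stack for local mode: builds a DSLContext from the connector config,
+	 * picks the SqlDialect implementation matching the configured dialect (POSTGRESQL or HANA) and
+	 * hands the resulting SqlContext to the LocalNamespaceHandler.
+	 */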
public DelegateManager provideManager(ConqueryConfig config, Environment environment) { + InternalObjectMapperCreator creator = ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator()); - // todo(tm): proper injection - SqlContext sqlContext = new SqlContext(config.getSqlConnectorConfig(), new PostgreSqlDialect(DslContextFactory.create(config.getSqlConnectorConfig()))); + + SqlConnectorConfig sqlConnectorConfig = config.getSqlConnectorConfig(); + DSLContext dslContext = DslContextFactory.create(sqlConnectorConfig); + SqlDialect sqlDialect = switch (sqlConnectorConfig.getDialect()) { + case POSTGRESQL -> new PostgreSqlDialect(dslContext); + case HANA -> new HanaSqlDialect(dslContext); + }; + SqlContext sqlContext = new SqlContext(sqlConnectorConfig, sqlDialect); + NamespaceHandler namespaceHandler = new LocalNamespaceHandler(config, creator, sqlContext); DatasetRegistry datasetRegistry = ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator); creator.init(datasetRegistry); diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/Dialect.java b/backend/src/main/java/com/bakdata/conquery/models/config/Dialect.java index 2ec655aea9..b57931bbac 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/Dialect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/Dialect.java @@ -6,7 +6,8 @@ @Getter public enum Dialect { - POSTGRESQL(SQLDialect.POSTGRES); + POSTGRESQL(SQLDialect.POSTGRES), + HANA(SQLDialect.DEFAULT); private final SQLDialect jooqDialect; diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/SqlConnectorConfig.java b/backend/src/main/java/com/bakdata/conquery/models/config/SqlConnectorConfig.java index 857018cada..e6e83b723b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/SqlConnectorConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/SqlConnectorConfig.java @@ -25,5 +25,6 @@ public class SqlConnectorConfig { private String databasePassword; private String jdbcConnectionUrl; + private String primaryColumn = "pid"; } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/DslContextFactory.java b/backend/src/main/java/com/bakdata/conquery/sql/DslContextFactory.java index 16bfe54ecd..980aa597bb 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/DslContextFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/DslContextFactory.java @@ -6,6 +6,7 @@ import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import org.jooq.DSLContext; +import org.jooq.conf.RenderQuotedNames; import org.jooq.conf.Settings; import org.jooq.impl.DSL; @@ -19,10 +20,16 @@ public static DSLContext create(SqlConnectorConfig config) { DataSource dataSource = new HikariDataSource(hikariConfig); + Settings settings = new Settings() + .withRenderFormatted(config.isWithPrettyPrinting()) + // enforces all identifiers to be quoted if not explicitly unquoted via DSL.unquotedName() + // to prevent any lowercase/uppercase SQL dialect specific identifier naming issues + .withRenderQuotedNames(RenderQuotedNames.EXPLICIT_DEFAULT_QUOTED); + return DSL.using( dataSource, config.getDialect().getJooqDialect(), - new Settings().withRenderFormatted(config.isWithPrettyPrinting()) + settings ); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java index b07383dacf..cdd73d7f8f 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java @@ -1,5 +1,7 @@ package com.bakdata.conquery.sql.conversion.context; +import java.util.List; + import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.config.SqlConnectorConfig; import com.bakdata.conquery.sql.conversion.NodeConverterService; @@ -12,8 +14,6 @@ import org.jooq.Record; import org.jooq.Select; -import java.util.List; - @Value @With @Builder(toBuilder = true) @@ -25,20 +25,23 @@ public class ConversionContext { @Singular List querySteps; Select finalQuery; - boolean negation; CDateRange dateRestrictionRange; int queryStepCounter; - + boolean negation; + boolean isGroupBy; public boolean dateRestrictionActive() { return this.dateRestrictionRange != null; } + /** + * Adds a converted {@link QueryStep} to the list of query steps of this {@link ConversionContext} and increments its conceptCounter by 1. + */ public ConversionContext withQueryStep(QueryStep queryStep) { return this.toBuilder() - .queryStep(queryStep) - .queryStepCounter(queryStepCounter + 1) - .build(); + .queryStep(queryStep) + .queryStepCounter(queryStepCounter + 1) + .build(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java index 2d2d808632..9e398edad3 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java @@ -37,7 +37,7 @@ public Selects withValidityDate(ColumnDateRange validityDate) { } @Override - public ConceptSelects byName(String qualifier) { + public ConceptSelects qualifiedWith(String qualifier) { return builder() .primaryColumn(this.mapFieldToQualifier(qualifier, this.primaryColumn)) .dateRestrictionRange(this.dateRestrictionRange.map(dateRestriction -> dateRestriction.qualify(qualifier))) diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java index 7d3547d5b2..461b44a631 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java @@ -53,11 +53,11 @@ public Selects withValidityDate(ColumnDateRange validityDate) { } @Override - public MergedSelects byName(String qualifier) { + public MergedSelects qualifiedWith(String cteName) { return new MergedSelects( - this.mapFieldToQualifier(qualifier, this.primaryColumn), - this.validityDate.map(columnDateRange -> columnDateRange.qualify(qualifier)), - this.mapFieldStreamToQualifier(qualifier, this.mergedSelects.stream()).toList() + this.mapFieldToQualifier(cteName, this.primaryColumn), + this.validityDate.map(columnDateRange -> columnDateRange.qualify(cteName)), + this.mapFieldStreamToQualifier(cteName, this.mergedSelects.stream()).toList() ); } @@ -76,8 +76,7 @@ public List> explicitSelects() { private Field coalescePrimaryColumns(List querySteps) { List> primaryColumns = querySteps.stream() - .map(queryStep -> this.mapFieldToQualifier(queryStep.getCteName(), queryStep.getSelects() - .getPrimaryColumn())) + .map(queryStep -> 
queryStep.getQualifiedSelects().getPrimaryColumn()) .toList(); return DSL.coalesce((Object) primaryColumns.get(0), primaryColumns.subList(1, primaryColumns.size()).toArray()) .as(PRIMARY_COLUMN_NAME); @@ -87,17 +86,13 @@ private Optional extractValidityDates(List queryStep // TODO: date aggregation... return querySteps.stream() .filter(queryStep -> queryStep.getSelects().getValidityDate().isPresent()) - .map(queryStep -> { - ColumnDateRange validityDate = queryStep.getSelects().getValidityDate().get(); - return validityDate.qualify(queryStep.getCteName()); - }) + .map(queryStep -> queryStep.getQualifiedSelects().getValidityDate().get()) .findFirst(); } private List> mergeSelects(List queriesToJoin) { return queriesToJoin.stream() - .flatMap(queryStep -> queryStep.getSelects().explicitSelects().stream() - .map(field -> this.mapFieldToQualifier(queryStep.getCteName(), field))) + .flatMap(queryStep -> queryStep.getQualifiedSelects().explicitSelects().stream()) .toList(); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java index 09d7259e25..30d15b17b7 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java @@ -23,7 +23,7 @@ public interface Selects { * @return selects as fully qualified reference * @see Selects#mapFieldToQualifier(String, Field) */ - Selects byName(String qualifier); + Selects qualifiedWith(String qualifier); /** * @return A list of all select fields including the primary column and validity date. @@ -55,9 +55,6 @@ default Stream> mapFieldStreamToQualifier(String qualifier, Stream *

* This function maps the select {@code c1 - c2 as c} to {@code t1.c}. * - * @param qualifier - * @param field - * @return */ default Field mapFieldToQualifier(String qualifier, Field field) { return DSL.field(DSL.name(qualifier, field.getName())); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java index 9d72ec56bd..83ed4298ff 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java @@ -1,5 +1,6 @@ package com.bakdata.conquery.sql.conversion.context.step; +import java.util.Collections; import java.util.List; import com.bakdata.conquery.sql.conversion.context.selects.Selects; @@ -20,7 +21,8 @@ public class QueryStep { String cteName; Selects selects; TableLike fromTable; - List conditions; + @Builder.Default + List conditions = Collections.emptyList(); /** * The CTEs referenced by this QueryStep */ @@ -34,7 +36,7 @@ public static TableLike toTableLike(String fromTableName) { * @return All selects re-mapped to a qualifier, which is the cteName of this QueryStep. */ public Selects getQualifiedSelects() { - return this.selects.byName(this.cteName); + return this.selects.qualifiedWith(this.cteName); } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java deleted file mode 100644 index 400dac447f..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQConceptConverter.java +++ /dev/null @@ -1,203 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement; - -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Optional; - -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; -import com.bakdata.conquery.sql.conversion.select.SelectConverterService; -import org.jooq.Condition; -import org.jooq.Field; - -public class CQConceptConverter implements NodeConverter { - - private final FilterConverterService filterConverterService; - private final SelectConverterService selectConverterService; - - public CQConceptConverter(FilterConverterService filterConverterService, SelectConverterService selectConverterService) { - this.filterConverterService = filterConverterService; - this.selectConverterService = selectConverterService; - } - - @Override - public Class getConversionClass() { - return CQConcept.class; - } - - @Override - public ConversionContext convert(CQConcept node, ConversionContext context) { - - if (node.getTables().size() > 1) { - throw new UnsupportedOperationException("Can't handle concepts with multiple tables for now."); - } - - ConceptPreprocessingService preprocessingService = new ConceptPreprocessingService(node, context); - CQTable table = node.getTables().get(0); - String conceptLabel = this.getConceptLabel(node, context); - - QueryStep preprocessingStep = 
preprocessingService.buildPreprocessingQueryStepForTable(conceptLabel, table); - QueryStep dateRestriction = this.buildDateRestrictionQueryStep(context, node, conceptLabel, preprocessingStep); - QueryStep eventSelect = this.buildEventSelectQueryStep(context, table, conceptLabel, dateRestriction); - QueryStep eventFilter = this.buildEventFilterQueryStep(context, table, conceptLabel, eventSelect); - QueryStep finalStep = this.buildFinalQueryStep(conceptLabel, eventFilter); - - return context.withQueryStep(finalStep); - } - - private String getConceptLabel(CQConcept node, ConversionContext context) { - // only relevant for debugging purposes as it will be part of the generated SQL query - // we prefix each cte name of a concept with an incrementing counter to prevent naming collisions if the same concept is selected multiple times - return "%s_%s".formatted( - context.getQueryStepCounter(), - node.getUserOrDefaultLabel(Locale.ENGLISH) - .toLowerCase() - .replace(' ', '_') - .replaceAll("\\s", "_") - ); - } - - /** - * selects: - * - all of previous step - */ - private QueryStep buildDateRestrictionQueryStep( - ConversionContext context, - CQConcept node, - String conceptLabel, - QueryStep previous - ) { - if (((ConceptSelects) previous.getSelects()).getDateRestrictionRange().isEmpty()) { - return previous; - } - - ConceptSelects dateRestrictionSelects = this.prepareDateRestrictionSelects(node, previous); - Condition dateRestriction = this.buildDateRestriction(context, previous); - String dateRestrictionCteName = "concept_%s_date_restriction".formatted(conceptLabel); - - return QueryStep.builder() - .cteName(dateRestrictionCteName) - .fromTable(QueryStep.toTableLike(previous.getCteName())) - .selects(dateRestrictionSelects) - .conditions(List.of(dateRestriction)) - .predecessors(List.of(previous)) - .build(); - } - - private ConceptSelects prepareDateRestrictionSelects(CQConcept conceptNode, QueryStep previous) { - ConceptSelects.ConceptSelectsBuilder selectsBuilder = ((ConceptSelects) previous.getQualifiedSelects()).toBuilder(); - selectsBuilder.dateRestrictionRange(Optional.empty()); - if (conceptNode.isExcludeFromTimeAggregation()) { - selectsBuilder.validityDate(Optional.empty()); - } - return selectsBuilder.build(); - } - - private Condition buildDateRestriction(ConversionContext context, QueryStep previous) { - ConceptSelects previousSelects = (ConceptSelects) previous.getSelects(); - return context.getSqlDialect().getFunction() - .dateRestriction(previousSelects.getDateRestrictionRange().get(), previousSelects.getValidityDate().get()); - } - - /** - * selects: - * - all of previous steps - * - transformed columns with selects - */ - private QueryStep buildEventSelectQueryStep( - ConversionContext context, - CQTable table, - String conceptLabel, QueryStep previous - ) { - if (table.getSelects().isEmpty()) { - return previous; - } - - ConceptSelects eventSelectSelects = this.prepareEventSelectSelects(context, table, previous); - - return QueryStep.builder() - .cteName(createCteName(conceptLabel, "_event_select")) - .fromTable(QueryStep.toTableLike(previous.getCteName())) - .selects(eventSelectSelects) - .conditions(Collections.emptyList()) - .predecessors(List.of(previous)) - .build(); - } - - /** - * selects: - * - all of previous step - * - remove filter - */ - private QueryStep buildEventFilterQueryStep( - ConversionContext context, - CQTable table, - String conceptLabel, - QueryStep previous - ) { - if (table.getFilters().isEmpty()) { - return previous; - } - - ConceptSelects 
eventFilterSelects = this.prepareEventFilterSelects(previous); - List eventFilterConditions = this.buildEventFilterConditions(context, table); - - return QueryStep.builder() - .cteName(createCteName(conceptLabel, "_event_filter")) - .fromTable(QueryStep.toTableLike(previous.getCteName())) - .selects(eventFilterSelects) - .conditions(eventFilterConditions) - .predecessors(List.of(previous)) - .build(); - } - - private ConceptSelects prepareEventSelectSelects( - ConversionContext context, - CQTable table, - QueryStep previous - ) { - return ((ConceptSelects) previous.getQualifiedSelects()).withEventSelect(this.getEventSelects(context, table)); - } - - private ConceptSelects prepareEventFilterSelects(QueryStep previous) { - return ((ConceptSelects) previous.getQualifiedSelects()).withEventFilter(Collections.emptyList()); - } - - private List buildEventFilterConditions(ConversionContext context, CQTable table) { - return table.getFilters().stream() - .map(filterValue -> this.filterConverterService.convert(filterValue, context)) - .toList(); - } - - private List> getEventSelects(ConversionContext context, CQTable table) { - return table.getSelects().stream() - .map(select -> (Field) this.selectConverterService.convert(select, context)) - .toList(); - } - - /** - * selects: - * - all of previous step - */ - private QueryStep buildFinalQueryStep(String conceptLabel, QueryStep previous) { - ConceptSelects finalSelects = ((ConceptSelects) previous.getQualifiedSelects()); - return QueryStep.builder() - .cteName(createCteName(conceptLabel, "")) - .fromTable(QueryStep.toTableLike(previous.getCteName())) - .selects(finalSelects) - .conditions(Collections.emptyList()) - .predecessors(List.of(previous)) - .build(); - } - - private static String createCteName(String conceptLabel, String suffix) { - return "concept_%s%s".formatted(conceptLabel, suffix); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java new file mode 100644 index 0000000000..64e05c8cfd --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java @@ -0,0 +1,76 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.Locale; +import java.util.Optional; + +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; +import com.bakdata.conquery.sql.conversion.select.SelectConverterService; + +public class CQConceptConverter implements NodeConverter { + + private final List querySteps; + + public CQConceptConverter(FilterConverterService filterConverterService, SelectConverterService selectConverterService) { + this.querySteps = List.of( + new PreprocessingQueryStep(), + new DateRestrictionQueryStep(), + new EventSelectStep(selectConverterService), + new EventFilterQueryStep(filterConverterService), + new FinalConceptQueryStep() + ); + } + + @Override + public Class getConversionClass() { + return CQConcept.class; + } + + @Override + public ConversionContext convert(CQConcept node, ConversionContext context) { + + 
if (node.getTables().size() > 1) { + throw new UnsupportedOperationException("Can't handle concepts with multiple tables for now."); + } + + StepContext stepContext = StepContext.builder() + .context(context) + .node(node) + .table(node.getTables().get(0)) + .conceptLabel(this.getConceptLabel(node, context)) + .sqlFunctions(context.getSqlDialect().getFunction()) + .build(); + + for (ConceptQueryStep queryStep : this.querySteps) { + Optional convert = queryStep.convert(stepContext); + if (convert.isEmpty()) { + continue; + } + stepContext = stepContext.toBuilder() + .previous(convert.get()) + .previousSelects((ConceptSelects) convert.get().getQualifiedSelects()) + .build(); + } + + return context.withQueryStep(stepContext.getPrevious()); + } + + private String getConceptLabel(CQConcept node, ConversionContext context) { + // only relevant for debugging purposes as it will be part of the generated SQL query + // we prefix each cte name of a concept with an incrementing counter to prevent naming collisions if the same concept is selected multiple times + return "%s_%s".formatted( + context.getQueryStepCounter(), + node.getUserOrDefaultLabel(Locale.ENGLISH) + .toLowerCase() + .replace(' ', '_') + .replaceAll("\\s", "_") + ); + } + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptQueryStep.java new file mode 100644 index 0000000000..d0175c8523 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptQueryStep.java @@ -0,0 +1,40 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; + +abstract class ConceptQueryStep { + + public Optional convert(StepContext context) { + if (!canConvert(context)) { + return Optional.empty(); + } + + QueryStep.QueryStepBuilder queryStepBuilder = this.convertStep(context).cteName(createCteName(context)); + + if (context.getPrevious() != null) { + queryStepBuilder.predecessors(List.of(context.getPrevious())) + .fromTable(QueryStep.toTableLike(context.getPrevious().getCteName())); + } + else { + queryStepBuilder.predecessors(Collections.emptyList()) + .fromTable(QueryStep.toTableLike(context.getTable().getConnector().getTable().getName())); + } + return Optional.of(queryStepBuilder.build()); + + } + + abstract boolean canConvert(StepContext stepContext); + + abstract QueryStep.QueryStepBuilder convertStep(StepContext stepContext); + + abstract String nameSuffix(); + + private String createCteName(StepContext stepContext) { + return "concept_%s%s".formatted(stepContext.getConceptLabel(), nameSuffix()); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/DateRestrictionQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/DateRestrictionQueryStep.java new file mode 100644 index 0000000000..c41c4bea7c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/DateRestrictionQueryStep.java @@ -0,0 +1,44 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.Optional; + +import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import org.jooq.Condition; 
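+/**
+ * Filters the previous step's rows by the date restriction range of the conversion context.
+ * The range is compared against the validity date using the dialect's dateRestriction condition;
+ * afterwards the date restriction columns are dropped from the selects and, if the concept is
+ * excluded from time aggregation, the validity date is dropped as well. The step is skipped
+ * entirely when the previous selects carry no date restriction range. A sketch of the condition
+ * shape for the HANA dialect (column names are illustrative only):
+ *
+ * <pre>{@code
+ *   date_restriction_start <= validity_date_end AND date_restriction_end >= validity_date_start
+ * }</pre>
+ */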
+ +class DateRestrictionQueryStep extends ConceptQueryStep { + + @Override + public boolean canConvert(StepContext stepContext) { + return stepContext.getPreviousSelects().getDateRestrictionRange().isPresent(); + } + + @Override + public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { + ConceptSelects dateRestrictionSelects = this.prepareDateRestrictionSelects(stepContext); + Condition dateRestriction = stepContext.getSqlFunctions().dateRestriction( + stepContext.getPreviousSelects().getDateRestrictionRange().get(), + stepContext.getPreviousSelects().getValidityDate().get() + ); + + return QueryStep.builder() + .selects(dateRestrictionSelects) + .conditions(List.of(dateRestriction)); + } + + @Override + public String nameSuffix() { + return "_date_restriction"; + } + + private ConceptSelects prepareDateRestrictionSelects(final StepContext stepContext) { + ConceptSelects.ConceptSelectsBuilder selectsBuilder = stepContext.getPreviousSelects().toBuilder(); + selectsBuilder.dateRestrictionRange(Optional.empty()); + if (stepContext.getNode().isExcludeFromTimeAggregation()) { + selectsBuilder.validityDate(Optional.empty()); + } + return selectsBuilder.build(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterQueryStep.java new file mode 100644 index 0000000000..71b0b80a01 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterQueryStep.java @@ -0,0 +1,39 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.Collections; +import java.util.List; + +import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; +import org.jooq.Condition; + +public class EventFilterQueryStep extends ConceptQueryStep { + + private final FilterConverterService filterConverterService; + + public EventFilterQueryStep(FilterConverterService filterConverterService) { + this.filterConverterService = filterConverterService; + } + + @Override + public boolean canConvert(StepContext stepContext) { + return !stepContext.getTable().getFilters().isEmpty(); + } + + @Override + public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { + + ConceptSelects eventFilterSelects = stepContext.getPreviousSelects().withEventFilter(Collections.emptyList()); + List eventFilterConditions = stepContext.getTable().getFilters().stream() + .map(filterValue -> this.filterConverterService.convert(filterValue, stepContext.getContext())) + .toList(); + return QueryStep.builder().selects(eventFilterSelects).conditions(eventFilterConditions); + } + + @Override + public String nameSuffix() { + return "_event_filter"; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventSelectStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventSelectStep.java new file mode 100644 index 0000000000..c0ceb15299 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventSelectStep.java @@ -0,0 +1,39 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.stream.Stream; + +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import 
com.bakdata.conquery.sql.conversion.select.SelectConverterService; +import org.jooq.Field; + +class EventSelectStep extends ConceptQueryStep { + + private final SelectConverterService selectConverterService; + + EventSelectStep(SelectConverterService selectConverterService) { + this.selectConverterService = selectConverterService; + } + + @Override + public boolean canConvert(StepContext stepContext) { + return !stepContext.getTable().getSelects().isEmpty() || !stepContext.getNode().getSelects().isEmpty(); + } + + @Override + public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { + return QueryStep.builder().selects(stepContext.getPreviousSelects().withEventSelect(this.getEventSelects(stepContext))); + } + + @Override + public String nameSuffix() { + return "_event_select"; + } + + @SuppressWarnings("unchecked") + private List> getEventSelects(StepContext stepContext) { + return Stream.concat(stepContext.getTable().getSelects().stream(), stepContext.getNode().getSelects().stream()) + .map(select -> (Field) this.selectConverterService.convert(select, stepContext.getContext())) + .toList(); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptQueryStep.java new file mode 100644 index 0000000000..a58e7e1c91 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptQueryStep.java @@ -0,0 +1,23 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import com.bakdata.conquery.sql.conversion.context.selects.Selects; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; + +class FinalConceptQueryStep extends ConceptQueryStep { + + @Override + public boolean canConvert(StepContext stepContext) { + return true; + } + + @Override + public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { + Selects finalSelects = stepContext.getPrevious().getQualifiedSelects(); + return QueryStep.builder().selects(finalSelects); + } + + @Override + public String nameSuffix() { + return ""; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingQueryStep.java similarity index 50% rename from backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java rename to backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingQueryStep.java index 94970f4621..ce6b9875e4 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConceptPreprocessingService.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingQueryStep.java @@ -1,4 +1,4 @@ -package com.bakdata.conquery.sql.conversion.cqelement; +package com.bakdata.conquery.sql.conversion.cqelement.concept; import java.util.Collections; import java.util.List; @@ -6,43 +6,28 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; import 
com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; import com.bakdata.conquery.sql.models.ColumnDateRange; import org.jooq.Field; import org.jooq.impl.DSL; -public class ConceptPreprocessingService { +class PreprocessingQueryStep extends ConceptQueryStep { - private final CQConcept concept; - private final ConversionContext context; - private final SqlFunctionProvider sqlFunctionProvider; - - public ConceptPreprocessingService(CQConcept concept, ConversionContext context) { - this.concept = concept; - this.context = context; - this.sqlFunctionProvider = this.context.getSqlDialect().getFunction(); + public boolean canConvert(StepContext stepContext) { + // We always apply preprocessing to select the required columns + return true; } - /** - * selects: - * - (primary column) - * - date restriction - * - validity date - * - any filter (group/event) - * - any select (group/event) - */ - public QueryStep buildPreprocessingQueryStepForTable(String conceptLabel, CQTable table) { + public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { + CQTable table = stepContext.getTable(); ConceptSelects.ConceptSelectsBuilder selectsBuilder = ConceptSelects.builder(); - selectsBuilder.primaryColumn(DSL.field(context.getConfig().getPrimaryColumn())); - selectsBuilder.dateRestrictionRange(this.getDateRestrictionSelect(table)); - selectsBuilder.validityDate(this.getValidityDateSelect(table, conceptLabel)); + selectsBuilder.primaryColumn(DSL.field(DSL.name(stepContext.getContext().getConfig().getPrimaryColumn()))) + .dateRestrictionRange(this.getDateRestrictionSelect(stepContext)) + .validityDate(this.getValidityDateSelect(stepContext)); List> conceptSelectFields = this.getColumnSelectReferences(table); List> conceptFilterFields = this.getColumnFilterReferences(table); @@ -53,43 +38,46 @@ public QueryStep buildPreprocessingQueryStepForTable(String conceptLabel, CQTabl .filter(field -> !conceptSelectFields.contains(field)) .toList(); - selectsBuilder.eventSelect(conceptSelectFields); - selectsBuilder.eventFilter(deduplicatedFilterFields); + selectsBuilder.eventSelect(conceptSelectFields). 
+ eventFilter(deduplicatedFilterFields); // not part of preprocessing yet selectsBuilder.groupSelect(Collections.emptyList()) .groupFilter(Collections.emptyList()); return QueryStep.builder() - .cteName(this.getPreprocessingStepLabel(conceptLabel)) - .fromTable(QueryStep.toTableLike(this.getFromTableName(table))) .selects(selectsBuilder.build()) .conditions(Collections.emptyList()) - .predecessors(Collections.emptyList()) - .build(); + .predecessors(Collections.emptyList()); + } + + @Override + public String nameSuffix() { + return "_preprocessing"; } - private Optional getDateRestrictionSelect(CQTable table) { - if (!this.context.dateRestrictionActive() || !this.tableHasValidityDates(table)) { + private Optional getDateRestrictionSelect(final StepContext stepContext) { + if (!stepContext.getContext().dateRestrictionActive() || !this.tableHasValidityDates(stepContext.getTable())) { return Optional.empty(); } - return Optional.of(sqlFunctionProvider.daterange(context.getDateRestrictionRange())); + ColumnDateRange dateRestriction = stepContext.getContext().getSqlDialect().getFunction().daterange(stepContext.getContext().getDateRestrictionRange()); + return Optional.of(dateRestriction); } - private Optional getValidityDateSelect(CQTable table, String conceptLabel) { - if (!this.validityDateIsRequired(table)) { + private Optional getValidityDateSelect(final StepContext stepContext) { + if (!this.validityDateIsRequired(stepContext)) { return Optional.empty(); } - return Optional.of(sqlFunctionProvider.daterange(table.findValidityDate(), conceptLabel)); + return Optional.of(stepContext.getSqlFunctions().daterange(stepContext.getTable().findValidityDate(), stepContext.getConceptLabel())); } /** * @return True, if a date restriction is active and the node is not excluded from time aggregation * OR there is no date restriction, but still existing validity dates which are included in time aggregation. 
*/ - private boolean validityDateIsRequired(CQTable table) { - return this.tableHasValidityDates(table) - && !this.concept.isExcludeFromTimeAggregation(); + private boolean validityDateIsRequired(final StepContext stepContext) { + return this.tableHasValidityDates(stepContext.getTable()) + && !stepContext.getNode().isExcludeFromTimeAggregation(); } private boolean tableHasValidityDates(CQTable table) { @@ -111,19 +99,9 @@ private List> getColumnFilterReferences(CQTable table) { .toList(); } - private String getFromTableName(CQTable table) { - return table.getConnector() - .getTable() - .getName(); - } private Field mapColumnOntoTable(Column column, CQTable table) { - return DSL.field(DSL.name(this.getFromTableName(table), column.getName())); - } - - private String getPreprocessingStepLabel(String conceptLabel) { - return "concept_%s_preprocessing".formatted(conceptLabel); - } + return DSL.field(DSL.name(table.getConnector().getTable().getName(), column.getName()));} } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/StepContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/StepContext.java new file mode 100644 index 0000000000..92f242c5bc --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/StepContext.java @@ -0,0 +1,22 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; +import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import lombok.Builder; +import lombok.Value; + +@Value +@Builder(toBuilder = true) +class StepContext { + ConversionContext context; + SqlFunctionProvider sqlFunctions; + CQConcept node; + CQTable table; + String conceptLabel; + QueryStep previous; + ConceptSelects previousSelects; +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java new file mode 100644 index 0000000000..41887c2a2c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java @@ -0,0 +1,46 @@ +package com.bakdata.conquery.sql.conversion.dialect; + +import java.util.List; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.query.Visitable; +import com.bakdata.conquery.sql.conversion.NodeConverter; +import com.bakdata.conquery.sql.conversion.filter.FilterConverter; +import com.bakdata.conquery.sql.conversion.select.SelectConverter; +import org.jooq.DSLContext; + +public class HanaSqlDialect implements SqlDialect { + + private final DSLContext dslContext; + + public HanaSqlDialect(DSLContext dslContext) { + this.dslContext = dslContext; + } + + @Override + public DSLContext getDSLContext() { + return this.dslContext; + } + + @Override + public List> getNodeConverters() { + return getDefaultNodeConverters(); + } + + @Override + public List>> getFilterConverters() { + return getDefaultFilterConverters(); + } + + @Override + public List> getSelectConverters() { + return getDefaultSelectConverters(); + } + + 
@Override + public SqlFunctionProvider getFunction() { + return new HanaSqlFunctionProvider(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java new file mode 100644 index 0000000000..13a7682aca --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java @@ -0,0 +1,134 @@ +package com.bakdata.conquery.sql.conversion.dialect; + +import java.sql.Date; +import java.time.temporal.ChronoUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.concepts.ValidityDate; +import com.bakdata.conquery.sql.models.ColumnDateRange; +import org.jooq.Condition; +import org.jooq.Field; +import org.jooq.Name; +import org.jooq.impl.DSL; + +public class HanaSqlFunctionProvider implements SqlFunctionProvider { + + private static final String INFINITY_DATE_VALUE = "9999-12-31"; + private static final String MINUS_INFINITY_DATE_VALUE = "0001-01-01"; + + @Override + public Condition dateRestriction(ColumnDateRange dateRestriction, ColumnDateRange validityDate) { + + if (dateRestriction.isSingleColumnRange() || validityDate.isSingleColumnRange()) { + throw new UnsupportedOperationException("HANA does not support single column ranges."); + } + + Condition dateRestrictionStartsBeforeDate = dateRestriction.getStart().lessOrEqual(validityDate.getEnd()); + Condition dateRestrictionEndsAfterDate = dateRestriction.getEnd().greaterOrEqual(validityDate.getStart()); + + return DSL.condition(dateRestrictionStartsBeforeDate.and(dateRestrictionEndsAfterDate)); + } + + @Override + public ColumnDateRange daterange(CDateRange dateRestriction) { + + String startDateExpression = MINUS_INFINITY_DATE_VALUE; + String endDateExpression = INFINITY_DATE_VALUE; + + if (dateRestriction.hasLowerBound()) { + startDateExpression = dateRestriction.getMin().toString(); + } + if (dateRestriction.hasUpperBound()) { + endDateExpression = dateRestriction.getMax().toString(); + } + + return ColumnDateRange.of(toDateField(startDateExpression), toDateField(endDateExpression)) + .asDateRestrictionRange(); + } + + @Override + public ColumnDateRange daterange(ValidityDate validityDate, String conceptLabel) { + + Column startColumn; + Column endColumn; + + if (validityDate.getEndColumn() != null) { + startColumn = validityDate.getStartColumn(); + endColumn = validityDate.getEndColumn(); + } + else { + startColumn = validityDate.getColumn(); + endColumn = validityDate.getColumn(); + } + + // when aggregating date ranges, we want to treat the last day of the range as excluded, + // so when using the date value of the end column, we add +1 day as end of the date range + Field rangeStart = DSL.field(DSL.name(startColumn.getName()), Date.class); + Field rangeEnd = addDay(endColumn); + + return ColumnDateRange.of(rangeStart, rangeEnd) + .asValidityDateRange(conceptLabel); + } + + @Override + public Field daterangeString(ColumnDateRange columnDateRange) { + + if (columnDateRange.isSingleColumnRange()) { + throw new UnsupportedOperationException("HANA does not support single-column date ranges."); + } + + String datesConcatenated = Stream.of(columnDateRange.getStart(), columnDateRange.getEnd()) + .map(" || %s || "::formatted) + .collect(Collectors.joining(" ',' ", 
"'['", "')'")); + + return DSL.field(datesConcatenated); + } + + @Override + public Field dateDistance(ChronoUnit timeUnit, Name startDateColumnName, Date endDateExpression) { + + String betweenFunction = switch (timeUnit) { + case DAYS -> "DAYS_BETWEEN"; + case MONTHS -> "MONTHS_BETWEEN"; + case YEARS, DECADES, CENTURIES -> "YEARS_BETWEEN"; + default -> throw new UnsupportedOperationException("Given ChronoUnit %s is not supported."); + }; + + Field startDate = DSL.field(startDateColumnName, Date.class); + Field endDate = toDateField(endDateExpression.toString()); + Field dateDistance = DSL.function(betweenFunction, Integer.class, startDate, endDate); + + // HANA does not support decades or centuries directly + dateDistance = switch (timeUnit) { + case DECADES -> dateDistance.divide(10); + case CENTURIES -> dateDistance.divide(100); + default -> dateDistance; + }; + + // otherwise HANA would return floating point numbers for date distances + return dateDistance.cast(Integer.class); + } + + @Override + public Field toDateField(String dateExpression) { + return DSL.function( + "TO_DATE", + Date.class, + DSL.val(dateExpression), + DSL.val(DEFAULT_DATE_FORMAT) + ); + } + + private Field addDay(Column dateColumn) { + return DSL.function( + "ADD_DAYS", + Date.class, + DSL.field(DSL.name(dateColumn.getName())), + DSL.val(1) + ); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java index 5fe2c9620c..60925126bb 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java @@ -2,16 +2,15 @@ import java.sql.Date; import java.time.temporal.ChronoUnit; -import java.util.Map; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.sql.models.ColumnDateRange; -import org.jetbrains.annotations.NotNull; import org.jooq.Condition; import org.jooq.DatePart; import org.jooq.Field; +import org.jooq.Name; import org.jooq.impl.DSL; /** @@ -24,14 +23,6 @@ public class PostgreSqlFunctionProvider implements SqlFunctionProvider { private static final String INFINITY_DATE_VALUE = "infinity"; private static final String MINUS_INFINITY_DATE_VALUE = "-infinity"; - private static final Map DATE_CONVERSION = Map.of( - ChronoUnit.DECADES, DatePart.DECADE, - ChronoUnit.YEARS, DatePart.YEAR, - ChronoUnit.DAYS, DatePart.DAY, - ChronoUnit.MONTHS, DatePart.MONTH, - ChronoUnit.CENTURIES, DatePart.CENTURY - ); - @Override public Condition dateRestriction(ColumnDateRange dateRestriction, ColumnDateRange validityDate) { if (!validityDate.isSingleColumnRange()) { @@ -50,20 +41,20 @@ public Condition dateRestriction(ColumnDateRange dateRestriction, ColumnDateRang @Override public ColumnDateRange daterange(CDateRange dateRestriction) { - String min = MINUS_INFINITY_DATE_VALUE; - String max = INFINITY_DATE_VALUE; + String startDateExpression = MINUS_INFINITY_DATE_VALUE; + String endDateExpression = INFINITY_DATE_VALUE; if (dateRestriction.hasLowerBound()) { - min = dateRestriction.getMin().toString(); + startDateExpression = dateRestriction.getMin().toString(); } if (dateRestriction.hasUpperBound()) { - max = dateRestriction.getMax().toString(); + endDateExpression = 
dateRestriction.getMax().toString(); } Field dateRestrictionRange = DSL.field( "daterange({0}::date, {1}::date, '[]')", - DSL.val(min), - DSL.val(max) + DSL.val(startDateExpression), + DSL.val(endDateExpression) ); return ColumnDateRange.of(dateRestrictionRange) @@ -108,20 +99,28 @@ public Field daterangeString(ColumnDateRange columnDateRange) { } @Override - public Field dateDistance(ChronoUnit timeUnit, Column startDateColumn, Date endDateExpression) { + public Field dateDistance(ChronoUnit timeUnit, Name startDateColumnName, Date endDateExpression) { + + Field startDate = DSL.field(startDateColumnName, Date.class); + Field endDate = toDateField(endDateExpression.toString()); - DatePart datePart = DATE_CONVERSION.get(timeUnit); - if (datePart == null) { - throw new UnsupportedOperationException("Chrono unit %s is not supported".formatted(timeUnit)); + if (timeUnit == ChronoUnit.DAYS) { + return endDate.minus(startDate).coerce(Integer.class); } - // we can now safely cast to Field of type Date - Field startDate = DSL.field(DSL.name(startDateColumn.getName()), Date.class); - return DSL.dateDiff(datePart, startDate, endDateExpression); + Field age = DSL.function("AGE", Object.class, endDate, startDate); + + return switch (timeUnit) { + case MONTHS -> extract(DatePart.YEAR, age).multiply(12) + .plus(extract(DatePart.MONTH, age)); + case YEARS -> extract(DatePart.YEAR, age); + case DECADES -> extract(DatePart.DECADE, age); + case CENTURIES -> extract(DatePart.CENTURY, age); + default -> throw new UnsupportedOperationException("Given ChronoUnit %s is not supported."); + }; } - @NotNull - private static Field daterange(Column startColumn, Column endColumn, String bounds) { + private Field daterange(Column startColumn, Column endColumn, String bounds) { return DSL.function( "daterange", Object.class, @@ -131,4 +130,12 @@ private static Field daterange(Column startColumn, Column endColumn, Str ); } + private Field extract(DatePart datePart, Field timeInterval) { + return DSL.function( + "EXTRACT", + Integer.class, + DSL.inlined(DSL.field("%s FROM %s".formatted(datePart, timeInterval))) + ); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java index 8ee2a08256..6bdba63b99 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java @@ -1,12 +1,21 @@ package com.bakdata.conquery.sql.conversion.dialect; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.sql.conversion.Converter; import com.bakdata.conquery.sql.conversion.NodeConverter; import com.bakdata.conquery.sql.conversion.context.step.QueryStepTransformer; -import com.bakdata.conquery.sql.conversion.cqelement.*; +import com.bakdata.conquery.sql.conversion.cqelement.CQAndConverter; +import com.bakdata.conquery.sql.conversion.cqelement.CQDateRestrictionConverter; +import com.bakdata.conquery.sql.conversion.cqelement.CQNegationConverter; +import com.bakdata.conquery.sql.conversion.cqelement.CQOrConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.CQConceptConverter; import 
com.bakdata.conquery.sql.conversion.filter.FilterConverter; import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; import com.bakdata.conquery.sql.conversion.filter.MultiSelectConverter; @@ -19,11 +28,6 @@ import com.bakdata.conquery.sql.conversion.supplier.SystemDateNowSupplier; import org.jooq.DSLContext; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - public interface SqlDialect { SqlFunctionProvider getFunction(); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java index 9e5ebab7df..3a0b305139 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java @@ -4,7 +4,6 @@ import java.time.temporal.ChronoUnit; import com.bakdata.conquery.models.common.daterange.CDateRange; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.sql.conversion.context.step.QueryStep; import com.bakdata.conquery.sql.models.ColumnDateRange; @@ -35,7 +34,7 @@ public interface SqlFunctionProvider { Field daterangeString(ColumnDateRange columnDateRange); - Field dateDistance(ChronoUnit datePart, Column startDateColumn, Date endDateExpression); + Field dateDistance(ChronoUnit datePart, Name startDateColumn, Date endDateExpression); default Condition in(Name columnName, String[] values) { return DSL.field(columnName) @@ -69,7 +68,7 @@ default TableOnConditionStep fullOuterJoin( .on(leftPartPrimaryColumn.eq(rightPartPrimaryColumn)); } - default Field toDate(String dateExpression) { + default Field toDateField(String dateExpression) { return DSL.toDate(dateExpression, DEFAULT_DATE_FORMAT); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java index 52339db897..c2f90372f8 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java @@ -2,15 +2,15 @@ import java.sql.Date; import java.time.LocalDate; +import java.time.temporal.ChronoUnit; import java.util.Objects; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.DateDistanceSelect; -import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; import org.jooq.Field; +import org.jooq.Name; +import org.jooq.impl.DSL; public class DateDistanceConverter implements SelectConverter { @@ -23,17 +23,12 @@ public DateDistanceConverter(DateNowSupplier dateNowSupplier) { @Override public Field convert(DateDistanceSelect select, ConversionContext context) { - Column startDateColumn = select.getColumn(); - if (startDateColumn.getType() != MajorTypeId.DATE) { - throw new UnsupportedOperationException("Can't calculate date distance to column of type " - + startDateColumn.getType()); - } - - SqlFunctionProvider functionProvider = 
context.getSqlDialect().getFunction(); + ChronoUnit timeUnit = select.getTimeUnit(); + Name startDateColumnName = DSL.name(select.getColumn().getName()); Date endDate = getEndDate(context); - return functionProvider.dateDistance(select.getTimeUnit(), startDateColumn, endDate) - .as(select.getLabel()); + return context.getSqlDialect().getFunction().dateDistance(timeUnit, startDateColumnName, endDate) + .as(select.getLabel()); } private Date getEndDate(ConversionContext context) { @@ -53,4 +48,5 @@ private Date getEndDate(ConversionContext context) { public Class getConversionClass() { return DateDistanceSelect.class; } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java index af1593bf91..f91c5e4d8a 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java @@ -4,15 +4,13 @@ import com.bakdata.conquery.sql.conversion.context.ConversionContext; import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; import org.jooq.Field; -import org.jooq.Name; import org.jooq.impl.DSL; public class FirstValueConverter implements SelectConverter { public Field convert(FirstValueSelect select, ConversionContext context) { SqlFunctionProvider fn = context.getSqlDialect().getFunction(); - Name columnName = DSL.name(select.getColumn().getName()); - return fn.first(columnName); + return fn.first(DSL.name(select.getColumn().getName())); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java index 5bea87bd0d..e847611ad5 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java @@ -10,7 +10,6 @@ import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.query.results.EntityResult; -import com.bakdata.conquery.models.query.results.SinglelineEntityResult; import com.bakdata.conquery.sql.conquery.SqlManagedQuery; import com.google.common.base.Stopwatch; import lombok.RequiredArgsConstructor; @@ -33,8 +32,10 @@ public SqlExecutionResult execute(SqlManagedQuery sqlQuery) { private SqlExecutionResult createStatementAndExecute(SqlManagedQuery sqlQuery, Connection connection) { + String sqlString = sqlQuery.getSqlQuery().getSqlString(); + log.debug("Executing query: \n{}", sqlString); try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(sqlQuery.getSqlQuery().getSqlString())) { + ResultSet resultSet = statement.executeQuery(sqlString)) { int columnCount = resultSet.getMetaData().getColumnCount(); List columnNames = this.getColumnNames(resultSet, columnCount); List resultTable = this.createResultTable(resultSet, columnCount); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java b/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java index e0faabdf4f..6e1f832c18 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java @@ -1,6 +1,8 @@ package com.bakdata.conquery.sql.models; +import java.sql.Date; import java.util.List; +import 
java.util.stream.Stream; import lombok.Getter; import org.jooq.Field; @@ -16,10 +18,10 @@ public class ColumnDateRange { private final boolean isEmpty; private final Field range; - private final Field start; - private final Field end; + private final Field start; + private final Field end; - private ColumnDateRange(boolean isEmpty, Field range, Field startColumn, Field endColumn) { + private ColumnDateRange(boolean isEmpty, Field range, Field startColumn, Field endColumn) { this.isEmpty = isEmpty; this.range = range; this.start = startColumn; @@ -30,7 +32,7 @@ public static ColumnDateRange of(Field rangeColumn) { return new ColumnDateRange(false, rangeColumn, null, null); } - public static ColumnDateRange of(Field startColumn, Field endColumn) { + public static ColumnDateRange of(Field startColumn, Field endColumn) { return new ColumnDateRange(true, null, startColumn, endColumn); } @@ -54,16 +56,18 @@ public List> toFields() { if (isSingleColumnRange()) { return List.of(this.range); } - return List.of(this.start, this.end); + return Stream.of(this.start, this.end) + .map(dateField -> dateField.coerce(Object.class)) + .toList(); } public ColumnDateRange qualify(String qualifier) { if (isSingleColumnRange()) { - return ColumnDateRange.of(mapFieldOntoQualifier(getRange(), qualifier)); + return ColumnDateRange.of(mapFieldOntoQualifier(getRange(), Object.class, qualifier)); } return ColumnDateRange.of( - mapFieldOntoQualifier(getStart(), qualifier), - mapFieldOntoQualifier(getEnd(), qualifier) + mapFieldOntoQualifier(getStart(), Date.class, qualifier), + mapFieldOntoQualifier(getEnd(), Date.class, qualifier) ); } @@ -77,8 +81,8 @@ private ColumnDateRange as(String alias) { ); } - private Field mapFieldOntoQualifier(Field field, String qualifier) { - return DSL.field(DSL.name(qualifier, field.getName())); + private Field mapFieldOntoQualifier(Field field, Class fieldType, String qualifier) { + return DSL.field(DSL.name(qualifier, field.getName()), fieldType); } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java index d40a970169..72072df65e 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java @@ -54,6 +54,9 @@ public class IntegrationTests { private static final ObjectWriter CONFIG_WRITER; static { + + SharedMetricRegistries.setDefault("test"); + final ObjectMapper mapper = Jackson.MAPPER.copy(); MAPPER = mapper.setConfig(mapper.getDeserializationConfig().withView(View.Persistence.class)) @@ -135,7 +138,6 @@ public Stream programmaticTests() { @SneakyThrows public Stream sqlTests(SqlDialect sqlDialect, SqlConnectorConfig sqlConfig) { - SharedMetricRegistries.setDefault("test"); final Path testRootDir = Path.of(Objects.requireNonNullElse( System.getenv(TestTags.SQL_BACKEND_TEST_DIRECTORY_ENVIRONMENT_VARIABLE), SqlIntegrationTest.SQL_TEST_DIR diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java index 78a3d72366..3af71cc613 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java @@ -1,12 +1,12 @@ package com.bakdata.conquery.integration.sql; - import java.io.IOException; import java.math.BigDecimal; import 
java.nio.file.Files; import java.nio.file.Path; import java.sql.Connection; import java.sql.Date; +import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; @@ -26,6 +26,7 @@ import com.google.common.base.Strings; import com.univocity.parsers.csv.CsvParser; import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; import org.jooq.DSLContext; import org.jooq.DataType; import org.jooq.Field; @@ -38,8 +39,10 @@ import org.jooq.impl.SQLDataType; import org.jooq.postgres.extensions.types.DateRange; +@Slf4j public class CsvTableImporter { + private static final int DEFAULT_VARCHAR_LENGTH = 25; // HANA will use 1 as default otherwise private final DSLContext dslContext; private final DateRangeParser dateRangeParser; private final CsvParser csvReader; @@ -56,30 +59,17 @@ public CsvTableImporter(DSLContext dslContext) { */ public void importTableIntoDatabase(RequiredTable requiredTable) { - Table table = DSL.table(requiredTable.getName()); + Table table = DSL.table(DSL.name(requiredTable.getName())); List allRequiredColumns = this.getAllRequiredColumns(requiredTable); List> columns = this.createFieldsForColumns(allRequiredColumns); List content = this.getTablesContentFromCSV(requiredTable.getCsv(), allRequiredColumns); - // because we currently won't shut down the container between the testcases, we drop tables upfront if they - // exist to ensure consistency if table names of different testcases are the same - String dropTableStatement = dslContext.dropTableIfExists(table) - .getSQL(ParamType.INLINED); - - String createTableStatement = dslContext.createTable(table) - .columns(columns) - .getSQL(ParamType.INLINED); - - String insertIntoTableStatement = dslContext.insertInto(table, columns) - .valuesOfRows(content) - .getSQL(ParamType.INLINED); - - // we directly use JDBC because JOOQ can't cope with PostgreSQL custom types + // we directly use JDBC because JOOQ can't cope with some custom types like daterange dslContext.connection((Connection connection) -> { try (Statement statement = connection.createStatement()) { - statement.execute(dropTableStatement); - statement.execute(createTableStatement); - statement.execute(insertIntoTableStatement); + dropTable(table, statement); + createTable(table, columns, statement); + insertValuesIntoTable(table, columns, content, statement); } }); } @@ -94,6 +84,36 @@ public List readExpectedEntities(Path csv) throws IOException { return results; } + private void insertValuesIntoTable(Table table, List> columns, List content, Statement statement) throws SQLException { + for (RowN rowN : content) { + // e.g. 
HANA does not support bulk insert, so we insert row by row + String insertRowStatement = dslContext.insertInto(table, columns) + .values(rowN) + .getSQL(ParamType.INLINED); + log.info("Inserting into table: {}", insertRowStatement); + statement.execute(insertRowStatement); + } + } + + private void createTable(Table table, List> columns, Statement statement) throws SQLException { + String createTableStatement = dslContext.createTable(table) + .columns(columns) + .getSQL(ParamType.INLINED); + log.info("Creating table: {}", createTableStatement); + statement.execute(createTableStatement); + } + + private void dropTable(Table table, Statement statement) { + try { + // DROP TABLE IF EXISTS is not supported in HANA, we just ignore possible errors if the table does not exist + String dropTableStatement = dslContext.dropTable(table) + .getSQL(ParamType.INLINED); + statement.execute(dropTableStatement); + } + catch (SQLException e) { + log.info("Dropping table {} failed.", table.getName(), e); + } + } private List> createFieldsForColumns(List requiredColumns) { return requiredColumns.stream() @@ -110,15 +130,16 @@ private List getAllRequiredColumns(RequiredTable table) { private Field createField(RequiredColumn requiredColumn) { DataType dataType = switch (requiredColumn.getType()) { - case STRING -> SQLDataType.VARCHAR; + case STRING -> SQLDataType.VARCHAR(DEFAULT_VARCHAR_LENGTH); case INTEGER -> SQLDataType.INTEGER; case BOOLEAN -> SQLDataType.BOOLEAN; - case REAL -> SQLDataType.REAL; + // TODO: temporary workaround until we cast ResultSet elements back + case REAL -> SQLDataType.DECIMAL(10,2); case DECIMAL, MONEY -> SQLDataType.DECIMAL; case DATE -> SQLDataType.DATE; case DATE_RANGE -> new BuiltInDataType<>(DateRange.class, "daterange"); }; - return DSL.field(requiredColumn.getName(), dataType); + return DSL.field(DSL.name(requiredColumn.getName()), dataType); } @SneakyThrows diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java index ba61fe0173..c6340f3227 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java @@ -94,13 +94,16 @@ public void executeTest(SqlStandaloneSupport support) throws IOException { SqlManagedQuery managedQuery = support.getExecutionManager() .runQuery(support.getNamespace(), getQuery(), support.getTestUser(), support.getDataset(), support.getConfig(), false); - log.info("Execute query: \n{}", managedQuery.getSqlQuery().getSqlString()); SqlExecutionResult result = managedQuery.getResult(); List resultCsv = result.getTable(); + Path expectedCsvFile = this.specDir.resolve(this.expectedCsv); List expectedCsv = support.getTableImporter().readExpectedEntities(expectedCsvFile); - Assertions.assertThat(resultCsv).usingRecursiveFieldByFieldElementComparator().containsExactlyElementsOf(expectedCsv); + + Assertions.assertThat(resultCsv) + .usingRecursiveFieldByFieldElementComparatorIgnoringFields("entityId") + .containsExactlyInAnyOrderElementsOf(expectedCsv); } @Override @@ -128,5 +131,4 @@ private void importConcepts(SqlStandaloneSupport support) throws IOException, JS } } - } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/TestPostgreSqlDialect.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/TestPostgreSqlDialect.java deleted file mode 100644 index 09ee227eb3..0000000000 
--- a/backend/src/test/java/com/bakdata/conquery/integration/sql/TestPostgreSqlDialect.java +++ /dev/null @@ -1,35 +0,0 @@ -package com.bakdata.conquery.integration.sql; - -import com.bakdata.conquery.models.datasets.concepts.select.Select; -import com.bakdata.conquery.sql.conversion.select.SelectConverter; -import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter; -import com.bakdata.conquery.sql.conversion.dialect.PostgreSqlDialect; -import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; -import org.jooq.DSLContext; - -import java.time.LocalDate; -import java.util.List; - -public class TestPostgreSqlDialect extends PostgreSqlDialect { - - public TestPostgreSqlDialect(DSLContext dslContext) { - super(dslContext); - } - - @Override - public List> getSelectConverters() { - return this.customizeSelectConverters(List.of( - new DateDistanceConverter(new MockDateNowSupplier()) - )); - } - - private class MockDateNowSupplier implements DateNowSupplier { - - @Override - public LocalDate getLocalDateNow() { - return LocalDate.parse("2023-03-28"); - } - - } - -} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java new file mode 100644 index 0000000000..8da1528554 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java @@ -0,0 +1,168 @@ +package com.bakdata.conquery.integration.sql.dialect; + +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.PosixFilePermission; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Stream; + +import com.bakdata.conquery.TestTags; +import com.bakdata.conquery.integration.IntegrationTests; +import com.bakdata.conquery.integration.sql.testcontainer.hana.HanaContainer; +import com.bakdata.conquery.models.config.Dialect; +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.sql.DslContextFactory; +import com.bakdata.conquery.sql.conversion.dialect.HanaSqlDialect; +import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter; +import com.bakdata.conquery.sql.conversion.select.SelectConverter; +import com.google.common.base.Strings; +import lombok.Getter; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.jooq.DSLContext; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DynamicTest; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestFactory; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.utility.DockerImageName; + +@Slf4j +public class HanaSqlIntegrationTests extends IntegrationTests { + + private final static DockerImageName HANA_IMAGE = DockerImageName.parse("saplabs/hanaexpress:2.00.061.00.20220519.1"); + private static final Path TMP_HANA_MOUNT_DIR = Paths.get("/tmp/data/hana"); + private static boolean useLocalHanaDb = true; + + static { + final String USE_LOCAL_HANA_DB = System.getenv("USE_LOCAL_HANA_DB"); + if (!Strings.isNullOrEmpty(USE_LOCAL_HANA_DB)) { + useLocalHanaDb = Boolean.parseBoolean(USE_LOCAL_HANA_DB); + } + } + + public HanaSqlIntegrationTests() { + super("tests/", 
"com.bakdata.conquery.integration"); + } + + @TestFactory + @Tag(TestTags.INTEGRATION_SQL_BACKEND) + public Stream sqlBackendTests() { + + TestContextProvider provider = useLocalHanaDb + ? new HanaTestcontainerContextProvider() + : new RemoteHanaContextProvider(); + + log.info("Running HANA tests with %s.".formatted(provider.getClass().getSimpleName())); + + DSLContext dslContext = provider.getDslContext(); + SqlConnectorConfig config = provider.getSqlConnectorConfig(); + + return super.sqlTests(new TestHanaDialect(dslContext), config); + } + + @SneakyThrows + @BeforeAll + public static void prepareTmpHanaDir() { + + if (!useLocalHanaDb) { + return; + } + + Path masterPasswordFile = TMP_HANA_MOUNT_DIR.resolve("password.json"); + String content = "{\"master_password\":\"%s\"}".formatted(HanaContainer.DEFAULT_MASTER_PASSWORD); + + Files.createDirectories(TMP_HANA_MOUNT_DIR); + Files.write(masterPasswordFile, content.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.WRITE); + Files.setPosixFilePermissions(TMP_HANA_MOUNT_DIR, Set.of(PosixFilePermission.values())); + } + + @SneakyThrows + @AfterAll + public static void tearDownClass() { + if (!Files.exists(TMP_HANA_MOUNT_DIR)) { + return; + } + try (Stream walk = Files.walk(TMP_HANA_MOUNT_DIR)) { + walk.sorted((p1, p2) -> - p1.compareTo(p2)) + .map(Path::toFile) + .forEach(File::delete); + } + } + + private static class TestHanaDialect extends HanaSqlDialect { + + public TestHanaDialect(DSLContext dslContext) { + super(dslContext); + } + + @Override + public List> getSelectConverters() { + return this.customizeSelectConverters(List.of( + new DateDistanceConverter(new MockDateNowSupplier()) + )); + } + + } + + @Getter + private static class HanaTestcontainerContextProvider implements TestContextProvider { + + private final DSLContext dslContext; + private final SqlConnectorConfig sqlConnectorConfig; + + @Container + private final HanaContainer hanaContainer; + + public HanaTestcontainerContextProvider() { + this.hanaContainer = new HanaContainer<>(HANA_IMAGE) + .withFileSystemBind(TMP_HANA_MOUNT_DIR.toString(), "/home/secrets"); + this.hanaContainer.start(); + + this.sqlConnectorConfig = SqlConnectorConfig.builder() + .dialect(Dialect.HANA) + .jdbcConnectionUrl(hanaContainer.getJdbcUrl()) + .databaseUsername(hanaContainer.getUsername()) + .databasePassword(hanaContainer.getPassword()) + .withPrettyPrinting(true) + .primaryColumn("pid") + .build(); + this.dslContext = DslContextFactory.create(sqlConnectorConfig); + } + + } + + @Getter + private static class RemoteHanaContextProvider implements TestContextProvider { + + private final static String PORT = Objects.requireNonNullElse(System.getenv("CONQUERY_SQL_PORT"), "39041"); + private final static String HOST = System.getenv("CONQUERY_SQL_DB"); + private final static String CONNECTION_URL = "jdbc:sap://%s:%s/databaseName=HXE&encrypt=true&validateCertificate=false".formatted(HOST, PORT); + private final static String USERNAME = System.getenv("CONQUERY_SQL_USER"); + private final static String PASSWORD = System.getenv("CONQUERY_SQL_PASSWORD"); + private final DSLContext dslContext; + private final SqlConnectorConfig sqlConnectorConfig; + + public RemoteHanaContextProvider() { + this.sqlConnectorConfig = SqlConnectorConfig.builder() + .enabled(true) + .dialect(Dialect.HANA) + .withPrettyPrinting(true) + .jdbcConnectionUrl(CONNECTION_URL) + .databaseUsername(USERNAME) + .databasePassword(PASSWORD) + .primaryColumn("pid") + .build(); + this.dslContext = 
DslContextFactory.create(sqlConnectorConfig); + } + + } + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/MockDateNowSupplier.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/MockDateNowSupplier.java new file mode 100644 index 0000000000..47a510d588 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/MockDateNowSupplier.java @@ -0,0 +1,14 @@ +package com.bakdata.conquery.integration.sql.dialect; + +import java.time.LocalDate; + +import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; + +public class MockDateNowSupplier implements DateNowSupplier { + + @Override + public LocalDate getLocalDateNow() { + return LocalDate.parse("2023-03-28"); + } + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/PostgreSqlIntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/PostgreSqlIntegrationTests.java similarity index 67% rename from backend/src/test/java/com/bakdata/conquery/integration/sql/PostgreSqlIntegrationTests.java rename to backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/PostgreSqlIntegrationTests.java index 2e83b9b7e6..d4804c3076 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/sql/PostgreSqlIntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/PostgreSqlIntegrationTests.java @@ -1,7 +1,6 @@ -package com.bakdata.conquery.integration.sql; - -import static org.assertj.core.api.Assertions.assertThat; +package com.bakdata.conquery.integration.sql.dialect; +import java.util.List; import java.util.stream.Stream; import com.bakdata.conquery.TestTags; @@ -9,11 +8,15 @@ import com.bakdata.conquery.integration.IntegrationTests; import com.bakdata.conquery.models.config.Dialect; import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.sql.DslContextFactory; import com.bakdata.conquery.sql.SqlQuery; import com.bakdata.conquery.sql.conquery.SqlManagedQuery; +import com.bakdata.conquery.sql.conversion.dialect.PostgreSqlDialect; +import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter; +import com.bakdata.conquery.sql.conversion.select.SelectConverter; import com.bakdata.conquery.sql.execution.SqlExecutionService; import lombok.extern.slf4j.Slf4j; import org.assertj.core.api.Assertions; @@ -33,9 +36,9 @@ public class PostgreSqlIntegrationTests extends IntegrationTests { private static final DockerImageName postgreSqlImageName = DockerImageName.parse("postgres:alpine3.17"); - private static final String databaseName = "test"; - private static final String username = "user"; - private static final String password = "pass"; + private static final String DATABASE_NAME = "test"; + private static final String USERNAME = "user"; + private static final String PASSWORD = "pass"; private static DSLContext dslContext; private static SqlConnectorConfig sqlConfig; @@ -44,20 +47,20 @@ public PostgreSqlIntegrationTests() { } @Container - private static final PostgreSQLContainer postgresqlContainer = new PostgreSQLContainer<>(postgreSqlImageName) - .withDatabaseName(databaseName) - .withUsername(username) - .withPassword(password); + private static final PostgreSQLContainer POSTGRESQL_CONTAINER = new PostgreSQLContainer<>(postgreSqlImageName) + 
.withDatabaseName(DATABASE_NAME) + .withUsername(USERNAME) + .withPassword(PASSWORD); @BeforeAll static void before() { - postgresqlContainer.start(); + POSTGRESQL_CONTAINER.start(); sqlConfig = SqlConnectorConfig.builder() .dialect(Dialect.POSTGRESQL) - .jdbcConnectionUrl(postgresqlContainer.getJdbcUrl()) - .databaseUsername(username) - .databasePassword(password) + .jdbcConnectionUrl(POSTGRESQL_CONTAINER.getJdbcUrl()) + .databaseUsername(USERNAME) + .databasePassword(PASSWORD) .withPrettyPrinting(true) .primaryColumn("pid") .build(); @@ -77,7 +80,7 @@ public void shouldThrowException() { SqlManagedQuery emptyQuery = new SqlManagedQuery(new ConceptQuery(), null, null, null, new SqlQuery("")); Assertions.assertThatThrownBy(() -> executionService.execute(emptyQuery)) .isInstanceOf(ConqueryError.SqlError.class) - .hasMessageContaining("Something went wrong while querying the database: $org.postgresql.util.PSQLException"); + .hasMessageContaining("$org.postgresql.util.PSQLException"); } @@ -87,5 +90,19 @@ public Stream sqlBackendTests() { return super.sqlTests(new TestPostgreSqlDialect(dslContext), sqlConfig); } + private static class TestPostgreSqlDialect extends PostgreSqlDialect { + + public TestPostgreSqlDialect(DSLContext dslContext) { + super(dslContext); + } + + @Override + public List> getSelectConverters() { + return this.customizeSelectConverters(List.of( + new DateDistanceConverter(new MockDateNowSupplier()) + )); + } + + } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java new file mode 100644 index 0000000000..a4971a7ff5 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java @@ -0,0 +1,11 @@ +package com.bakdata.conquery.integration.sql.dialect; + +import com.bakdata.conquery.models.config.SqlConnectorConfig; +import org.jooq.DSLContext; + +public interface TestContextProvider { + + SqlConnectorConfig getSqlConnectorConfig(); + DSLContext getDslContext(); + +} diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/testcontainer/hana/HanaContainer.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/testcontainer/hana/HanaContainer.java new file mode 100644 index 0000000000..b60a476ffe --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/testcontainer/hana/HanaContainer.java @@ -0,0 +1,71 @@ +package com.bakdata.conquery.integration.sql.testcontainer.hana; + +import java.time.Duration; + +import org.testcontainers.containers.JdbcDatabaseContainer; +import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; +import org.testcontainers.utility.DockerImageName; + +public class HanaContainer> extends JdbcDatabaseContainer { + + public static final Integer DEFAULT_TENANT_HANA_PORT = 39041; + public static final String DEFAULT_MASTER_PASSWORD = "HXEHana1"; + private static final String USERNAME = "SYSTEM"; + private static final String DATABASE_NAME = "HXE"; + + public HanaContainer(DockerImageName dockerImageName) { + super(dockerImageName); + setWaitStrategy( + new LogMessageWaitStrategy() + .withRegEx(".*Startup finished.*\\s") + .withStartupTimeout(Duration.ofMinutes(10)) + ); + addExposedPort(DEFAULT_TENANT_HANA_PORT); + setCommand(composeHanaArgs()); + } + + @Override + public String getDriverClassName() { + return "com.sap.cloud.db.jdbc"; + } + + @Override + public String getJdbcUrl() { + return 
"jdbc:sap://%s:%s/?databaseName=%s&encrypt=true&validateCertificate=false".formatted( + getHost(), + getMappedPort(DEFAULT_TENANT_HANA_PORT), + DATABASE_NAME + ); + } + + @Override + public String getUsername() { + return USERNAME; + } + + @Override + public String getPassword() { + return DEFAULT_MASTER_PASSWORD; + } + + @Override + public String getDatabaseName() { + return DATABASE_NAME; + } + + @Override + protected String getTestQueryString() { + return "SELECT 1"; + } + + @Override + protected void waitUntilContainerStarted() { + getWaitStrategy().waitUntilReady(this); + } + + private String composeHanaArgs() { + return "--agree-to-sap-license " + + "--passwords-url file:///home/secrets/password.json"; + } + +} diff --git a/backend/src/test/resources/tests/sql/and/different_concept/and.json b/backend/src/test/resources/tests/sql/and/different_concept/and.json index 9e855976df..67b6bdf742 100644 --- a/backend/src/test/resources/tests/sql/and/different_concept/and.json +++ b/backend/src/test/resources/tests/sql/and/different_concept/and.json @@ -22,7 +22,7 @@ "type": "REAL_RANGE", "value": { "min": 0, - "max": 1 + "max": 1.0 } } ], @@ -93,7 +93,8 @@ "table": "table1", "validityDates": { "label": "datum", - "column": "table1.datum" + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" }, "filters": { "label": "value", @@ -175,8 +176,12 @@ "type": "REAL" }, { - "name": "datum", - "type": "DATE_RANGE" + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" } ] }, diff --git a/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv b/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv index 1851eed139..244295a111 100644 --- a/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv +++ b/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv @@ -1,13 +1,13 @@ -pid,value,datum -1,1,"2014-06-30/2015-06-30" -2,1.01,"2014-06-30/2015-06-30" -1,1,"2015-02-03/2015-06-30" -1,0.5,"2014-06-30/2015-06-30" -3,0.5,"2014-04-30/2014-06-30" -4,1,"2014-06-30/2015-06-30" -5,0.5,"2014-04-30/2014-06-30" -5,1,"2014-06-30/2015-06-30" -6,1,"2014-04-30/2014-06-30" -7,1,"2014-02-05/2014-02-20" -8,1,"2014-04-30/2014-06-30" -7,-1,"2014-06-30/2015-06-30" +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 +2,1.01,2014-06-30,2015-06-30 +1,1,2015-02-03,2015-06-30 +1,0.5,2014-06-30,2015-06-30 +3,0.5,2014-04-30,2014-06-30 +4,1,2014-06-30,2015-06-30 +5,0.5,2014-04-30,2014-06-30 +5,1,2014-06-30,2015-06-30 +6,1,2014-04-30,2014-06-30 +7,1,2014-02-05,2014-02-20 +8,1,2014-04-30,2014-06-30 +7,-1,2014-06-30,2015-06-30 diff --git a/backend/src/test/resources/tests/sql/and/different_concept/expected.csv b/backend/src/test/resources/tests/sql/and/different_concept/expected.csv index beeae56d79..c1885bafaf 100644 --- a/backend/src/test/resources/tests/sql/and/different_concept/expected.csv +++ b/backend/src/test/resources/tests/sql/and/different_concept/expected.csv @@ -1,4 +1,4 @@ -pid,datum,value,geschlecht,language -1,"[2014-06-30,2015-06-30)",1,f,de -1,"[2015-02-03,2015-06-30)",1,f,de -1,"[2014-06-30,2015-06-30)",0.5,f,de +pid,validity_date_1value,geschlecht,language +1,"[2014-06-30,2015-07-01)",1.00,f,de +1,"[2015-02-03,2015-07-01)",1.00,f,de +1,"[2014-06-30,2015-07-01)",0.50,f,de diff --git a/backend/src/test/resources/tests/sql/and/same_concept/and.json b/backend/src/test/resources/tests/sql/and/same_concept/and_same_concept.json similarity index 100% rename from 
backend/src/test/resources/tests/sql/and/same_concept/and.json rename to backend/src/test/resources/tests/sql/and/same_concept/and_same_concept.json diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv index 6f280c47ef..69f66cbc3e 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv @@ -1,9 +1,9 @@ -pid,datum,datum_alt,geschlecht -1,"2012-06-30/2015-06-30",2012-01-01,"f" -2,"2012-06-30/2015-06-30",2010-07-15,"m" -3,"2012-02-03/2012-06-30",2012-11-10,"f" -4,"2010-06-30/2015-06-30",2012-11-11,"m" -5,"2011-04-30/2014-06-30",2007-11-11,"" -6,"2015-06-30/2016-06-30",2012-11-11,"" -7,"2014-04-30/2015-06-30",2012-11-11,"mf" -8,"2012-04-30/2014-06-30",2012-11-11,"fm" +pid,datum_start,datum_end,datum_alt,geschlecht +1,2012-06-30,2015-06-30,2012-01-01,"f" +2,2012-06-30,2015-06-30,2010-07-15,"m" +3,2012-02-03,2012-06-30,2012-11-10,"f" +4,2010-06-30,2015-06-30,2012-11-11,"m" +5,2011-04-30,2014-06-30,2007-11-11,"" +6,2015-06-30,2016-06-30,2012-11-11,"" +7,2014-04-30,2015-06-30,2012-11-11,"mf" +8,2012-04-30,2014-06-30,2012-11-11,"fm" diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json index cecd7086e0..08897212ea 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json @@ -52,7 +52,8 @@ "validityDates": [ { "label": "datum", - "column": "table1.datum" + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" }, { "label": "datum_alt", @@ -80,8 +81,12 @@ }, "columns": [ { - "name": "datum", - "type": "DATE_RANGE" + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" }, { "name": "datum_alt", diff --git a/backend/src/test/resources/tests/sql/date_restriction/daterange/date_restriction_date_range.json b/backend/src/test/resources/tests/sql/date_restriction/daterange/date_restriction_date_range.json deleted file mode 100644 index 4942aacbfa..0000000000 --- a/backend/src/test/resources/tests/sql/date_restriction/daterange/date_restriction_date_range.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "label": "Date restriction query with daterange validity date", - "type": "SQL_TEST", - "expectedCsv": "expected.csv", - "query": { - "type": "CONCEPT_QUERY", - "root": { - "type": "AND", - "children": [ - { - "type": "DATE_RESTRICTION", - "dateRange": { - "min": "2012-01-01", - "max": "2012-12-31" - }, - "child": { - "ids": [ - "geschlecht_select" - ], - "type": "CONCEPT", - "label": "Geschlecht SELECT", - "tables": [ - { - "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": "geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ] - } - ] - } - } - ] - } - }, - "concepts": [ - { - "label": "geschlecht_select", - "type": "TREE", - "connectors": [ - { - "label": "geschlecht_connector", - "table": "table1", - "validityDates": { - "label": "datum", - "column": "table1.datum" - }, - "filters": { - "label": 
"geschlecht", - "description": "Geschlecht zur gegebenen Datumseinschränkung", - "column": "table1.geschlecht", - "type": "SELECT" - } - } - ] - } - ], - "content": { - "tables": [ - { - "csv": "tests/sql/date_restriction/daterange/content.csv", - "name": "table1", - "primaryColumn": { - "name": "pid", - "type": "STRING" - }, - "columns": [ - { - "name": "datum", - "type": "DATE_RANGE" - }, - { - "name": "geschlecht", - "type": "STRING" - } - ] - } - ] - } -} diff --git a/backend/src/test/resources/tests/sql/date_restriction/daterange/content.csv b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/content.csv similarity index 100% rename from backend/src/test/resources/tests/sql/date_restriction/daterange/content.csv rename to backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/content.csv diff --git a/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json new file mode 100644 index 0000000000..62965cefe5 --- /dev/null +++ b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json @@ -0,0 +1,83 @@ +{ + "type": "SQL_TEST", + "supportedDialects": [ + "POSTGRESQL" + ], + "label": "Date restriction with a daterange column validity date (PostgreSQL only)", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2012-01-01", + "max": "2012-12-31" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + }, + "concepts":[ + { + "label":"geschlecht_select", + "type":"TREE", + "connectors":[ + { + "label":"geschlecht_connector", + "table":"table1", + "validityDates":{ + "label":"datum", + "column":"table1.datum" + }, + "filters":{ + "label":"geschlecht", + "description":"Geschlecht zur gegebenen Datumseinschränkung", + "column":"table1.geschlecht", + "type":"SELECT" + } + } + ] + } + ], + "content":{ + "tables":[ + { + "csv":"tests/sql/date_restriction/postgres_daterange/content.csv", + "name":"table1", + "primaryColumn":{ + "name":"pid", + "type":"STRING" + }, + "columns":[ + { + "name":"datum", + "type":"DATE_RANGE" + }, + { + "name":"geschlecht", + "type":"STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/date_restriction/daterange/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/expected.csv similarity index 68% rename from backend/src/test/resources/tests/sql/date_restriction/daterange/expected.csv rename to backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/expected.csv index a8bff966b3..777e8d3e55 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/daterange/expected.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/expected.csv @@ -1,3 +1,3 @@ -pid,datum +pid,datum_start,datum_end 1,"[2012-06-30,2015-06-30)" 3,"[2012-02-03,2012-06-30)" diff --git a/backend/src/test/resources/tests/sql/filter/number/content.csv b/backend/src/test/resources/tests/sql/filter/number/content.csv index 1851eed139..5ceffe16ca 100644 --- 
a/backend/src/test/resources/tests/sql/filter/number/content.csv +++ b/backend/src/test/resources/tests/sql/filter/number/content.csv @@ -1,13 +1,13 @@ -pid,value,datum -1,1,"2014-06-30/2015-06-30" -2,1.01,"2014-06-30/2015-06-30" -1,1,"2015-02-03/2015-06-30" -1,0.5,"2014-06-30/2015-06-30" -3,0.5,"2014-04-30/2014-06-30" -4,1,"2014-06-30/2015-06-30" -5,0.5,"2014-04-30/2014-06-30" -5,1,"2014-06-30/2015-06-30" -6,1,"2014-04-30/2014-06-30" -7,1,"2014-02-05/2014-02-20" -8,1,"2014-04-30/2014-06-30" -7,-1,"2014-06-30/2015-06-30" +pid,value +1,1 +2,1.01 +1,1 +1,0.5 +3,0.5 +4,1 +5,0.5 +5,1 +6,1 +7,1 +8,1 +7,-1 diff --git a/backend/src/test/resources/tests/sql/filter/number/number.spec.json b/backend/src/test/resources/tests/sql/filter/number/number.spec.json index 46dd0f5425..e480a59934 100644 --- a/backend/src/test/resources/tests/sql/filter/number/number.spec.json +++ b/backend/src/test/resources/tests/sql/filter/number/number.spec.json @@ -63,10 +63,6 @@ { "name": "value", "type": "REAL" - }, - { - "name": "datum", - "type": "DATE_RANGE" } ] } diff --git a/backend/src/test/resources/tests/sql/filter/number_only_max/content.csv b/backend/src/test/resources/tests/sql/filter/number_only_max/content.csv index 1851eed139..5ceffe16ca 100644 --- a/backend/src/test/resources/tests/sql/filter/number_only_max/content.csv +++ b/backend/src/test/resources/tests/sql/filter/number_only_max/content.csv @@ -1,13 +1,13 @@ -pid,value,datum -1,1,"2014-06-30/2015-06-30" -2,1.01,"2014-06-30/2015-06-30" -1,1,"2015-02-03/2015-06-30" -1,0.5,"2014-06-30/2015-06-30" -3,0.5,"2014-04-30/2014-06-30" -4,1,"2014-06-30/2015-06-30" -5,0.5,"2014-04-30/2014-06-30" -5,1,"2014-06-30/2015-06-30" -6,1,"2014-04-30/2014-06-30" -7,1,"2014-02-05/2014-02-20" -8,1,"2014-04-30/2014-06-30" -7,-1,"2014-06-30/2015-06-30" +pid,value +1,1 +2,1.01 +1,1 +1,0.5 +3,0.5 +4,1 +5,0.5 +5,1 +6,1 +7,1 +8,1 +7,-1 diff --git a/backend/src/test/resources/tests/sql/filter/number_only_max/number_only_max.spec.json b/backend/src/test/resources/tests/sql/filter/number_only_max/number_only_max.spec.json index 1b49d2b5aa..3f9f991d8a 100644 --- a/backend/src/test/resources/tests/sql/filter/number_only_max/number_only_max.spec.json +++ b/backend/src/test/resources/tests/sql/filter/number_only_max/number_only_max.spec.json @@ -62,10 +62,6 @@ { "name": "value", "type": "REAL" - }, - { - "name": "datum", - "type": "DATE_RANGE" } ] } diff --git a/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv b/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv index 1851eed139..5ceffe16ca 100644 --- a/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv +++ b/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv @@ -1,13 +1,13 @@ -pid,value,datum -1,1,"2014-06-30/2015-06-30" -2,1.01,"2014-06-30/2015-06-30" -1,1,"2015-02-03/2015-06-30" -1,0.5,"2014-06-30/2015-06-30" -3,0.5,"2014-04-30/2014-06-30" -4,1,"2014-06-30/2015-06-30" -5,0.5,"2014-04-30/2014-06-30" -5,1,"2014-06-30/2015-06-30" -6,1,"2014-04-30/2014-06-30" -7,1,"2014-02-05/2014-02-20" -8,1,"2014-04-30/2014-06-30" -7,-1,"2014-06-30/2015-06-30" +pid,value +1,1 +2,1.01 +1,1 +1,0.5 +3,0.5 +4,1 +5,0.5 +5,1 +6,1 +7,1 +8,1 +7,-1 diff --git a/backend/src/test/resources/tests/sql/filter/number_only_min/number_only_min.spec.json b/backend/src/test/resources/tests/sql/filter/number_only_min/number_only_min.spec.json index 918c2d521d..c95f1d2670 100644 --- a/backend/src/test/resources/tests/sql/filter/number_only_min/number_only_min.spec.json +++ 
b/backend/src/test/resources/tests/sql/filter/number_only_min/number_only_min.spec.json @@ -62,10 +62,6 @@ { "name": "value", "type": "REAL" - }, - { - "name": "datum", - "type": "DATE_RANGE" } ] } diff --git a/backend/src/test/resources/tests/sql/filter/select/content.csv b/backend/src/test/resources/tests/sql/filter/select/content.csv index db93b08bd4..7d719b498c 100644 --- a/backend/src/test/resources/tests/sql/filter/select/content.csv +++ b/backend/src/test/resources/tests/sql/filter/select/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2013-11-10,"f" -4,2012-11-11,"m" -5,2007-11-11,"" -6,2012-11-11,"" -7,2012-11-11,"mf" -8,2012-11-11,"fm" +pid,geschlecht +1,"f" +2,"m" +3,"f" +4,"m" +5,"" +6,"" +7,"mf" +8,"fm" diff --git a/backend/src/test/resources/tests/sql/filter/select/select.spec.json b/backend/src/test/resources/tests/sql/filter/select/select.spec.json index 11ae8586f4..c545967747 100644 --- a/backend/src/test/resources/tests/sql/filter/select/select.spec.json +++ b/backend/src/test/resources/tests/sql/filter/select/select.spec.json @@ -59,10 +59,6 @@ "type":"STRING" }, "columns":[ - { - "name":"datum", - "type":"DATE" - }, { "name":"geschlecht", "type":"STRING" diff --git a/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv b/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv index 1851eed139..244295a111 100644 --- a/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv +++ b/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv @@ -1,13 +1,13 @@ -pid,value,datum -1,1,"2014-06-30/2015-06-30" -2,1.01,"2014-06-30/2015-06-30" -1,1,"2015-02-03/2015-06-30" -1,0.5,"2014-06-30/2015-06-30" -3,0.5,"2014-04-30/2014-06-30" -4,1,"2014-06-30/2015-06-30" -5,0.5,"2014-04-30/2014-06-30" -5,1,"2014-06-30/2015-06-30" -6,1,"2014-04-30/2014-06-30" -7,1,"2014-02-05/2014-02-20" -8,1,"2014-04-30/2014-06-30" -7,-1,"2014-06-30/2015-06-30" +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 +2,1.01,2014-06-30,2015-06-30 +1,1,2015-02-03,2015-06-30 +1,0.5,2014-06-30,2015-06-30 +3,0.5,2014-04-30,2014-06-30 +4,1,2014-06-30,2015-06-30 +5,0.5,2014-04-30,2014-06-30 +5,1,2014-06-30,2015-06-30 +6,1,2014-04-30,2014-06-30 +7,1,2014-02-05,2014-02-20 +8,1,2014-04-30,2014-06-30 +7,-1,2014-06-30,2015-06-30 diff --git a/backend/src/test/resources/tests/sql/or/different_concept/expected.csv b/backend/src/test/resources/tests/sql/or/different_concept/expected.csv index dea40ae400..e5155afc92 100644 --- a/backend/src/test/resources/tests/sql/or/different_concept/expected.csv +++ b/backend/src/test/resources/tests/sql/or/different_concept/expected.csv @@ -1,4 +1,4 @@ pid,validity_date_1,,value,geschlecht,language -7,"[2014-06-30,2015-06-30)",-1,mf, +7,"[2014-06-30,2015-07-01)",-1.00,mf, 8,,,,fr 2,,,,fr diff --git a/backend/src/test/resources/tests/sql/or/different_concept/or.spec.json b/backend/src/test/resources/tests/sql/or/different_concept/or.spec.json index 4fd7f736e4..0e29374a60 100644 --- a/backend/src/test/resources/tests/sql/or/different_concept/or.spec.json +++ b/backend/src/test/resources/tests/sql/or/different_concept/or.spec.json @@ -93,7 +93,8 @@ "table": "table1", "validityDates": { "label": "datum", - "column": "table1.datum" + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" }, "filters": { "label": "value", @@ -175,8 +176,12 @@ "type": "REAL" }, { - "name": "datum", - "type": "DATE_RANGE" + "name": "datum_start", + "type": "DATE" + }, + { + "name": 
"datum_end", + "type": "DATE" } ] }, diff --git a/backend/src/test/resources/tests/sql/or/same_concept/or.spec.json b/backend/src/test/resources/tests/sql/or/same_concept/or_same_concept.spec.json similarity index 100% rename from backend/src/test/resources/tests/sql/or/same_concept/or.spec.json rename to backend/src/test/resources/tests/sql/or/same_concept/or_same_concept.spec.json diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv index c2d4f04aef..2dfbea2947 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv @@ -1,5 +1,5 @@ pid,datum,geschlecht -1,2012-01-01,"f" +1,2012-01-29,"f" 2,2010-07-15,"m" 3,2010-11-10,"f" 4,2013-11-11,"m" diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv index d0212b39b1..62ac211ec8 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv @@ -1,3 +1,3 @@ pid,date_distance_months -1,134 +1,133 3,148 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv index c0ec9df19d..41a6149bff 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv @@ -1,3 +1,3 @@ pid,date_distance_years 1,11 -3,13 +3,12 diff --git a/executable/pom.xml b/executable/pom.xml index ca58ce7c88..12762e1c6c 100644 --- a/executable/pom.xml +++ b/executable/pom.xml @@ -68,9 +68,12 @@ - com.bakdata.conquery.ConqueryServer - + implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> + com.bakdata.conquery.ConqueryServer + + true + + From 4a189290c04a625d84303e03498e36dd972da369 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 11:30:53 +0200 Subject: [PATCH 83/96] Refactor line to be in Dropzone, minor changes for code cleanliness --- .../DropzoneBetweenElements.tsx | 26 +++++++++---------- .../form-components/DropzoneList.tsx | 5 ++-- .../form-concept-group/FormConceptGroup.tsx | 5 +++- frontend/src/js/ui-components/Dropzone.tsx | 6 ----- 4 files changed, 19 insertions(+), 23 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 5ddbc9cd01..2e939ef1d4 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -1,5 +1,4 @@ import styled from "@emotion/styled"; -import { useState } from "react"; import { DropTargetMonitor } from "react-dnd"; import Dropzone, { @@ -13,16 +12,17 @@ interface Props { height: number; } -const LineHeight = 3; +const Root = styled("div")` + display: flex; + height: 4px; +`; -const Line = styled("div")<{ show: boolean }>` - overflow: hidden; - display: block; - visibility: ${({ show }) => (show ? 
"visible" : "hidden")}; +const Line = styled("div")` background-color: ${({ theme }) => theme.col.blueGrayDark}; margin: 1px 0; - height: ${LineHeight}px; + height: 4px; border-radius: 2px; + flex-grow: 1; `; const SxDropzone = styled(Dropzone)<{ height: number; top: number }>` @@ -38,21 +38,19 @@ const DropzoneBetweenElements = ({ height, top, }: Props) => { - let [isOver, setIsOver] = useState(false); - return ( - <> - + - + > + {({ isOver }) => isOver && } + + ); }; diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 8bf451dc93..05fd3c5751 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -23,6 +23,7 @@ const ListItem = styled("div")` box-shadow: 0 0 3px 0 rgba(0, 0, 0, 0.1); background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; + margin-bottom: 5px; `; const StyledIconButton = styled(IconButton)` @@ -117,8 +118,8 @@ const DropzoneList = ( )} diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 82de595e0e..0c30a5fe49 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -225,7 +225,10 @@ const FormConceptGroup = (props: Props) => { item.dragContext; if (movedFromFieldName === props.fieldName) { - if (i > movedFromAndIdx && movedFromOrIdx === 0) { + const willConceptMoveUp = + i > movedFromAndIdx && + props.value[movedFromAndIdx].concepts.length == 1; + if (willConceptMoveUp) { insertIndex = i - 1; } newPropsValue = diff --git a/frontend/src/js/ui-components/Dropzone.tsx b/frontend/src/js/ui-components/Dropzone.tsx index c83df6cd46..1b4a42b45e 100644 --- a/frontend/src/js/ui-components/Dropzone.tsx +++ b/frontend/src/js/ui-components/Dropzone.tsx @@ -67,7 +67,6 @@ export interface DropzoneProps { canDrop?: (props: DroppableObject, monitor: DropTargetMonitor) => boolean; onClick?: () => void; children?: (args: ChildArgs) => ReactNode; - setIsOver?: (state: boolean) => void; } export type PossibleDroppableObject = @@ -108,7 +107,6 @@ const Dropzone = ( onClick, invisible, children, - setIsOver, }: DropzoneProps, ref?: ForwardedRef, ) => { @@ -128,10 +126,6 @@ const Dropzone = ( }), }); - useEffect(() => { - if (setIsOver) setIsOver(isOver); - }, [isOver, setIsOver]); - return ( { From 9475ac1399c580b9420241f1e8fd33984122b2a4 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 11:34:42 +0200 Subject: [PATCH 84/96] small lint changes --- .../js/external-forms/form-concept-group/FormConceptGroup.tsx | 2 +- frontend/src/js/ui-components/Dropzone.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 0c30a5fe49..6171a402ff 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -227,7 +227,7 @@ const FormConceptGroup = (props: Props) => { if (movedFromFieldName === props.fieldName) { const willConceptMoveUp = i > movedFromAndIdx && - props.value[movedFromAndIdx].concepts.length == 1; + props.value[movedFromAndIdx].concepts.length === 1; if (willConceptMoveUp) { insertIndex = i 
- 1; } diff --git a/frontend/src/js/ui-components/Dropzone.tsx b/frontend/src/js/ui-components/Dropzone.tsx index 1b4a42b45e..9662a49891 100644 --- a/frontend/src/js/ui-components/Dropzone.tsx +++ b/frontend/src/js/ui-components/Dropzone.tsx @@ -1,5 +1,5 @@ import styled from "@emotion/styled"; -import { ForwardedRef, forwardRef, ReactNode, useEffect } from "react"; +import { ForwardedRef, forwardRef, ReactNode } from "react"; import { DropTargetMonitor, useDrop } from "react-dnd"; import { DNDType } from "../common/constants/dndTypes"; From b45c424488a45222aa0ba48e5774acc1cec425ad Mon Sep 17 00:00:00 2001 From: Kai Rollmann Date: Mon, 28 Aug 2023 12:21:46 +0200 Subject: [PATCH 85/96] Simplify css and DropzoneBetweenElements --- .../DropzoneBetweenElements.tsx | 55 ++++++++----------- .../form-components/DropzoneList.tsx | 5 -- 2 files changed, 22 insertions(+), 38 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 2e939ef1d4..07fa20463e 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -5,52 +5,41 @@ import Dropzone, { PossibleDroppableObject, } from "../../ui-components/Dropzone"; -interface Props { - onDrop: (props: PossibleDroppableObject, monitor: DropTargetMonitor) => void; - acceptedDropTypes: string[]; - top: number; - height: number; -} - -const Root = styled("div")` - display: flex; - height: 4px; -`; +const LINE_HEIGHT = 4; const Line = styled("div")` background-color: ${({ theme }) => theme.col.blueGrayDark}; - margin: 1px 0; - height: 4px; - border-radius: 2px; - flex-grow: 1; + height: ${LINE_HEIGHT}px; + width: 100%; + border-radius: ${({ theme }) => theme.borderRadius}; `; -const SxDropzone = styled(Dropzone)<{ height: number; top: number }>` - height: ${({ height }) => height}px; - top: ${({ top }) => top}px; +const SxDropzone = styled(Dropzone)` + height: 30px; position: absolute; + top: 0; + left: 0; + transform: translateY(calc(-50% - ${LINE_HEIGHT / 2}px)); + z-index: 10; background-color: transparent; `; const DropzoneBetweenElements = ({ acceptedDropTypes, onDrop, - height, - top, -}: Props) => { +}: { + onDrop: (props: PossibleDroppableObject, monitor: DropTargetMonitor) => void; + acceptedDropTypes: string[]; +}) => { return ( - - - {({ isOver }) => isOver && } - - + + {({ isOver }) => isOver && } + ); }; diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 05fd3c5751..00d5b86052 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -102,8 +102,6 @@ const DropzoneList = ( )} @@ -112,14 +110,11 @@ const DropzoneList = ( ))} - {!disallowMultipleColumns && ( )} From f1cfc82c41e2126f15663890b111541c51e27386 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 12:59:01 +0200 Subject: [PATCH 86/96] use mark directive --- frontend/package.json | 1 + frontend/src/js/tooltip/Tooltip.tsx | 93 +++-------------------------- frontend/yarn.lock | 28 +++++++++ 3 files changed, 37 insertions(+), 85 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index d886016692..958226c031 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -76,6 +76,7 @@ "react-window": "^1.8.6", "redux": "^4.1.2", 
"redux-devtools-extension": "^2.13.9", + "remark-flexible-markers": "^1.0.3", "remark-gfm": "^3.0.1", "resize-observer-polyfill": "^1.5.1", "typesafe-actions": "^5.1.0", diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index 28c29c0e7c..572dbae924 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -1,20 +1,11 @@ import styled from "@emotion/styled"; import { faThumbtack, IconDefinition } from "@fortawesome/free-solid-svg-icons"; -import { - Children, - DetailedHTMLProps, - ElementType, - HTMLAttributes, - ReactElement, - ReactFragment, - ReactNode, - ReactPortal, -} from "react"; +import { ReactNode } from "react"; import Highlighter from "react-highlight-words"; import { useTranslation } from "react-i18next"; import Markdown from "react-markdown"; -import { ReactMarkdownProps } from "react-markdown/lib/complex-types"; import { useDispatch, useSelector } from "react-redux"; +import remarkFlexibleMarkers from "remark-flexible-markers"; import remarkGfm from "remark-gfm"; import type { StateT } from "../app/reducers"; @@ -168,65 +159,10 @@ const ConceptLabel = ({ ); }; -function isReactElement( - element: ReactFragment | ReactElement | ReactPortal | boolean | number, -): element is ReactElement { - return ( - typeof element === "object" && - element.hasOwnProperty("type") && - element.hasOwnProperty("props") - ); -} - -type MarkdownElement = - | ReactFragment - | ReactElement - | ReactPortal - | boolean - | number - | boolean - | string - | null - | undefined; -function highlight( - words: string[], - Element: Omit< - DetailedHTMLProps, HTMLElement>, - "ref" - > & - ReactMarkdownProps, -): ReactElement | null { - if (!Element) { - return Element; - } - const mappingFunction = (child: MarkdownElement): ReactElement => { - if (!child) return <>; - if (typeof child === "string") { - return HighlightedText({ words, text: child }); - } - if (isReactElement(child)) { - let TagName = child.type as ElementType; - return ( - - {highlight(words, child.props.children)} - - ); - } - return <>{child}; - }; - - if (Array.isArray(Element)) { - return <>{Children.map(Element, mappingFunction)}; - } - - let children = - typeof Element === "object" && Element.hasOwnProperty("children") - ? 
Children.map(Element.children, mappingFunction) - : Element.children; - - let TagName = Element.node?.tagName as ElementType; - return {children}; -} +const mark = (text: string, words: string[]): string => { + const regex = new RegExp(words.join("|"), "gi"); + return text.replace(regex, "==$&=="); +}; const Tooltip = () => { const words = useSelector( @@ -304,21 +240,8 @@ const Tooltip = () => { - highlight(words, el), - td: (el) => highlight(words, el), - b: (el) => highlight(words, el), - th: (el) => highlight(words, el), - i: (el) => highlight(words, el), - ul: (el) => highlight(words, el), - ol: (el) => highlight(words, el), - h1: (el) => highlight(words, el), - h2: (el) => highlight(words, el), - }} - > - {info.value} + + {mark(info.value, words)} ))} diff --git a/frontend/yarn.lock b/frontend/yarn.lock index ae9546cbe7..4137d6297a 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -3113,6 +3113,13 @@ dependencies: "@types/unist" "*" +"@types/mdast@^3.0.10": + version "3.0.12" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.12.tgz#beeb511b977c875a5b0cc92eab6fcac2f0895514" + integrity sha512-DT+iNIRNX884cx0/Q1ja7NyUPpZuv0KPyL5rGNxm1WC1OtHstl7n4Jb7nk+xacNShQMbczJjt8uFzznpp6kYBg== + dependencies: + "@types/unist" "^2" + "@types/mdx@^2.0.0": version "2.0.3" resolved "https://registry.yarnpkg.com/@types/mdx/-/mdx-2.0.3.tgz#43fd32414f17fcbeced3578109a6edd877a2d96e" @@ -3317,6 +3324,11 @@ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== +"@types/unist@^2": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.7.tgz#5b06ad6894b236a1d2bd6b2f07850ca5c59cf4d6" + integrity sha512-cputDpIbFgLUaGQn6Vqg3/YsJwxUwHLO13v3i5ouxT4lat0khip9AEWxtERujXV9wxIB1EyF97BSJFt6vpdI8g== + "@types/use-sync-external-store@^0.0.3": version "0.0.3" resolved "https://registry.yarnpkg.com/@types/use-sync-external-store/-/use-sync-external-store-0.0.3.tgz#b6725d5f4af24ace33b36fafd295136e75509f43" @@ -8849,6 +8861,15 @@ remark-external-links@^8.0.0: space-separated-tokens "^1.0.0" unist-util-visit "^2.0.0" +remark-flexible-markers@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/remark-flexible-markers/-/remark-flexible-markers-1.0.3.tgz#50914f5cac13da45b7d0ccb9a517b6c3cc8c85dc" + integrity sha512-O3aXLFXbPZRS9lZfTxqzgG0sknqgMdz0pfqp4vx5cofjlPfSJVyGMfTB5jxIdum4XzhN0FVtRBB+ksZocX168w== + dependencies: + "@types/mdast" "^3.0.10" + unist-builder "^3.0.1" + unist-util-visit "^4.0.0" + remark-gfm@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/remark-gfm/-/remark-gfm-3.0.1.tgz#0b180f095e3036545e9dddac0e8df3fa5cfee54f" @@ -9822,6 +9843,13 @@ unique-string@^2.0.0: dependencies: crypto-random-string "^2.0.0" +unist-builder@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/unist-builder/-/unist-builder-3.0.1.tgz#258b89dcadd3c973656b2327b347863556907f58" + integrity sha512-gnpOw7DIpCA0vpr6NqdPvTWnlPTApCTRzr+38E6hCWx3rz/cjo83SsKIlS1Z+L5ttScQ2AwutNnb8+tAvpb6qQ== + dependencies: + "@types/unist" "^2.0.0" + unist-util-generated@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/unist-util-generated/-/unist-util-generated-2.0.1.tgz#e37c50af35d3ed185ac6ceacb6ca0afb28a85cae" From c762a1b8faf44fd979988ee3f5bdb6f360b816b2 Mon Sep 17 00:00:00 2001 From: Kai Rollmann Date: Mon, 28 Aug 2023 14:32:42 +0200 Subject: [PATCH 87/96] Fix last dropzone overlapping --- 
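Note: the mark() helper introduced in Tooltip.tsx by patch 86/96 (and refined in patches 90-92/96) wraps every regex match in ==...==, which the remark-flexible-markers plugin is expected to render as <mark> elements in the tooltip markdown. A minimal sketch of that behavior, with invented sample words, purely for illustration:

    // Illustrative sketch only, not part of the patch. Mirrors the final shape of
    // mark() and highlightRegex in Tooltip.tsx; the sample text and words are made up.
    const mark = (text: string, regex: RegExp | null): string =>
      regex ? text.replace(regex, "==$&==") : text;

    const words = ["icd", "diagnosis"];
    const highlightRegex = new RegExp(words.join("|"), "gi");

    console.log(mark("ICD diagnosis codes", highlightRegex));
    // -> "==ICD== ==diagnosis== codes"; remark-flexible-markers should then render
    //    the ==...== spans as <mark> elements in the rendered output.
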
.../form-components/DropzoneBetweenElements.tsx | 5 ++++- .../src/js/external-forms/form-components/DropzoneList.tsx | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx index 07fa20463e..6642ee3f51 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneBetweenElements.tsx @@ -20,19 +20,22 @@ const SxDropzone = styled(Dropzone)` top: 0; left: 0; transform: translateY(calc(-50% - ${LINE_HEIGHT / 2}px)); - z-index: 10; + z-index: 1; background-color: transparent; `; const DropzoneBetweenElements = ({ acceptedDropTypes, onDrop, + className, }: { onDrop: (props: PossibleDroppableObject, monitor: DropTargetMonitor) => void; acceptedDropTypes: string[]; + className?: string; }) => { return ( { className?: string; @@ -112,7 +115,7 @@ const DropzoneList = ( ))} {!disallowMultipleColumns && ( - From 67f0c5ceecbbfb1eebaea22a7cb5a0bed390c81c Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 14:53:47 +0200 Subject: [PATCH 88/96] code cleanup --- .../form-concept-group/FormConceptGroup.tsx | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 6171a402ff..8821c30203 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -210,7 +210,7 @@ const FormConceptGroup = (props: Props) => { } dropBetween={(i: number) => { return (item: PossibleDroppableObject) => { - if (item.type !== DNDType.CONCEPT_TREE_NODE) return; + if (item.type !== DNDType.CONCEPT_TREE_NODE) return null; if (props.isValidConcept && !props.isValidConcept(item)) return null; @@ -225,10 +225,10 @@ const FormConceptGroup = (props: Props) => { item.dragContext; if (movedFromFieldName === props.fieldName) { - const willConceptMoveUp = + const willConceptMoveDown = i > movedFromAndIdx && props.value[movedFromAndIdx].concepts.length === 1; - if (willConceptMoveUp) { + if (willConceptMoveDown) { insertIndex = i - 1; } newPropsValue = @@ -458,7 +458,15 @@ const FormConceptGroup = (props: Props) => { if (isMovedObject(concept)) { const { movedFromFieldName, movedFromAndIdx, movedFromOrIdx } = concept.dragContext; - valueIdx = valueIdx > movedFromAndIdx ? valueIdx - 1 : valueIdx; + + // If the concept is moved from the same field and the concept is the only one + // in the value the index of the selected concept might change after the drop + const willSelectedConceptIndexChange = + valueIdx > movedFromAndIdx && + props.value[movedFromOrIdx].concepts.length === 1; + valueIdx = willSelectedConceptIndexChange + ? 
valueIdx - 1 + : valueIdx; if (movedFromFieldName === props.fieldName) { updatedValue = updatedValue[movedFromAndIdx].concepts.length === 1 From af52afc9bb28da39abfc947105664887b6791a77 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 15:11:20 +0200 Subject: [PATCH 89/96] useMemo on regex --- frontend/src/js/tooltip/Tooltip.tsx | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index 572dbae924..345ad29d76 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -1,6 +1,6 @@ import styled from "@emotion/styled"; import { faThumbtack, IconDefinition } from "@fortawesome/free-solid-svg-icons"; -import { ReactNode } from "react"; +import { ReactNode, useMemo } from "react"; import Highlighter from "react-highlight-words"; import { useTranslation } from "react-i18next"; import Markdown from "react-markdown"; @@ -159,8 +159,7 @@ const ConceptLabel = ({ ); }; -const mark = (text: string, words: string[]): string => { - const regex = new RegExp(words.join("|"), "gi"); +const mark = (text: string, regex: RegExp): string => { return text.replace(regex, "==$&=="); }; @@ -188,6 +187,8 @@ const Tooltip = () => { (state) => state.tooltip.toggleAdditionalInfos, ); + const highlightRegex = useMemo(() => new RegExp(words.join("|"), "gi"), [words]); + const dispatch = useDispatch(); const onToggleAdditionalInfos = () => dispatch(toggleInfos()); @@ -241,7 +242,7 @@ const Tooltip = () => { - {mark(info.value, words)} + {mark(info.value, highlightRegex)} ))} From b29ce846a6f10cfca7cd59d516223f2fdcdaf8bf Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 15:17:02 +0200 Subject: [PATCH 90/96] format, and fix small bug --- frontend/src/js/tooltip/Tooltip.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index 345ad29d76..f79c507bea 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -160,6 +160,7 @@ const ConceptLabel = ({ }; const mark = (text: string, regex: RegExp): string => { + if (!regex) return text; return text.replace(regex, "==$&=="); }; @@ -187,7 +188,10 @@ const Tooltip = () => { (state) => state.tooltip.toggleAdditionalInfos, ); - const highlightRegex = useMemo(() => new RegExp(words.join("|"), "gi"), [words]); + const highlightRegex = useMemo( + () => words.length > 0 ? new RegExp(words.join("|"), "gi") : null, + [words], + ); const dispatch = useDispatch(); const onToggleAdditionalInfos = () => dispatch(toggleInfos()); From 4c074095decb8749ab534cc99d43993addaf390d Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 15:27:03 +0200 Subject: [PATCH 91/96] fix small error --- frontend/src/js/tooltip/Tooltip.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index f79c507bea..ba370e7345 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -159,7 +159,7 @@ const ConceptLabel = ({ ); }; -const mark = (text: string, regex: RegExp): string => { +const mark = (text: string, regex: RegExp | null): string => { if (!regex) return text; return text.replace(regex, "==$&=="); }; @@ -189,7 +189,7 @@ const Tooltip = () => { ); const highlightRegex = useMemo( - () => words.length > 0 ? new RegExp(words.join("|"), "gi") : null, + () => (words.length > 0 ? 
new RegExp(words.join("|"), "gi") : null), [words], ); From 351353383b38a523837608b9064304c3a53d3204 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 16:58:59 +0200 Subject: [PATCH 92/96] filter out words with 0 length for regex -> leading to invalid regex --- frontend/src/js/tooltip/Tooltip.tsx | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index ba370e7345..bfbf5247a4 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -188,10 +188,12 @@ const Tooltip = () => { (state) => state.tooltip.toggleAdditionalInfos, ); - const highlightRegex = useMemo( - () => (words.length > 0 ? new RegExp(words.join("|"), "gi") : null), - [words], - ); + const highlightRegex = useMemo(() => { + console.log("words", words); + return words.length > 0 + ? new RegExp(words.filter((word) => word.length > 0).join("|"), "gi") + : null; + }, [words]); const dispatch = useDispatch(); const onToggleAdditionalInfos = () => dispatch(toggleInfos()); From 3b3ec99cd6f9e47a1b0469dd23d79c57d905ec72 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Mon, 28 Aug 2023 17:00:18 +0200 Subject: [PATCH 93/96] remove log --- frontend/src/js/tooltip/Tooltip.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/src/js/tooltip/Tooltip.tsx b/frontend/src/js/tooltip/Tooltip.tsx index bfbf5247a4..2e8c0fe511 100644 --- a/frontend/src/js/tooltip/Tooltip.tsx +++ b/frontend/src/js/tooltip/Tooltip.tsx @@ -189,7 +189,6 @@ const Tooltip = () => { ); const highlightRegex = useMemo(() => { - console.log("words", words); return words.length > 0 ? new RegExp(words.filter((word) => word.length > 0).join("|"), "gi") : null; From e6dc0a3b99d94dbab91450aee6e6b8a84488fdf1 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 29 Aug 2023 10:37:39 +0200 Subject: [PATCH 94/96] Change top for last and first DropzoneBetweenElements and change timing for hover again --- .../external-forms/form-components/DropzoneList.tsx | 13 ++++++++++--- .../js/small-tab-navigation/HoverNavigatable.tsx | 2 +- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 2aa8d41181..048ec99144 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -40,8 +40,14 @@ const Row = styled("div")` const ConceptContainer = styled("div")` position: relative; `; -const SxDropzoneBetweenElements = styled(DropzoneBetweenElements)` + +const SxDropzoneBetweenElements = styled(DropzoneBetweenElements)<{index: number}>` + ${({ index }) => index === 0 ? 
"top: 3px;" : ""} +`; + +const SxLastDropzoneBetweenElements = styled(DropzoneBetweenElements)` height: 15px; + top: -5px; `; interface PropsT { @@ -102,9 +108,10 @@ const DropzoneList = ( {items.map((item, i) => ( {!disallowMultipleColumns && ( - )} @@ -115,7 +122,7 @@ const DropzoneList = ( ))} {!disallowMultipleColumns && ( - diff --git a/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx b/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx index 1f70870c3b..f09417cb02 100644 --- a/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx +++ b/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx @@ -33,7 +33,7 @@ const Root = styled("div")<{ `; // estimated to feel responsive, but not too quick -const TIME_UNTIL_NAVIGATE = 1000; +const TIME_UNTIL_NAVIGATE = 1400; export const HoverNavigatable = ({ triggerNavigate, From b1b24ee2a3f73692aa70635aac35b9880e9d7d58 Mon Sep 17 00:00:00 2001 From: Fabian Blank Date: Tue, 29 Aug 2023 11:33:48 +0200 Subject: [PATCH 95/96] Formatting --- .../src/js/external-forms/form-components/DropzoneList.tsx | 6 ++++-- frontend/src/js/small-tab-navigation/HoverNavigatable.tsx | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 048ec99144..953d16e1e2 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -41,8 +41,10 @@ const ConceptContainer = styled("div")` position: relative; `; -const SxDropzoneBetweenElements = styled(DropzoneBetweenElements)<{index: number}>` - ${({ index }) => index === 0 ? "top: 3px;" : ""} +const SxDropzoneBetweenElements = styled(DropzoneBetweenElements)<{ + index: number; +}>` + ${({ index }) => (index === 0 ? 
"top: 3px;" : "")} `; const SxLastDropzoneBetweenElements = styled(DropzoneBetweenElements)` diff --git a/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx b/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx index f09417cb02..0be4464475 100644 --- a/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx +++ b/frontend/src/js/small-tab-navigation/HoverNavigatable.tsx @@ -33,7 +33,7 @@ const Root = styled("div")<{ `; // estimated to feel responsive, but not too quick -const TIME_UNTIL_NAVIGATE = 1400; +const TIME_UNTIL_NAVIGATE = 1300; export const HoverNavigatable = ({ triggerNavigate, From 078a89d7cd84d08f35e797f00799dad0e227eadc Mon Sep 17 00:00:00 2001 From: awildturtok <1553491+awildturtok@users.noreply.github.com> Date: Wed, 30 Aug 2023 12:54:25 +0200 Subject: [PATCH 96/96] adds config option to always allow custom input in SELECT-style Filters --- .../query/concept/filter/FilterValue.java | 2 +- .../models/config/FrontendConfig.java | 5 +++ .../concepts/FrontEndConceptBuilder.java | 35 ++++++++++--------- .../datasets/concepts/filters/Filter.java | 25 ++++++------- .../filters/specific/CountFilter.java | 3 +- .../filters/specific/CountQuartersFilter.java | 3 +- .../filters/specific/DateDistanceFilter.java | 3 +- .../filters/specific/DurationSumFilter.java | 3 +- .../concepts/filters/specific/FlagFilter.java | 3 +- .../filters/specific/NumberFilter.java | 3 +- .../filters/specific/PrefixTextFilter.java | 3 +- .../specific/QuartersInYearFilter.java | 3 +- .../filters/specific/SelectFilter.java | 5 +-- .../concepts/filters/specific/SumFilter.java | 3 +- .../resources/api/ConceptsProcessor.java | 7 ++-- .../integration/json/filter/FilterTest.java | 2 +- .../concepts/filters/TestGroupFilter.java | 3 +- .../frontend/FilterSearchItemTest.java | 3 +- 18 files changed, 69 insertions(+), 45 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/FilterValue.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/FilterValue.java index f3d2a1de35..11bd78c789 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/FilterValue.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/FilterValue.java @@ -191,7 +191,7 @@ public void resolve(QueryResolveContext context) { * Values of group filters can have an arbitrary format which is set by the filter itself. * Hence, we treat the value for the filter as Object.class. *

- * The resolved filter instructs the frontend on how to render and serialize the filter value using the {@link Filter#createFrontendConfig()} method. The filter must implement {@link GroupFilter} and provide the type information of the value to correctly deserialize the received object. + * The resolved filter instructs the frontend on how to render and serialize the filter value using the {@link Filter#createFrontendConfig(com.bakdata.conquery.models.config.ConqueryConfig)} method. The filter must implement {@link GroupFilter} and provide the type information of the value to correctly deserialize the received object. */ public static class GroupFilterDeserializer extends StdDeserializer { private final NsIdReferenceDeserializer> nsIdDeserializer = new NsIdReferenceDeserializer<>(Filter.class, null, FilterId.class); diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/FrontendConfig.java b/backend/src/main/java/com/bakdata/conquery/models/config/FrontendConfig.java index 1bbfcddd56..e2883326f2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/FrontendConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/FrontendConfig.java @@ -60,6 +60,11 @@ public class FrontendConfig { @Email private String contactEmail; + /** + * If true, users are always allowed to add custom values into SelectFilter input fields. + */ + private boolean alwaysAllowCreateValue = false; + @Data public static class CurrencyConfig { diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/FrontEndConceptBuilder.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/FrontEndConceptBuilder.java index fee4d75bda..d89c285838 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/FrontEndConceptBuilder.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/FrontEndConceptBuilder.java @@ -23,6 +23,7 @@ import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.select.Select; @@ -35,17 +36,19 @@ import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptTreeChildId; import com.bakdata.conquery.models.identifiable.ids.specific.StructureNodeId; -import lombok.AllArgsConstructor; +import lombok.Data; import lombok.extern.slf4j.Slf4j; /** * This class constructs the concept tree as it is presented to the front end. 
*/ -@AllArgsConstructor +@Data @Slf4j public class FrontEndConceptBuilder { - public static FrontendRoot createRoot(NamespaceStorage storage, Subject subject) { + private final ConqueryConfig conqueryConfig; + + public FrontendRoot createRoot(NamespaceStorage storage, Subject subject) { final FrontendRoot root = new FrontendRoot(); final Map, FrontendNode> roots = root.getConcepts(); @@ -95,7 +98,7 @@ public static FrontendRoot createRoot(NamespaceStorage storage, Subject subject) return root; } - private static FrontendNode createConceptRoot(Concept concept, StructureNode[] structureNodes) { + private FrontendNode createConceptRoot(Concept concept, StructureNode[] structureNodes) { final MatchingStats matchingStats = concept.getMatchingStats(); @@ -121,8 +124,8 @@ private static FrontendNode createConceptRoot(Concept concept, StructureNode[ .flatMap(Collection::stream) .findAny() .isEmpty()) - .selects(concept.getSelects().stream().map(FrontEndConceptBuilder::createSelect).collect(Collectors.toList())) - .tables(concept.getConnectors().stream().map(FrontEndConceptBuilder::createTable).collect(Collectors.toList())) + .selects(concept.getSelects().stream().map(this::createSelect).collect(Collectors.toList())) + .tables(concept.getConnectors().stream().map(this::createTable).collect(Collectors.toList())) .build(); if (concept instanceof ConceptTreeNode tree && tree.getChildren() != null) { @@ -132,7 +135,7 @@ private static FrontendNode createConceptRoot(Concept concept, StructureNode[ } @Nullable - private static FrontendNode createStructureNode(StructureNode structureNode, Map, FrontendNode> roots) { + private FrontendNode createStructureNode(StructureNode structureNode, Map, FrontendNode> roots) { final List unstructured = new ArrayList<>(); for (ConceptId id : structureNode.getContainedRoots()) { if (!roots.containsKey(id)) { @@ -158,7 +161,7 @@ private static FrontendNode createStructureNode(StructureNode structureNode, Map .build(); } - public static FrontendSelect createSelect(Select select) { + public FrontendSelect createSelect(Select select) { return FrontendSelect.builder() .id(select.getId()) .label(select.getLabel()) @@ -168,7 +171,7 @@ public static FrontendSelect createSelect(Select select) { .build(); } - public static FrontendTable createTable(Connector con) { + public FrontendTable createTable(Connector con) { final FrontendTable result = FrontendTable.builder() @@ -176,8 +179,8 @@ public static FrontendTable createTable(Connector con) { .connectorId(con.getId()) .label(con.getLabel()) .isDefault(con.isDefault()) - .filters(con.collectAllFilters().stream().map(FrontEndConceptBuilder::createFilter).collect(Collectors.toList())) - .selects(con.getSelects().stream().map(FrontEndConceptBuilder::createSelect).collect(Collectors.toList())) + .filters(con.collectAllFilters().stream().map(this::createFilter).collect(Collectors.toList())) + .selects(con.getSelects().stream().map(this::createSelect).collect(Collectors.toList())) .supportedSecondaryIds(Arrays.stream(con.getTable().getColumns()) .map(Column::getSecondaryId) .filter(Objects::nonNull) @@ -199,16 +202,16 @@ public static FrontendTable createTable(Connector con) { return result; } - public static FrontendFilterConfiguration.Top createFilter(Filter filter) { + public FrontendFilterConfiguration.Top createFilter(Filter filter) { try { - return filter.createFrontendConfig(); + return filter.createFrontendConfig(conqueryConfig); } catch (ConceptConfigurationException e) { throw new IllegalStateException(e); } } - private 
static FrontendNode createCTNode(ConceptElement ce) { + private FrontendNode createCTNode(ConceptElement ce) { final MatchingStats matchingStats = ce.getMatchingStats(); FrontendNode.FrontendNodeBuilder nodeBuilder = FrontendNode.builder() .active(null) @@ -248,13 +251,13 @@ private static FrontendNode createCTNode(ConceptElement ce) { return n; } - public static FrontendList createTreeMap(Concept concept) { + public FrontendList createTreeMap(Concept concept) { final FrontendList map = new FrontendList(); fillTreeMap(concept, map); return map; } - private static void fillTreeMap(ConceptElement ce, FrontendList map) { + private void fillTreeMap(ConceptElement ce, FrontendList map) { map.add(ce.getId(), createCTNode(ce)); if (ce instanceof ConceptTreeNode && ((ConceptTreeNode) ce).getChildren() != null) { for (ConceptTreeChild c : ((ConceptTreeNode) ce).getChildren()) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/Filter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/Filter.java index 4447e29f9f..c9949d085e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/Filter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/Filter.java @@ -4,6 +4,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendFilterConfiguration; import com.bakdata.conquery.io.cps.CPSBase; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Connector; @@ -52,21 +53,21 @@ public Dataset getDataset() { return getConnector().getDataset(); } - public FrontendFilterConfiguration.Top createFrontendConfig() throws ConceptConfigurationException { - FrontendFilterConfiguration.Top f = FrontendFilterConfiguration.Top.builder() - .id(getId()) - .label(getLabel()) - .tooltip(getTooltip()) - .unit(getUnit()) - .allowDropFile(getAllowDropFile()) - .pattern(getPattern()) - .defaultValue(getDefaultValue()) - .build(); - configureFrontend(f); + public FrontendFilterConfiguration.Top createFrontendConfig(ConqueryConfig conqueryConfig) throws ConceptConfigurationException { + final FrontendFilterConfiguration.Top f = FrontendFilterConfiguration.Top.builder() + .id(getId()) + .label(getLabel()) + .tooltip(getTooltip()) + .unit(getUnit()) + .allowDropFile(getAllowDropFile()) + .pattern(getPattern()) + .defaultValue(getDefaultValue()) + .build(); + configureFrontend(f, conqueryConfig); return f; } - protected abstract void configureFrontend(FrontendFilterConfiguration.Top f) throws ConceptConfigurationException; + protected abstract void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException; @JsonIgnore public abstract List getRequiredColumns(); diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountFilter.java index 845648ad90..e45f0d7c9b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountFilter.java @@ -12,6 +12,7 @@ import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import 
com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.query.filter.RangeFilterNode; @@ -36,7 +37,7 @@ public class CountFilter extends Filter { private boolean distinct; @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) { f.setType(FrontendFilterType.Fields.INTEGER_RANGE); f.setMin(1); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountQuartersFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountQuartersFilter.java index b0997ffc3c..41d42f9ae3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountQuartersFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/CountQuartersFilter.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; import com.bakdata.conquery.models.events.MajorTypeId; @@ -26,7 +27,7 @@ public EnumSet getAcceptedColumnTypes() { } @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) { f.setType(FrontendFilterType.Fields.INTEGER_RANGE); f.setMin(1); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DateDistanceFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DateDistanceFilter.java index c811b797af..ecad117745 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DateDistanceFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DateDistanceFilter.java @@ -9,6 +9,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; import com.bakdata.conquery.models.events.MajorTypeId; @@ -35,7 +36,7 @@ public EnumSet getAcceptedColumnTypes() { } @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) throws ConceptConfigurationException { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException { if (getColumn().getType() != MajorTypeId.DATE) { throw new ConceptConfigurationException(getConnector(), "DATE_DISTANCE filter is incompatible with columns of type " + getColumn().getType()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DurationSumFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DurationSumFilter.java index 
09087ee70d..cfc180d3ff 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DurationSumFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/DurationSumFilter.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; import com.bakdata.conquery.models.events.MajorTypeId; @@ -29,7 +30,7 @@ public EnumSet getAcceptedColumnTypes() { } @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) throws ConceptConfigurationException { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException { if (getColumn().getType() != MajorTypeId.DATE_RANGE) { throw new ConceptConfigurationException(getConnector(), "DURATION_SUM filter is incompatible with columns of type " + getColumn().getType()); diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/FlagFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/FlagFilter.java index 324ecc22da..0a5c2598ba 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/FlagFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/FlagFilter.java @@ -11,6 +11,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendValue; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.error.ConqueryError; @@ -39,7 +40,7 @@ public class FlagFilter extends Filter { private final Map flags; @Override - protected void configureFrontend(FrontendFilterConfiguration.Top f) throws ConceptConfigurationException { + protected void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException { f.setType(FrontendFilterType.Fields.MULTI_SELECT); f.setOptions(flags.keySet().stream().map(key -> new FrontendValue(key, key)).toList()); diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/NumberFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/NumberFilter.java index 0040baab68..1894783631 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/NumberFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/NumberFilter.java @@ -7,6 +7,7 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.common.IRange; import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; @@ -29,7 +30,7 @@ public class NumberFilter> extends 
SingleColumnFilter { @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) throws ConceptConfigurationException { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException { final String type = switch (getColumn().getType()) { case MONEY -> FrontendFilterType.Fields.MONEY_RANGE; case INTEGER -> FrontendFilterType.Fields.INTEGER_RANGE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/PrefixTextFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/PrefixTextFilter.java index bd5e83db3e..02480ea143 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/PrefixTextFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/PrefixTextFilter.java @@ -5,6 +5,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendFilterConfiguration; import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; import com.bakdata.conquery.models.events.MajorTypeId; @@ -20,7 +21,7 @@ public class PrefixTextFilter extends SingleColumnFilter { @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) { f.setType(FrontendFilterType.Fields.STRING); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/QuartersInYearFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/QuartersInYearFilter.java index 11bc8c36fa..1dac22cb0a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/QuartersInYearFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/QuartersInYearFilter.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; import com.bakdata.conquery.models.events.MajorTypeId; @@ -24,7 +25,7 @@ public EnumSet getAcceptedColumnTypes() { } @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) { f.setType(FrontendFilterType.Fields.INTEGER_RANGE); f.setMin(1); f.setMax(4); diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SelectFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SelectFilter.java index 8a547e5308..947edb2fcd 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SelectFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SelectFilter.java @@ -12,6 +12,7 @@ import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.io.jackson.serializer.NsIdRef; 
import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.IndexConfig; import com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; @@ -56,12 +57,12 @@ public EnumSet getAcceptedColumnTypes() { } @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) throws ConceptConfigurationException { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException { f.setTemplate(getTemplate()); f.setType(getFilterType()); // If either not searches are available or all are disabled, we allow users to supply their own values - f.setCreatable(getSearchReferences().stream().noneMatch(Predicate.not(Searchable::isSearchDisabled))); + f.setCreatable(conqueryConfig.getFrontend().isAlwaysAllowCreateValue() || getSearchReferences().stream().noneMatch(Predicate.not(Searchable::isSearchDisabled))); f.setOptions(collectLabels()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SumFilter.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SumFilter.java index c89db064ba..643e8e8807 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SumFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/filters/specific/SumFilter.java @@ -14,6 +14,7 @@ import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.common.IRange; import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.events.MajorTypeId; @@ -57,7 +58,7 @@ public class SumFilter> extends Filter private List distinctByColumn = Collections.emptyList(); @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) throws ConceptConfigurationException { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) throws ConceptConfigurationException { final String type = switch (getColumn().getType()) { case MONEY -> FrontendFilterType.Fields.MONEY_RANGE; case INTEGER -> FrontendFilterType.Fields.INTEGER_RANGE; diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java index ab877f9b41..d2c1ebac6b 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java @@ -65,11 +65,14 @@ public class ConceptsProcessor { private final ConqueryConfig config; + @Getter(lazy = true) + private final FrontEndConceptBuilder frontEndConceptBuilder = new FrontEndConceptBuilder(getConfig()); + private final LoadingCache, FrontendList> nodeCache = CacheBuilder.newBuilder().softValues().expireAfterWrite(10, TimeUnit.MINUTES).build(new CacheLoader<>() { @Override public FrontendList load(Concept concept) { - return FrontEndConceptBuilder.createTreeMap(concept); + return getFrontEndConceptBuilder().createTreeMap(concept); } }); @@ -106,7 +109,7 @@ public CursorAndLength load(Searchable searchable) { public FrontendRoot 
getRoot(NamespaceStorage storage, Subject subject) { - final FrontendRoot root = FrontEndConceptBuilder.createRoot(storage, subject); + final FrontendRoot root = getFrontEndConceptBuilder().createRoot(storage, subject); // Report Violation ValidatorHelper.createViolationsString(validator.validate(root), log.isTraceEnabled()).ifPresent(log::warn); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java index 063f4f3e81..2bdfbb7708 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java @@ -172,7 +172,7 @@ public Query getQuery() { @Override public void executeTest(StandaloneSupport standaloneSupport) throws IOException { try { - final FrontendFilterConfiguration.Top actual = connector.getFilters().iterator().next().createFrontendConfig(); + final FrontendFilterConfiguration.Top actual = connector.getFilters().iterator().next().createFrontendConfig(standaloneSupport.getConfig()); if (expectedFrontendConfig != null) { log.info("Checking actual FrontendConfig: {}", actual); diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java index ddccaf3428..bcdd7a33e6 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java @@ -12,6 +12,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.filters.specific.QueryContextResolvable; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.filter.event.MultiSelectFilterNode; @@ -27,7 +28,7 @@ public class TestGroupFilter extends SingleColumnFilter implements GroupFilter { @Override - public void configureFrontend(FrontendFilterConfiguration.Top f) { + public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig conqueryConfig) { f.setType(FrontendFilterType.Fields.GROUP); f.setFilters(getFEFilter()); } diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java index 76d7fe7b25..d6b263ec37 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendTable; import com.bakdata.conquery.apiv1.frontend.FrontendValue; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; @@ -67,7 +68,7 @@ public void sortedValidityDates() { connector.setColumn(column); connector.setConcept(concept); connector.setValidityDates(validityDates); - FrontendTable feTable = 
FrontEndConceptBuilder.createTable(connector); + FrontendTable feTable = new FrontEndConceptBuilder(new ConqueryConfig()).createTable(connector); assertThat(feTable.getDateColumn().getOptions()).containsExactly( new FrontendValue(val0.getId().toString(), "val0"),