From 489eb09fa61aedec9c12fc22852e18892a3a7ea2 Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 22:56:30 +0000 Subject: [PATCH 01/10] Update Questionnaire fixed_ema/fixed_ema_armt.json --- questionnaires/fixed_ema/fixed_ema_armt.json | 767 +++++++++++++++++++ 1 file changed, 767 insertions(+) create mode 100644 questionnaires/fixed_ema/fixed_ema_armt.json diff --git a/questionnaires/fixed_ema/fixed_ema_armt.json b/questionnaires/fixed_ema/fixed_ema_armt.json new file mode 100644 index 00000000..928ddbe6 --- /dev/null +++ b/questionnaires/fixed_ema/fixed_ema_armt.json @@ -0,0 +1,767 @@ +[ + { + "field_name": "fixedq1a", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Have you used cocaine in the past 24 hours?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "y", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixed", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "descriptive", + "field_label": "In this next section please complete for one cocaine use event at a time. 
Please keep adding as many events as needed.", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq2a", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "dropdown", + "field_label": "What type of cocaine?", + "select_choices_or_calculations": [ + { + "code": "1", + "label": "Powder" + }, + { + "code": "2", + "label": "Crack" + } + ], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq2b", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "dropdown", + "field_label": "How did you use?", + "select_choices_or_calculations": [ + { + "code": "1", + "label": "Snort" + }, + { + "code": "2", + "label": "Smoke" + }, + { + "code": "3", + "label": "Inject" + } + ], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq2c", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "text", + "field_label": "When did you use?", + "select_choices_or_calculations": "", + "field_note": "", + 
"text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq2d", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "text", + "field_label": "How much did you use?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq2e", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "text", + "field_label": "Why did you use?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq2f", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Did you use other substances at the same time of this cocaine event?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + 
"evaluated_logic": "" + }, + { + "field_name": "fixedq2g", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Were you craving cocaine before using?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq2h", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how much did you want it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2i", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how much did you need it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2j", + "form_name": "fixed_ema", + "section_header": "", + 
"field_type": "slider", + "field_label": "At the time of cocaine craving, how strong was the urge to have it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2k", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how vividly did you picture it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2l", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how vividly did you imagine its taste?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2m", + "form_name": "fixed_ema", + "section_header": 
"", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how vividly did you smell it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2n", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how vividly did you imagine what it would feel like in your mouth/throat/body?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2o", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how hard were you trying to not think about it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": 
"fixedq2p", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how intrusive were the thoughts?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixedq2q", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time of cocaine craving, how hard was it to think about anything else?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "fixed_repeat", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "descriptive", + "field_label": "HERE NEEDS SOME KIND OF OPTION TO ADD A NEW EVENT AND GO THROUGH ALL Q2A-Q QUESTIONS AGAIN?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq3a", + "form_name": 
"fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Have you used any other substances in the past 24 hours? This includes any other drugs and alcohol.", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4a", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Alcohol", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4b", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "text", + "field_label": "What alcohol (e.g. 
wine, beer, spirits):", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4c", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Heroin (or other opioids):", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4d", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Cannabis", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4e", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Hallucinogenic:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", 
+ "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4f", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Ketamine:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4g", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "MDMA:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4h", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Mushrooms:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4i", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Steroids:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + 
"branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4j", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Prescription medication (taken not as prescribed):", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq4k", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "text", + "field_label": "Any other substances:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "fixedq5", + "form_name": "fixed_ema", + "section_header": "", + "field_type": "text", + "field_label": "Anything else you would like to add:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + } +] \ No newline at end of file From e0113ddb2cf86ab49bc468994a07cf1759ae74d1 Mon Sep 17 00:00:00 2001 
From: Pauline Conde Date: Wed, 8 Jan 2025 22:57:06 +0000 Subject: [PATCH 02/10] Update Questionnaire random_ema/random_ema_armt.json --- .../random_ema/random_ema_armt.json | 325 ++++++++++++++++++ 1 file changed, 325 insertions(+) create mode 100644 questionnaires/random_ema/random_ema_armt.json diff --git a/questionnaires/random_ema/random_ema_armt.json b/questionnaires/random_ema/random_ema_armt.json new file mode 100644 index 00000000..46ccb1d4 --- /dev/null +++ b/questionnaires/random_ema/random_ema_armt.json @@ -0,0 +1,325 @@ +[ + { + "field_name": "randomq1a", + "form_name": "random_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Have you experienced cocaine craving in the past 3 hours?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "y", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "randomq2a", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how much did you want it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2b", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how much did you need it?", + "select_choices_or_calculations": [], + "field_note": "", 
+ "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2c", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how strong was the urge to have it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2d", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how vividly did you picture it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2e", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how vividly did you imagine its taste?", + "select_choices_or_calculations": [], + "field_note": "", + 
"text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2f", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how vividly did you imagine its smell?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2g", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how vividly did you imagine what it would feel like in your mouth/throat/body?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2h", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how hard were you trying not to think about it?", + "select_choices_or_calculations": [], + 
"field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2i", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how intrusive were the thoughts?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq2j", + "form_name": "random_ema", + "section_header": "", + "field_type": "slider", + "field_label": "At the time, how hard was it to think about anything else?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "randomq3", + "form_name": "random_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Have you used cocaine in the past 3 hours?", + "select_choices_or_calculations": "", + "field_note": "", + 
"text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "randomq4", + "form_name": "random_ema", + "section_header": "", + "field_type": "notes", + "field_label": "Anything else you would like to add:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + } +] \ No newline at end of file From 54355e201f311e3eb0616294204f11cb04fcc478 Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 22:57:40 +0000 Subject: [PATCH 03/10] Update Questionnaire user_initiated_craving_ema/user_initiated_craving_ema_armt.json --- .../user_initiated_craving_ema_armt.json | 325 ++++++++++++++++++ 1 file changed, 325 insertions(+) create mode 100644 questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json diff --git a/questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json b/questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json new file mode 100644 index 00000000..7cddb6fc --- /dev/null +++ b/questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json @@ -0,0 +1,325 @@ +[ + { + "field_name": "user", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "descriptive", + "field_label": "REMINDER HERE TO TAG THIS EVENT ON THE EMPATICA DEVICE (IF HAVEN'T ALREADY)", + "select_choices_or_calculations": "", + "field_note": "", + 
"text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "userq1", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Are you experiencing cocaine craving right now?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "y", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "userq2a", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how much do you want it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2b", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how much do you need it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + 
"custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2c", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "How strong is the urge to have it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2d", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how vividly do you picture it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2e", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how vividly do you imagine its taste?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": 
"RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2f", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how vividly do you imagine its smell?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2g", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how vividly do you imagine what it would feel like in your mouth/throat/body?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2h", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how hard are you trying not to think about it?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": 
"", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq2i", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how intrusive are the thoughts?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq3j", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "slider", + "field_label": "Right now, how hard is it to think about anything else?", + "select_choices_or_calculations": [], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "range": { + "min": "Notatall", + "max": "Extremely", + "step": 1 + }, + "evaluated_logic": "" + }, + { + "field_name": "userq3", + "form_name": "user_initiated_craving_ema", + "section_header": "", + "field_type": "notes", + "field_label": "Anything else you would like to add:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + 
"required_field": "", + "custom_alignment": "RH", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + } +] \ No newline at end of file From 224f445727ca2a256a72cae1abba65443d16beb9 Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 23:03:28 +0000 Subject: [PATCH 04/10] Update Questionnaire user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json --- .../user_initiated_cocaine_use_ema_armt.json | 150 ++++++++++++++++++ 1 file changed, 150 insertions(+) create mode 100644 questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json diff --git a/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json b/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json new file mode 100644 index 00000000..c1e5d78e --- /dev/null +++ b/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json @@ -0,0 +1,150 @@ +[ + { + "field_name": "useruseq1", + "form_name": "user_initiated_cocaine_use_ema", + "section_header": "", + "field_type": "yesno", + "field_label": "Have you just used cocaine?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "useruseq2a", + "form_name": "user_initiated_cocaine_use_ema", + "section_header": "", + "field_type": "dropdown", + "field_label": "What type of cocaine?", + "select_choices_or_calculations": [ + { + "code": "1", + "label": "Powder" + }, + { + "code": "2", + "label": "Crack" + } + ], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + 
"text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "useruseq2b", + "form_name": "user_initiated_cocaine_use_ema", + "section_header": "", + "field_type": "dropdown", + "field_label": "How did you use?", + "select_choices_or_calculations": [ + { + "code": "1", + "label": "Snort" + }, + { + "code": "2", + "label": "Smoke" + }, + { + "code": "3", + "label": "Inject" + } + ], + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "useruseq2c", + "form_name": "user_initiated_cocaine_use_ema", + "section_header": "", + "field_type": "text", + "field_label": "How much did you use?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "useruseq2d", + "form_name": "user_initiated_cocaine_use_ema", + "section_header": "", + "field_type": "text", + "field_label": "Why did you use?", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + 
"question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + }, + { + "field_name": "useruseq3", + "form_name": "user_initiated_cocaine_use_ema", + "section_header": "", + "field_type": "text", + "field_label": "Anything else you would like to add:", + "select_choices_or_calculations": "", + "field_note": "", + "text_validation_type_or_show_slider_number": "", + "text_validation_min": "", + "text_validation_max": "", + "identifier": "", + "branching_logic": "", + "required_field": "", + "custom_alignment": "", + "question_number": "", + "matrix_group_name": "", + "matrix_ranking": "", + "field_annotation": "", + "evaluated_logic": "" + } +] \ No newline at end of file From 486e205aad38750d2b4379a8eba0feb59114887d Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 23:06:01 +0000 Subject: [PATCH 05/10] Update Questionnaire user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json --- .../user_initiated_cocaine_use_ema_armt.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json b/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json index c1e5d78e..579e198c 100644 --- a/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json +++ b/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json @@ -24,7 +24,7 @@ "field_name": "useruseq2a", "form_name": "user_initiated_cocaine_use_ema", "section_header": "", - "field_type": "dropdown", + "field_type": "radio", "field_label": "What type of cocaine?", "select_choices_or_calculations": [ { @@ -54,7 +54,7 @@ "field_name": "useruseq2b", "form_name": "user_initiated_cocaine_use_ema", "section_header": "", - "field_type": "dropdown", + "field_type": "radio", "field_label": "How did you use?", "select_choices_or_calculations": [ { From 
c79f7484d9bc23e80c4c5ffd557d1491aa5559cf Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 23:06:11 +0000 Subject: [PATCH 06/10] Update Questionnaire fixed_ema/fixed_ema_armt.json --- questionnaires/fixed_ema/fixed_ema_armt.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/questionnaires/fixed_ema/fixed_ema_armt.json b/questionnaires/fixed_ema/fixed_ema_armt.json index 928ddbe6..ae1f81fe 100644 --- a/questionnaires/fixed_ema/fixed_ema_armt.json +++ b/questionnaires/fixed_ema/fixed_ema_armt.json @@ -45,7 +45,7 @@ "field_name": "fixedq2a", "form_name": "fixed_ema", "section_header": "", - "field_type": "dropdown", + "field_type": "radio", "field_label": "What type of cocaine?", "select_choices_or_calculations": [ { @@ -75,7 +75,7 @@ "field_name": "fixedq2b", "form_name": "fixed_ema", "section_header": "", - "field_type": "dropdown", + "field_type": "radio", "field_label": "How did you use?", "select_choices_or_calculations": [ { From 0a2d1e356a86205bff670790f4f9b53f5b3104c4 Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 23:21:22 +0000 Subject: [PATCH 07/10] Update Questionnaire fixed_ema/fixed_ema_armt.json --- questionnaires/fixed_ema/fixed_ema_armt.json | 141 ++++++++----------- 1 file changed, 60 insertions(+), 81 deletions(-) diff --git a/questionnaires/fixed_ema/fixed_ema_armt.json b/questionnaires/fixed_ema/fixed_ema_armt.json index ae1f81fe..eae54ed1 100644 --- a/questionnaires/fixed_ema/fixed_ema_armt.json +++ b/questionnaires/fixed_ema/fixed_ema_armt.json @@ -32,14 +32,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2a", @@ -62,14 +62,14 @@ "text_validation_min": 
"", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2b", @@ -96,14 +96,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2c", @@ -117,14 +117,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2d", @@ -138,14 +138,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2e", @@ -159,14 +159,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2f", @@ -180,14 
+180,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2g", @@ -201,14 +201,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq2h", @@ -222,7 +222,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -234,7 +234,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2i", @@ -248,7 +248,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -260,7 +260,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2j", @@ -274,7 +274,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -286,7 +286,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2k", @@ -300,7 +300,7 @@ 
"text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -312,7 +312,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2l", @@ -326,7 +326,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -338,7 +338,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2m", @@ -352,7 +352,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -364,7 +364,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2n", @@ -378,7 +378,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -390,7 +390,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2o", @@ -404,7 +404,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -416,7 +416,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2p", @@ -430,7 +430,7 @@ "text_validation_min": "", "text_validation_max": "", 
"identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -442,7 +442,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq2q", @@ -456,7 +456,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq2g] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -468,28 +468,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" - }, - { - "field_name": "fixed_repeat", - "form_name": "fixed_ema", - "section_header": "", - "field_type": "descriptive", - "field_label": "HERE NEEDS SOME KIND OF OPTION TO ADD A NEW EVENT AND GO THROUGH ALL Q2A-Q QUESTIONS AGAIN?", - "select_choices_or_calculations": "", - "field_note": "", - "text_validation_type_or_show_slider_number": "", - "text_validation_min": "", - "text_validation_max": "", - "identifier": "", - "branching_logic": "", - "required_field": "", - "custom_alignment": "", - "question_number": "", - "matrix_group_name": "", - "matrix_ranking": "", - "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq2g'] == '1'" }, { "field_name": "fixedq3a", @@ -503,14 +482,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" }, { "field_name": "fixedq4a", @@ -524,14 +503,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", 
"matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4b", @@ -545,14 +524,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq4a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq4a'] == '1'" }, { "field_name": "fixedq4c", @@ -566,14 +545,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4d", @@ -587,14 +566,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4e", @@ -608,14 +587,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4f", @@ -629,14 +608,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", 
"question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4g", @@ -650,14 +629,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4h", @@ -671,14 +650,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4i", @@ -713,14 +692,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq4k", @@ -734,14 +713,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq3a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq3a'] == '1'" }, { "field_name": "fixedq5", @@ -755,13 +734,13 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[fixedq1a] = '1'", 
"required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['fixedq1a'] == '1'" } ] \ No newline at end of file From 7aa8fa00d7b7767c705fae2412d948ee1b837d91 Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 23:27:39 +0000 Subject: [PATCH 08/10] Update Questionnaire user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json --- .../user_initiated_cocaine_use_ema_armt.json | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json b/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json index 579e198c..8fa024e7 100644 --- a/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json +++ b/questionnaires/user_initiated_cocaine_use_ema/user_initiated_cocaine_use_ema_armt.json @@ -41,14 +41,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[useruseq1] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['useruseq1'] == '1'" }, { "field_name": "useruseq2b", @@ -75,14 +75,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[useruseq1] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['useruseq1'] == '1'" }, { "field_name": "useruseq2c", @@ -96,14 +96,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[useruseq1] = 
'1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['useruseq1'] == '1'" }, { "field_name": "useruseq2d", @@ -117,14 +117,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[useruseq1] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['useruseq1'] == '1'" }, { "field_name": "useruseq3", @@ -138,13 +138,13 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[useruseq1] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['useruseq1'] == '1'" } ] \ No newline at end of file From 4b4b5110504e50d195959eb9fda4aa52fafe9000 Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 23:27:49 +0000 Subject: [PATCH 09/10] Update Questionnaire random_ema/random_ema_armt.json --- .../random_ema/random_ema_armt.json | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/questionnaires/random_ema/random_ema_armt.json b/questionnaires/random_ema/random_ema_armt.json index 46ccb1d4..dfe6a88b 100644 --- a/questionnaires/random_ema/random_ema_armt.json +++ b/questionnaires/random_ema/random_ema_armt.json @@ -32,7 +32,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -44,7 +44,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" 
}, { "field_name": "randomq2b", @@ -58,7 +58,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -70,7 +70,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq2c", @@ -84,7 +84,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -96,7 +96,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq2d", @@ -110,7 +110,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -122,7 +122,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq2e", @@ -136,7 +136,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -148,7 +148,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq2f", @@ -162,7 +162,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -174,7 +174,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "random2g", @@ -188,7 +188,7 
@@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -200,7 +200,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq2h", @@ -214,7 +214,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -226,7 +226,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq2i", @@ -240,7 +240,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -252,7 +252,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq2j", @@ -266,7 +266,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -278,7 +278,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq3", @@ -292,14 +292,14 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" }, { "field_name": "randomq4", @@ -313,13 +313,13 @@ 
"text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[randomq1a] = '1'", "required_field": "", "custom_alignment": "", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + "evaluated_logic": "responses['randomq1a'] == '1'" } ] \ No newline at end of file From a831210359a99db1c7acc9afe07fa9d45fea50a1 Mon Sep 17 00:00:00 2001 From: Pauline Conde Date: Wed, 8 Jan 2025 23:28:00 +0000 Subject: [PATCH 10/10] Update Questionnaire user_initiated_craving_ema/user_initiated_craving_ema_armt.json --- .../user_initiated_craving_ema_armt.json | 65 +++++++------------ 1 file changed, 22 insertions(+), 43 deletions(-) diff --git a/questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json b/questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json index 7cddb6fc..f9915090 100644 --- a/questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json +++ b/questionnaires/user_initiated_craving_ema/user_initiated_craving_ema_armt.json @@ -1,25 +1,4 @@ [ - { - "field_name": "user", - "form_name": "user_initiated_craving_ema", - "section_header": "", - "field_type": "descriptive", - "field_label": "REMINDER HERE TO TAG THIS EVENT ON THE EMPATICA DEVICE (IF HAVEN'T ALREADY)", - "select_choices_or_calculations": "", - "field_note": "", - "text_validation_type_or_show_slider_number": "", - "text_validation_min": "", - "text_validation_max": "", - "identifier": "", - "branching_logic": "", - "required_field": "", - "custom_alignment": "", - "question_number": "", - "matrix_group_name": "", - "matrix_ranking": "", - "field_annotation": "", - "evaluated_logic": "" - }, { "field_name": "userq1", "form_name": "user_initiated_craving_ema", @@ -53,7 +32,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": 
"", "custom_alignment": "RH", "question_number": "", @@ -65,7 +44,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2b", @@ -79,7 +58,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -91,7 +70,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2c", @@ -105,7 +84,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -117,7 +96,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2d", @@ -131,7 +110,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -143,7 +122,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2e", @@ -157,7 +136,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -169,7 +148,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2f", @@ -183,7 +162,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -195,7 +174,7 @@ "max": 
"Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2g", @@ -209,7 +188,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -221,7 +200,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2h", @@ -235,7 +214,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -247,7 +226,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq2i", @@ -261,7 +240,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -273,7 +252,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq3j", @@ -287,7 +266,7 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", @@ -299,7 +278,7 @@ "max": "Extremely", "step": 1 }, - "evaluated_logic": "" + "evaluated_logic": "responses['userq1'] == '1'" }, { "field_name": "userq3", @@ -313,13 +292,13 @@ "text_validation_min": "", "text_validation_max": "", "identifier": "", - "branching_logic": "", + "branching_logic": "[userq1] = '1'", "required_field": "", "custom_alignment": "RH", "question_number": "", "matrix_group_name": "", "matrix_ranking": "", "field_annotation": "", - "evaluated_logic": "" + 
"evaluated_logic": "responses['userq1'] == '1'" } ] \ No newline at end of file