itemengine-cypress-automation 1.0.580-ITEM-1388-9c911b1.0 → 1.0.581-IEI-7178-2371612.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,24 +7,14 @@ describe('Get Question Data API cases', () => {
 
  before(() => {
  cy.loginAs('admin');
- cy.deleteItem('DataApiGetQuestionTest1');
- cy.wait(5000);
- cy.deleteItem('DataApiGetQuestionTest2');
- cy.wait(5000);
  cy.createItem('DataApiGetQuestionTest1');
- cy.wait(5000);
+ cy.wait(5000)
  cy.createItem('DataApiGetQuestionTest2');
- cy.wait(5000);
  cy.visit(`${Cypress.env('itemEngineHomePage')}`);
  });
 
  after(() => {
- cy.deleteItem('DataApiGetQuestionTest1');
- cy.wait(5000);
- cy.deleteItem('DataApiGetQuestionTest2');
- cy.wait(5000);
  cy.deleteItems();
- cy.wait(5000);
  cy.logout();
  });
 
@@ -165,7 +155,7 @@ describe('Get Question Data API cases', () => {
  .should('have.text', '200');
  cy.get('pre[class="microlight"]')
  .eq(1)
- .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": \"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": 
\"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ]\n }\n}\n`);
+ .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice 
identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ]\n }\n}\n`);
  });
  });
 
@@ -203,7 +193,7 @@ describe('Get Question Data API cases', () => {
  .should('have.text', '200');
  cy.get('pre[class="microlight"]')
  .eq(1)
- .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": \"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": 
\"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ]\n }\n}\n`);
+ .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice 
identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ]\n }\n}\n`);
  });
  });
 
@@ -241,7 +231,7 @@ describe('Get Question Data API cases', () => {
  .should('have.text', '200');
  cy.get('pre[class="microlight"]')
  .eq(1)
- .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": \"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": 
\"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ]\n }\n}\n`);
+ .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice 
identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ]\n }\n}\n`);
  });
  });
 
@@ -284,7 +274,7 @@ describe('Get Question Data API cases', () => {
  .should('have.text', '200');
  cy.get('pre[class="microlight"]')
  .eq(1)
- .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": \"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": 
\"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ]\n }\n}\n`);
+ .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice 
identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ]\n }\n}\n`);
  });
  });
 
@@ -380,14 +370,14 @@ describe('Get Question Data API cases', () => {
  .should('have.text', '200');
  cy.get('pre[class="microlight"]')
  .eq(1)
- .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": \"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 10,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"\",\n \"content\": {\n \"instruction\": 
\"test\",\n \"acknowledgements\": \"\",\n \"sample_answer\": \"\"\n },\n \"options\": [\n {\n \"id\": \"ChoiceA\",\n \"label\": \"1\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceB\",\n \"label\": \"2\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceC\",\n \"label\": \"3\",\n \"locked\": false\n },\n {\n \"id\": \"ChoiceD\",\n \"label\": \"4\",\n \"locked\": false\n }\n ],\n \"correct_answer\": [\n {\n \"total_points\": 10,\n \"answers\": [\n {\n \"answer\": \"ChoiceA\",\n \"points\": 10,\n \"is_case_sensitive\": false\n }\n ]\n }\n ],\n \"scoring\": {\n \"points\": 10,\n \"scoring_type\": \"auto-scored\",\n \"sub_scoring_type\": \"allOrNothing\",\n \"penalty_points\": null,\n \"penalty_point_type\": \"noPenalty\",\n \"penalty_points_for_each\": null,\n \"min_score_type\": \"noMinScore\",\n \"min_score_points\": null,\n \"is_rounding_enabled\": true,\n \"rounding_type\": \"none\",\n \"is_negative_rounded\": true,\n \"scoring_guidance\": \"\"\n },\n \"settings\": {\n \"student_view_settings\": {\n \"shuffle_option\": false,\n \"position_lock\": false,\n \"allow_setting_min_or_max_num_of_answers\": false,\n \"min_num_answers\": null,\n \"max_num_answers\": null,\n \"allow_check_answer\": false,\n \"max_attempts_to_check_answer\": 0\n },\n \"additional_settings\": {\n \"orientation\": \"horizontal\",\n \"multiple_columns\": false,\n \"num_of_option_columns\": 1,\n \"font_size\": \"default\",\n \"alignment\": \"left\",\n \"multiple_selection\": true,\n \"option_style_type\": \"blockWithCheckBox\",\n \"option_numeration\": \"default\",\n \"non_accessible\": false\n },\n \"print_layout_settings\": {\n \"exclude_from_print\": false,\n \"print_instruction\": \"\"\n },\n \"tool_settings\": [\n {\n \"resource\": \"ruler\",\n \"enabled\": false\n },\n {\n \"resource\": \"protractor\",\n \"enabled\": false\n },\n {\n \"resource\": \"readingRuler\",\n \"enabled\": false\n },\n {\n \"resource\": \"simpleCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"scientificCalculator\",\n \"enabled\": false\n },\n {\n \"resource\": \"compass\",\n \"enabled\": false\n }\n ]\n }\n }\n ]\n }\n}\n`);
+ .should('have.text', `{\n \"meta\": {\n \"status\": true,\n \"records\": ${records},\n \"next_token\": ${next_token ? `[\n ${next_token[0]},\n \"${next_token[1]}\"\n ]` : null},\n \"timestamp\": ${timestamp}\n },\n \"data\": {\n \"DataApiGetQuestionTest1\": [\n {\n \"reference\": \"${ref1}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ],\n \"DataApiGetQuestionTest2\": [\n {\n \"reference\": \"${ref2}\",\n \"type\": \"multiple selection\",\n \"dt_created\": \"${dt_created}\",\n \"dt_updated\": \"${dt_updated}\",\n \"scoring_type\": \"Auto-scored\",\n \"points\": 2,\n \"penalty_points\": null,\n \"min_score_if_attempted\": null,\n \"min_score_type\": 1,\n \"penalty_point_type\": 1,\n \"is_auto_penalty_setting\": false,\n \"is_negative_rounded\": true,\n \"specific_penalty_type\": 1,\n \"penalty_points_for_each\": null,\n \"scoring_guidance\": \"\",\n \"sample_response\": \"\",\n \"data\": \"<assessmentItem xmlns=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xsi:schemaLocation=\\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\\" identifier=\\\"choiceMultiple\\\" title=\\\"Welcome to Biodome\\\" timeDependent=\\\"false\\\"><responseDeclaration identifier=\\\"RESPONSE\\\" cardinality=\\\"multiple\\\" baseType=\\\"identifier\\\"><correctResponse><value>ChoiceA</value><value>ChoiceB</value></correctResponse></responseDeclaration><outcomeDeclaration identifier=\\\"SCORE\\\" cardinality=\\\"single\\\" baseType=\\\"float\\\"/><itemBody><choiceInteraction responseIdentifier=\\\"RESPONSE\\\" orientation=\\\"horizontal\\\"><prompt></prompt><simpleChoice identifier=\\\"ChoiceA\\\">1</simpleChoice><simpleChoice identifier=\\\"ChoiceB\\\">2</simpleChoice><simpleChoice identifier=\\\"ChoiceC\\\">3</simpleChoice><simpleChoice 
identifier=\\\"ChoiceD\\\">4</simpleChoice></choiceInteraction></itemBody><responseProcessing template=\\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\\"/></assessmentItem>\"\n }\n ]\n }\n}\n`);
  });
  });
 
  it('Entering request with an invalid combination of item and question reference', () => {
  cy.get('textarea[class="body-param__text"]')
  .clear()
- .fill(`{ \n "item_references": [\n "DataApiGetQuestionTest2"], \n "references": ["${questionAndResourceReferenceIds[0]?.[0] || '1234567-1234-1234-1234-123456789012'}"], \n "types": [], \n "limit": 50, \n "page": 0, \n "sort_field": "created", \n "sort": "asc"\n }\n`);
+ .fill(`{ \n "item_references": [\n "DataApiGetQuestionTest2"], \n "references": ["${questionAndResourceReferenceIds[0][0]}"], \n "types": [], \n "limit": 50, \n "page": 0, \n "sort_field": "created", \n "sort": "asc"\n }\n`);
  cy.get('.execute')
  .click();
  cy.wait(2000);
@@ -4,14 +4,9 @@ describe('Fetch Resources and tools Data API cases', () => {
  var timestamp;
  before(() => {
  cy.loginAs('admin');
- cy.deleteItem('DataApiGetResourcesTest1');
- cy.wait(5000);
- cy.deleteItem('DataApiGetResourcesTest2');
- cy.wait(5000);
  cy.createItem('DataApiGetResourcesTest1');
- cy.wait(5000);
+ cy.wait(5000)
  cy.createItem('DataApiGetResourcesTest2');
- cy.wait(5000);
  cy.visit(`${Cypress.env('itemEngineHomePage')}`);
  });
 
@@ -20,7 +15,6 @@ describe('Fetch Resources and tools Data API cases', () => {
  return false
  });
  cy.deleteItems();
- cy.wait(5000);
  cy.logout();
  });
 
@@ -6,36 +6,23 @@ var dataApiQuestionAndResourceIds = [];
  for (var i = 0; i < 2; i++) {
  dataApiQuestionAndResourceIds.push(uuid());
  };
+ //93f228f9-d4f7-8c36-effd-cd6b6e87b329
+ //36e2967e-4260-a739-97e4-33ece5b39b3b
  var timestamp
 
 
  describe('Save Item Data API cases', () => {
  before(() => {
  cy.loginAs('admin');
- cy.deleteItem('DataAPITest11');
- cy.wait(5000);
- cy.deleteItem('DataAPITest22');
- cy.wait(5000);
- cy.deleteItem('ItemCreatedWithSetItemDataAPI');
- cy.wait(5000);
  cy.createItem('DataAPITest11');
- cy.wait(5000);
  cy.createItem('DataAPITest22');
- cy.wait(5000);
  cy.createItem('ItemCreatedWithSetItemDataAPI');
- cy.wait(5000);
  cy.visit(`${Cypress.env('itemEngineHomePage')}`);
  });
 
  after(() => {
- cy.deleteItem('DataAPITest11');
- cy.wait(5000);
- cy.deleteItem('DataAPITest22');
- cy.wait(5000);
  cy.deleteItem('ItemCreatedWithSetItemDataAPI');
- cy.wait(5000);
  cy.deleteItems();
- cy.wait(5000);
  cy.logout();
  });
 
@@ -45,7 +32,7 @@ describe('Save Item Data API cases', () => {
  .click();
  cy.get('textarea[class="body-param__text"]')
  .clear()
- .fill(`{"questions":[{"reference":'${dataApiQuestionAndResourceIds[0]}',"instruction":"question instructions","points":20,"is_auto_scored":false,"type":"essay response","question_xml":"","scoring_type_id":2,"teacher_guideline":"guidelines for this questions if any","penalty_points":"0","min_score_if_attempted":"0","min_score_type":1,"penalty_point_type":1,"is_auto_penalty_setting":false,"is_negative_rounded":true,"specific_penalty_type":1,"penalty_points_for_each":"0"}]}'`);
+ .fill(`{"questions":[{"reference":'${dataApiQuestionAndResourceIds[0]}',"instruction":"question instructions","points":20,"is_auto_scored":false,"type":"essay response","question_xml":"<assessmentItem xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\" xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\" xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\" identifier=\\"extendedText\\" title=\\"This is title for Essay Response\\" timeDependent=\\"false\\"><responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"single\\" baseType=\\"string\\"/><outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/><itemBody><p class=\\"predefinedText\\" role=\\"presentation\\"/><extendedTextInteraction responseIdentifier=\\"RESPONSE\\" placeholderText=\\"Your Answer\\" expectedLength=\\"\\"><prompt>Please write a short essay in the response field displayed below</prompt></extendedTextInteraction></itemBody></assessmentItem>","scoring_type_id":2,"teacher_guideline":"guidelines for this questions if any","penalty_points":"0","min_score_if_attempted":"0","min_score_type":1,"penalty_point_type":1,"is_auto_penalty_setting":false,"is_negative_rounded":true,"specific_penalty_type":1,"penalty_points_for_each":"0"}]}'`);
  cy.get('.execute')
  .click();
  cy.wait(2000);
@@ -142,6 +129,9 @@ describe('Save Item Data API cases', () => {
  .find('span')
  .eq(0)
  .should('have.text', '<!DOCTYPE ');
+ // cy.get('pre[class="microlight"]')
+ // .eq(0)
+ // .should('have.text', '{\n "message": "\\"items[0].reference\\" is not allowed to be empty",\n "success": false\n}\n');
  });
 
  it('Entering request with a question/resource reference id which does not exist', () => {
@@ -94,7 +94,7 @@ describe('Set Question Data API cases', () => {
  .click();
  cy.get('textarea[class="body-param__text"]')
  .clear()
- .fill(`{\n "questions": [\n {\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n }\n ],\n "meta": {\n "user": {\n "id": "dfbb6366-d88d-416d-9d9c-7ee6420817b3",\n "first_name": "fname",\n "last_name": "lname",\n "email": "user@demo.com"\n }\n }\n}`);
+ .fill(`{\n "questions": [\n {\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "Random XML",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n }\n ],\n "meta": {\n "user": {\n "id": "dfbb6366-d88d-416d-9d9c-7ee6420817b3",\n "first_name": "fname",\n "last_name": "lname",\n "email": "user@demo.com"\n }\n }\n}`);
  cy.get('.execute')
  .click();
  cy.wait(2000);
@@ -113,6 +113,34 @@ describe('Set Question Data API cases', () => {
  .should('have.text', "\"\\\"questions[0].reference\\\" is required\"");
  });
 
+ it('Entering request with incorrect question_XML', () => {
+ cy.get('textarea[class="body-param__text"]')
+ .clear()
+ .fill(`{\n "questions": [\n {\n "reference": "uniq ques ref 002",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "Random XML",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n }\n ],\n "meta": {\n "user": {\n "id": "f8888c-f3fe-4642-440a-a32d47cd20be",\n "first_name": "fname",\n "last_name": "lname",\n "email": "user@demo.com"\n }\n }\n}`);
+ cy.get('.execute')
+ .click();
+ cy.wait(2000);
+ cy.get('.loading-container')
+ .should('not.exist');
+ cy.get('pre[class="microlight"]')
+ .eq(1)
+ .within(() => {
+ cy.contains('timestamp')
+ .next().next().then((currTimestamp) => {
+ timestamp = currTimestamp[0].innerText
+ });
+ });
+ });
+
+ it('Invalid XML message should be received in the response, with status code 200', () => {
+ cy.get('td[class="response-col_status"]')
+ .eq(0)
+ .should('have.text', '200');
+ cy.get('pre[class="microlight"]')
+ .eq(1)
+ .should('have.text', `{\n \"meta\": {\n \"message\": \"Error occurred while saving data\",\n \"timestamp\": ${timestamp},\n \"status\": false\n },\n \"data\": []\n}\n`);
142
+ });
143
+
116
144
  it('Entering request with empty question_XML', () => {
117
145
  cy.get('textarea[class="body-param__text"]')
118
146
  .clear()
@@ -138,7 +166,7 @@ describe('Set Question Data API cases', () => {
138
166
  it('Entering request with already existing question reference', () => {
139
167
  cy.get('textarea[class="body-param__text"]')
140
168
  .clear()
141
- .fill('{\n "questions": [\n {\n "reference": "uniq ques ref 001",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n }\n ]\n}\n');
169
+ .fill('{\n "questions": [\n {\n "reference": "uniq ques ref 001",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "<assessmentItem \\r\\n xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\"\\r\\n xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\"\\r\\n xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\"\\r\\n identifier=\\"choiceMultiple\\" \\r\\n title=\\"Welcome to Biodome\\" \\r\\n timeDependent=\\"false\\"\\r\\n>\\r\\n <responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"multiple\\" baseType=\\"identifier\\">\\r\\n <correctResponse>\\r\\n <value>ChoiceA</value>\\r\\n <value>ChoiceE</value>\\r\\n </correctResponse>\\r\\n <mapping>\\r\\n <mapEntry mapKey=\\"ChoiceA\\" mappedValue=\\"1\\"/>\\r\\n <mapEntry mapKey=\\"ChoiceE\\" mappedValue=\\"3\\"/>\\r\\n </mapping>\\r\\n </responseDeclaration>\\r\\n <outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/>\\r\\n <itemBody>\\r\\n <choiceInteraction responseIdentifier=\\"RESPONSE\\">\\r\\n <prompt> \\r\\n Zebras having stripes is a trait that might help them to survive. \\r\\n Which of the statements below are possible advantages of stripes. \\r\\n Select all that apply\\r\\n </prompt>\\r\\n <simpleChoice identifier=\\"ChoiceA\\" >Stripes help to confuse predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceB\\" >Stripes help to attract predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceC\\" >Stripes help human hunters to see zebras more easily.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceD\\" >Stripes help zebras attract biting insects.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceE\\" >Stripes help zebras to find mates to reproduce with.</simpleChoice>\\r\\n </choiceInteraction>\\r\\n </itemBody>\\r\\n <responseProcessing template=\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\"/>\\r\\n</assessmentItem>\\r\\n",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n }\n ]\n}\n');
142
170
  cy.get('.execute')
143
171
  .click();
144
172
  cy.wait(2000);
@@ -146,12 +174,22 @@ describe('Set Question Data API cases', () => {
146
174
  .should('not.exist');
147
175
  });
148
176
 
177
+ it('Reference id already exists message should be received in the response, with status code 200', () => {
178
+ cy.get('pre[class="microlight"]')
179
+ .eq(1)
180
+ cy.get('pre[class="microlight"]')
181
+ .eq(1)
182
+ .find('span')
183
+ .eq(7)
184
+ .should('have.text',"\"Key (reference_id, organisation_id)=(uniq ques ref 001, 7e15466c-30cb-4fdf-b160-6e6fc3660d0e) already exists.\"");
185
+ });
186
+
149
187
  it('Entering request to create 2 questions with valid reference ids', () => {
150
188
  unique_reference_id_1 = uuid();
151
189
  unique_reference_id_2 = uuid();
152
190
  cy.get('textarea[class="body-param__text"]')
153
191
  .clear()
154
- .fill(`{\n "questions": [\n {\n "reference": "${unique_reference_id_1}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n },\n {\n "reference": "${unique_reference_id_2}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n }\n ]\n}\n`);
192
+ .fill(`{\n "questions": [\n {\n "reference": "${unique_reference_id_1}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "<assessmentItem \\r\\n xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\"\\r\\n xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\"\\r\\n xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\"\\r\\n identifier=\\"choiceMultiple\\" \\r\\n title=\\"Welcome to Biodome\\" \\r\\n timeDependent=\\"false\\"\\r\\n>\\r\\n <responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"multiple\\" baseType=\\"identifier\\">\\r\\n <correctResponse>\\r\\n <value>ChoiceA</value>\\r\\n <value>ChoiceE</value>\\r\\n </correctResponse>\\r\\n <mapping>\\r\\n <mapEntry mapKey=\\"ChoiceA\\" mappedValue=\\"1\\"/>\\r\\n <mapEntry mapKey=\\"ChoiceE\\" mappedValue=\\"3\\"/>\\r\\n </mapping>\\r\\n </responseDeclaration>\\r\\n <outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/>\\r\\n <itemBody>\\r\\n <choiceInteraction responseIdentifier=\\"RESPONSE\\">\\r\\n <prompt> \\r\\n Zebras having stripes is a trait that might help them to survive. \\r\\n Which of the statements below are possible advantages of stripes. \\r\\n Select all that apply\\r\\n </prompt>\\r\\n <simpleChoice identifier=\\"ChoiceA\\" >Stripes help to confuse predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceB\\" >Stripes help to attract predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceC\\" >Stripes help human hunters to see zebras more easily.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceD\\" >Stripes help zebras attract biting insects.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceE\\" >Stripes help zebras to find mates to reproduce with.</simpleChoice>\\r\\n </choiceInteraction>\\r\\n </itemBody>\\r\\n <responseProcessing template=\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\"/>\\r\\n</assessmentItem>\\r\\n",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n },\n {\n "reference": "${unique_reference_id_2}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "<assessmentItem \\r\\n xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\"\\r\\n xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\"\\r\\n xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\"\\r\\n identifier=\\"choiceMultiple\\" \\r\\n title=\\"Welcome to Biodome\\" \\r\\n timeDependent=\\"false\\"\\r\\n>\\r\\n <responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"multiple\\" baseType=\\"identifier\\">\\r\\n <correctResponse>\\r\\n <value>ChoiceA</value>\\r\\n <value>ChoiceE</value>\\r\\n </correctResponse>\\r\\n <mapping>\\r\\n <mapEntry mapKey=\\"ChoiceA\\" mappedValue=\\"1\\"/>\\r\\n <mapEntry mapKey=\\"ChoiceE\\" mappedValue=\\"3\\"/>\\r\\n </mapping>\\r\\n </responseDeclaration>\\r\\n <outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/>\\r\\n <itemBody>\\r\\n <choiceInteraction responseIdentifier=\\"RESPONSE\\">\\r\\n <prompt> \\r\\n Zebras having stripes is a trait that might help them to survive. \\r\\n Which of the statements below are possible advantages of stripes. 
\\r\\n Select all that apply\\r\\n </prompt>\\r\\n <simpleChoice identifier=\\"ChoiceA\\" >Stripes help to confuse predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceB\\" >Stripes help to attract predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceC\\" >Stripes help human hunters to see zebras more easily.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceD\\" >Stripes help zebras attract biting insects.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceE\\" >Stripes help zebras to find mates to reproduce with.</simpleChoice>\\r\\n </choiceInteraction>\\r\\n </itemBody>\\r\\n <responseProcessing template=\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\"/>\\r\\n</assessmentItem>\\r\\n",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0"\n }\n ]\n}\n`);
155
193
  cy.get('.execute')
156
194
  .click();
157
195
  cy.wait(2000);
@@ -28,7 +28,7 @@ describe('Save MCQ Question with Alternate Answers API cases', () => {
28
28
  .click();
29
29
  cy.get('textarea[class="body-param__text"]')
30
30
  .clear()
31
- .fill(`{\n "questions": [\n {\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n }\n ]\n}`);
31
+ .fill(`{\n "questions": [\n {\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "<assessmentItem \\r\\n xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\"\\r\\n xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\"\\r\\n xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\"\\r\\n identifier=\\"choiceMultiple\\" \\r\\n title=\\"Welcome to Biodome\\" \\r\\n timeDependent=\\"false\\"\\r\\n>\\r\\n <responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"multiple\\" baseType=\\"identifier\\">\\r\\n <correctResponse>\\r\\n <value>ChoiceA</value>\\r\\n <value>ChoiceE</value>\\r\\n </correctResponse>\\r\\n <mapping>\\r\\n <mapEntry mapKey=\\"ChoiceA\\" mappedValue=\\"1\\"/>\\r\\n <mapEntry mapKey=\\"ChoiceE\\" mappedValue=\\"3\\"/>\\r\\n </mapping>\\r\\n </responseDeclaration>\\r\\n <outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/>\\r\\n <itemBody>\\r\\n <choiceInteraction responseIdentifier=\\"RESPONSE\\">\\r\\n <prompt> \\r\\n Zebras having stripes is a trait that might help them to survive. \\r\\n Which of the statements below are possible advantages of stripes. \\r\\n Select all that apply\\r\\n </prompt>\\r\\n <simpleChoice identifier=\\"ChoiceA\\" >Stripes help to confuse predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceB\\" >Stripes help to attract predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceC\\" >Stripes help human hunters to see zebras more easily.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceD\\" >Stripes help zebras attract biting insects.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceE\\" >Stripes help zebras to find mates to reproduce with.</simpleChoice>\\r\\n </choiceInteraction>\\r\\n </itemBody>\\r\\n <responseProcessing template=\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\"/>\\r\\n</assessmentItem>\\r\\n",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n }\n ]\n}`);
32
32
  cy.get('.execute')
33
33
  .click();
34
34
  cy.wait(2000);
@@ -50,7 +50,7 @@ describe('Save MCQ Question with Alternate Answers API cases', () => {
50
50
  it('Entering request with incorrect question_XML', () => {
51
51
  cy.get('textarea[class="body-param__text"]')
52
52
  .clear()
53
- .fill(`{\n "questions": [\n {\n "reference": "1ed861b-7dd3-1a1-0abc-7158cf8d4a7-dataAPiTest",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n }\n ]\n}`);
53
+ .fill(`{\n "questions": [\n {\n "reference": "1ed861b-7dd3-1a1-0abc-7158cf8d4a7-dataAPiTest",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "Random XML",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n }\n ]\n}`);
54
54
  cy.get('.execute')
55
55
  .click();
56
56
  cy.wait(2000);
@@ -66,6 +66,15 @@ describe('Save MCQ Question with Alternate Answers API cases', () => {
66
66
  });
67
67
  });
68
68
 
69
+ it('\'Error occurred while saving data\' message should be received in the response, with status code 500', () => {
70
+ cy.get('td[class="response-col_status"]')
71
+ .eq(0)
72
+ .should('have.text', '200');
73
+ cy.get('pre[class="microlight"]')
74
+ .eq(1)
75
+ .should('have.text', `{\n \"meta\": {\n \"message\": \"Key (reference_id, organisation_id)=(1ed861b-7dd3-1a1-0abc-7158cf8d4a7-dataAPiTest, 7e15466c-30cb-4fdf-b160-6e6fc3660d0e) already exists.\",\n \"timestamp\": ${timestamp},\n \"status\": false\n },\n \"data\": []\n}\n`);
76
+ });
77
+
69
78
  it('Entering request with empty question_XML', () => {
70
79
  cy.get('textarea[class="body-param__text"]')
71
80
  .clear()
@@ -91,28 +100,20 @@ describe('Save MCQ Question with Alternate Answers API cases', () => {
91
100
  it('Entering request with already existing question reference', () => {
92
101
  cy.get('textarea[class="body-param__text"]')
93
102
  .clear()
94
- .fill('{\n "questions": [\n {\n "reference": "1ed861b-7dd3-1a1-0abc-7158cf8d4a7-dataAPiTest",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n }\n ]\n}');
103
+ .fill('{\n "questions": [\n {\n "reference": "1ed861b-7dd3-1a1-0abc-7158cf8d4a7-dataAPiTest",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "<assessmentItem \\r\\n xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\"\\r\\n xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\"\\r\\n xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\"\\r\\n identifier=\\"choiceMultiple\\" \\r\\n title=\\"Welcome to Biodome\\" \\r\\n timeDependent=\\"false\\"\\r\\n>\\r\\n <responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"multiple\\" baseType=\\"identifier\\">\\r\\n <correctResponse>\\r\\n <value>ChoiceA</value>\\r\\n <value>ChoiceE</value>\\r\\n </correctResponse>\\r\\n <mapping>\\r\\n <mapEntry mapKey=\\"ChoiceA\\" mappedValue=\\"1\\"/>\\r\\n <mapEntry mapKey=\\"ChoiceE\\" mappedValue=\\"3\\"/>\\r\\n </mapping>\\r\\n </responseDeclaration>\\r\\n <outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/>\\r\\n <itemBody>\\r\\n <choiceInteraction responseIdentifier=\\"RESPONSE\\">\\r\\n <prompt> \\r\\n Zebras having stripes is a trait that might help them to survive. \\r\\n Which of the statements below are possible advantages of stripes. \\r\\n Select all that apply\\r\\n </prompt>\\r\\n <simpleChoice identifier=\\"ChoiceA\\" >Stripes help to confuse predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceB\\" >Stripes help to attract predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceC\\" >Stripes help human hunters to see zebras more easily.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceD\\" >Stripes help zebras attract biting insects.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceE\\" >Stripes help zebras to find mates to reproduce with.</simpleChoice>\\r\\n </choiceInteraction>\\r\\n </itemBody>\\r\\n <responseProcessing template=\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\"/>\\r\\n</assessmentItem>\\r\\n",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n }\n ]\n}');
95
104
  cy.get('.execute')
96
105
  .click();
97
106
  cy.wait(2000);
98
107
  cy.get('.loading-container')
99
108
  .should('not.exist');
100
- cy.get('pre[class="microlight"]')
101
- .eq(1)
102
- .within(() => {
103
- cy.contains('timestamp')
104
- .next().next().then((currTimestamp) => {
105
- timestamp = currTimestamp[0].innerText
106
- });
107
- });
108
109
  });
109
110
 
110
- it('if Reference id already exists then it should update the existing question', () => {
111
+ it('Reference id already exists message should be received in the response, with status code 422', () => {
111
112
  cy.get('pre[class="microlight"]')
112
113
  .eq(1)
113
114
  .find('span')
114
115
  .eq(7)
115
- .should('have.text', 'true');
116
+ .should('have.text', "\"Key (reference_id, organisation_id)=(1ed861b-7dd3-1a1-0abc-7158cf8d4a7-dataAPiTest, 7e15466c-30cb-4fdf-b160-6e6fc3660d0e) already exists.\"");
116
117
  });
117
118
 
118
119
  it('Entering request to create 2 questions with valid reference ids', () => {
@@ -120,7 +121,7 @@ describe('Save MCQ Question with Alternate Answers API cases', () => {
120
121
  unique_reference_id_2 = uuid();
121
122
  cy.get('textarea[class="body-param__text"]')
122
123
  .clear()
123
- .fill(`{\n "questions": [\n {\n "reference": "${unique_reference_id_1}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n },\n {\n "reference": "${unique_reference_id_2}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n {\n "score": 4,\n "values": [\n "ChoiceA",\n "ChoiceE"\n ]\n },\n {\n "score": 4,\n "values": [\n "ChoiceB",\n "ChoiceD"\n ]\n }\n ]\n }\n }\n ]\n}`);
124
+ .fill(`{\n "questions": [\n {\n "reference": "${unique_reference_id_1}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "<assessmentItem \\r\\n xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\"\\r\\n xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\"\\r\\n xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\"\\r\\n identifier=\\"choiceMultiple\\" \\r\\n title=\\"Welcome to Biodome\\" \\r\\n timeDependent=\\"false\\"\\r\\n>\\r\\n <responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"multiple\\" baseType=\\"identifier\\">\\r\\n <correctResponse>\\r\\n <value>ChoiceA</value>\\r\\n <value>ChoiceE</value>\\r\\n </correctResponse>\\r\\n <mapping>\\r\\n <mapEntry mapKey=\\"ChoiceA\\" mappedValue=\\"1\\"/>\\r\\n <mapEntry mapKey=\\"ChoiceE\\" mappedValue=\\"3\\"/>\\r\\n </mapping>\\r\\n </responseDeclaration>\\r\\n <outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/>\\r\\n <itemBody>\\r\\n <choiceInteraction responseIdentifier=\\"RESPONSE\\">\\r\\n <prompt> \\r\\n Zebras having stripes is a trait that might help them to survive. \\r\\n Which of the statements below are possible advantages of stripes. \\r\\n Select all that apply\\r\\n </prompt>\\r\\n <simpleChoice identifier=\\"ChoiceA\\" >Stripes help to confuse predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceB\\" >Stripes help to attract predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceC\\" >Stripes help human hunters to see zebras more easily.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceD\\" >Stripes help zebras attract biting insects.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceE\\" >Stripes help zebras to find mates to reproduce with.</simpleChoice>\\r\\n </choiceInteraction>\\r\\n </itemBody>\\r\\n <responseProcessing template=\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\"/>\\r\\n</assessmentItem>\\r\\n",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n [\n {\n "score": 1,\n "value": "ChoiceA"\n },\n {\n "score": 3,\n "value": "ChoiceE"\n }\n ],\n [\n {\n "score": 2,\n "value": "ChoiceB"\n },\n {\n "score": 2,\n "value": "ChoiceD"\n }\n ]\n ]\n }\n },\n {\n "reference": "${unique_reference_id_2}",\n "instruction": "question instructions",\n "points": 10,\n "is_auto_scored": true,\n "type": "multiple selection",\n "question_xml": "<assessmentItem \\r\\n xmlns=\\"http://www.imsglobal.org/xsd/imsqti_v2p2\\"\\r\\n xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\"\\r\\n xsi:schemaLocation=\\"http://www.imsglobal.org/xsd/imsqti_v2p2 http://www.imsglobal.org/xsd/qti/qtiv2p2/imsqti_v2p2p2.xsd\\"\\r\\n identifier=\\"choiceMultiple\\" \\r\\n title=\\"Welcome to Biodome\\" \\r\\n timeDependent=\\"false\\"\\r\\n>\\r\\n <responseDeclaration identifier=\\"RESPONSE\\" cardinality=\\"multiple\\" baseType=\\"identifier\\">\\r\\n <correctResponse>\\r\\n <value>ChoiceA</value>\\r\\n <value>ChoiceE</value>\\r\\n </correctResponse>\\r\\n </responseDeclaration>\\r\\n <outcomeDeclaration identifier=\\"SCORE\\" cardinality=\\"single\\" baseType=\\"float\\"/>\\r\\n <itemBody>\\r\\n <choiceInteraction responseIdentifier=\\"RESPONSE\\">\\r\\n <prompt> \\r\\n Zebras having stripes is a trait that might help them to survive. 
\\r\\n Which of the statements below are possible advantages of stripes. \\r\\n Select all that apply\\r\\n </prompt>\\r\\n <simpleChoice identifier=\\"ChoiceA\\" >Stripes help to confuse predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceB\\" >Stripes help to attract predators.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceC\\" >Stripes help human hunters to see zebras more easily.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceD\\" >Stripes help zebras attract biting insects.</simpleChoice>\\r\\n <simpleChoice identifier=\\"ChoiceE\\" >Stripes help zebras to find mates to reproduce with.</simpleChoice>\\r\\n </choiceInteraction>\\r\\n </itemBody>\\r\\n <responseProcessing template=\\"http://www.imsglobal.org/question/qti_v2p2/rptemplates/map_response\\"/>\\r\\n</assessmentItem>\\r\\n",\n "scoring_type_id": 1,\n "teacher_guideline": "guidelines for this questions if any",\n "penalty_points": "0",\n "min_score_if_attempted": "0",\n "validation": {\n "alt_responses": [\n {\n "score": 4,\n "values": [\n "ChoiceA",\n "ChoiceE"\n ]\n },\n {\n "score": 4,\n "values": [\n "ChoiceB",\n "ChoiceD"\n ]\n }\n ]\n }\n }\n ]\n}`);
124
125
  cy.get('.execute')
125
126
  .click();
126
127
  cy.wait(2000);