performanceresultscreens 0.0.11 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/Atoms/ExtractedDiagramView/ExtractedDiagramView.d.ts +1 -1
- package/dist/components/Atoms/ExtractedDiagramView/ExtractedDiagramView.js +5 -2
- package/dist/components/Atoms/ExtractedDiagramView/ExtractedDiagramView.js.map +1 -1
- package/dist/components/Atoms/TranscriptionImagePreview/TranscriptionImagePreview.js +1 -1
- package/dist/components/Atoms/TranscriptionImagePreview/TranscriptionImagePreview.js.map +1 -1
- package/dist/components/Molecules/AccordianPay4/Accordian.d.ts +1 -1
- package/dist/components/Molecules/AccordianPay4/Accordian.js +8 -9
- package/dist/components/Molecules/AccordianPay4/Accordian.js.map +1 -1
- package/dist/components/Molecules/AccordianPay4/Accordian.stories.d.ts +1 -0
- package/dist/components/Molecules/AccordianPay4/Accordian.stories.js +11 -1
- package/dist/components/Molecules/AccordianPay4/Accordian.stories.js.map +1 -1
- package/dist/components/Molecules/Descriptive/DescriptiveLabel.d.ts +2 -1
- package/dist/components/Molecules/Descriptive/DescriptiveLabel.js +4 -2
- package/dist/components/Molecules/Descriptive/DescriptiveLabel.js.map +1 -1
- package/dist/components/Molecules/Descriptive/descriptiveStyles.d.ts +12 -4
- package/dist/components/Molecules/Descriptive/descriptiveStyles.js +5 -0
- package/dist/components/Molecules/Descriptive/descriptiveStyles.js.map +1 -1
- package/dist/components/Molecules/DescriptiveAnswer/DescriptiveAnswerV1.js +49 -31
- package/dist/components/Molecules/DescriptiveAnswer/DescriptiveAnswerV1.js.map +1 -1
- package/dist/components/Molecules/MediaModal/MediaModal.js +2 -2
- package/dist/components/Molecules/MediaModal/MediaModal.js.map +1 -1
- package/dist/components/Molecules/Pay4Use/ExecutionModal.js +2 -6
- package/dist/components/Molecules/Pay4Use/ExecutionModal.js.map +1 -1
- package/dist/components/Molecules/PerformanceScreen/PreformanceResultScreen.d.ts +1 -0
- package/dist/components/Molecules/PerformanceScreen/PreformanceResultScreen.js +19 -9
- package/dist/components/Molecules/PerformanceScreen/PreformanceResultScreen.js.map +1 -1
- package/dist/components/Molecules/PerformanceScreen/PreformanceResultScreen.stories.js +3 -0
- package/dist/components/Molecules/PerformanceScreen/PreformanceResultScreen.stories.js.map +1 -1
- package/dist/components/Molecules/SmartLearn/SmartLearn.d.ts +2 -2
- package/dist/components/Molecules/SmartLearn/SmartLearn.js +34 -17
- package/dist/components/Molecules/SmartLearn/SmartLearn.js.map +1 -1
- package/dist/components/Molecules/SmartLearn/SmartLearn.stories.d.ts +3 -2
- package/dist/components/Molecules/SmartLearn/SmartLearn.stories.js +32 -20
- package/dist/components/Molecules/SmartLearn/SmartLearn.stories.js.map +1 -1
- package/dist/components/Molecules/SmartLearn/SmartLearn.styles.d.ts +46 -2
- package/dist/components/Molecules/SmartLearn/SmartLearn.styles.js +47 -3
- package/dist/components/Molecules/SmartLearn/SmartLearn.styles.js.map +1 -1
- package/dist/components/Molecules/SmartLearn/types.d.ts +1 -1
- package/dist/components/Molecules/StaticscreensLearnmore/Staticback.js +8 -4
- package/dist/components/Molecules/StaticscreensLearnmore/Staticback.js.map +1 -1
- package/dist/components/Molecules/StaticscreensLearnmore/Staticback.styles.d.ts +1 -1
- package/dist/components/Molecules/StaticscreensLearnmore/Staticback.styles.js +3 -3
- package/dist/components/Molecules/StaticscreensLearnmore/Staticback.styles.js.map +1 -1
- package/dist/components/Molecules/Tutor/Assistant/Assistant.js +119 -29
- package/dist/components/Molecules/Tutor/Assistant/Assistant.js.map +1 -1
- package/dist/components/Molecules/Tutor/Assistant/Assistant.styles.d.ts +93 -1
- package/dist/components/Molecules/Tutor/Assistant/Assistant.styles.js +77 -6
- package/dist/components/Molecules/Tutor/Assistant/Assistant.styles.js.map +1 -1
- package/dist/components/Molecules/Tutor/Assistant/TutorAssistant.js +1 -1
- package/dist/components/Molecules/Tutor/Assistant/TutorAssistant.js.map +1 -1
- package/dist/components/Molecules/Tutor/User/User.js +22 -2
- package/dist/components/Molecules/Tutor/User/User.js.map +1 -1
- package/dist/components/Molecules/Tutor/User/User.stories.d.ts +1 -0
- package/dist/components/Molecules/Tutor/User/User.stories.js +92 -1
- package/dist/components/Molecules/Tutor/User/User.stories.js.map +1 -1
- package/dist/components/Molecules/comprehension/ComprehensionLayout.d.ts +11 -1
- package/dist/components/Molecules/comprehension/ComprehensionLayout.js +52 -19
- package/dist/components/Molecules/comprehension/ComprehensionLayout.js.map +1 -1
- package/dist/components/Molecules/optionLayouts/scqtemp.js +168 -55
- package/dist/components/Molecules/optionLayouts/scqtemp.js.map +1 -1
- package/dist/components/Molecules/questionLayouts/QuestionsLayout.js +166 -101
- package/dist/components/Molecules/questionLayouts/QuestionsLayout.js.map +1 -1
- package/dist/components/Organisms/PostDetails/PostInfo.js +7 -3
- package/dist/components/Organisms/PostDetails/PostInfo.js.map +1 -1
- package/dist/components/Organisms/ReviewComponent/ChatPresenter.js +2 -2
- package/dist/components/Organisms/ReviewComponent/ChatPresenter.js.map +1 -1
- package/dist/components/Organisms/ReviewComponent/Review.d.ts +12 -1
- package/dist/components/Organisms/ReviewComponent/Review.js +147 -50
- package/dist/components/Organisms/ReviewComponent/Review.js.map +1 -1
- package/dist/components/Organisms/ReviewComponent/ReviewPresenter.d.ts +12 -1
- package/dist/components/Organisms/ReviewComponent/ReviewPresenter.js +7 -8
- package/dist/components/Organisms/ReviewComponent/ReviewPresenter.js.map +1 -1
- package/dist/components/Organisms/ReviewComponent/ReviewPresenter.stories.d.ts +62 -0
- package/dist/components/Organisms/ReviewComponent/ReviewPresenter.stories.js +47 -1
- package/dist/components/Organisms/ReviewComponent/ReviewPresenter.stories.js.map +1 -1
- package/dist/components/Organisms/ReviewComponent/reviewUtils.d.ts +21 -80
- package/dist/components/Organisms/ReviewComponent/reviewUtils.js +67 -51
- package/dist/components/Organisms/ReviewComponent/reviewUtils.js.map +1 -1
- package/dist/components/Organisms/ReviewComponent/selectionUtils.d.ts +1715 -0
- package/dist/components/Organisms/ReviewComponent/selectionUtils.js +2898 -1
- package/dist/components/Organisms/ReviewComponent/selectionUtils.js.map +1 -1
- package/dist/components/Organisms/StudentEditor/StudentEditor.stories.js +14 -9
- package/dist/components/Organisms/StudentEditor/StudentEditor.stories.js.map +1 -1
- package/dist/components/Organisms/StudentEditor/components/MathModal.js +3 -0
- package/dist/components/Organisms/StudentEditor/components/MathModal.js.map +1 -1
- package/dist/components/Organisms/StudentEditor/components/TextEditor.js +145 -85
- package/dist/components/Organisms/StudentEditor/components/TextEditor.js.map +1 -1
- package/dist/components/Organisms/StudentEditor/utils/keyboard-config.d.ts +4 -4
- package/dist/components/Organisms/StudentEditor/utils/keyboard-config.js +52 -52
- package/dist/components/Organisms/StudentEditor/utils/keyboard-config.js.map +1 -1
- package/dist/index.d.ts +3 -0
- package/dist/index.js +7 -1
- package/dist/index.js.map +1 -1
- package/dist/utils/LearningPathUtils.d.ts +1 -0
- package/dist/utils/LearningPathUtils.js +2 -1
- package/dist/utils/LearningPathUtils.js.map +1 -1
- package/dist/utils/common-utils.js.map +1 -1
- package/dist/utils/presentationModes.d.ts +59 -0
- package/dist/utils/presentationModes.js +36 -1
- package/dist/utils/presentationModes.js.map +1 -1
- package/package.json +4 -1

package/dist/components/Organisms/ReviewComponent/ReviewPresenter.stories.js

@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ComprehensionBasedExample = exports.Example3 = exports.Example2 = exports.Example1 = void 0;
+exports.ComprehensionBasedExampleTeacher2 = exports.ComprehensionBasedExampleTeacher = exports.ComprehensionBasedExample = exports.Example3 = exports.Example2 = exports.Example1 = void 0;
 const jsx_runtime_1 = require("react/jsx-runtime");
 const react_1 = __importDefault(require("react"));
 const wall_images_1 = __importDefault(require("../../../utils/wall-images"));
@@ -11,6 +11,7 @@ const ReviewPresenter_1 = __importDefault(require("./ReviewPresenter"));
 const reviewUtils_1 = require("./reviewUtils");
 const selectionUtils_1 = require("./selectionUtils");
 const factual_module_css_1 = __importDefault(require("../../Molecules/FactualCard/factual.module.css"));
+const testingutils_1 = require("../../../utils/testingutils");
 exports.default = {
     title: "Components/Organisms/ReviewPresenter",
     component: ReviewPresenter_1.default,
@@ -56,6 +57,7 @@ exports.Example2.args = {
     dataType: "",
     questions: reviewUtils_1.questionData,
     isReviewScreen: true,
+    isStudent: true,
     isFactual: false,
     taskType: "Practice",
     // journeyData: journeyData,
@@ -72,6 +74,7 @@ exports.Example3.args = {
     isReviewScreen: true,
     isFactual: false,
     taskType: "learn",
+    isQuestionSelection: true,
     // journeyData: journeyData,
     commonProps: reviewUtils_1.commonprops,
     onAnswerChanged: () => { },
@@ -113,4 +116,47 @@ exports.ComprehensionBasedExample.args = {
     isFromParentApp: false,
     // visibleMainQuestion: 1,
 };
+exports.ComprehensionBasedExampleTeacher = Template.bind({});
+exports.ComprehensionBasedExampleTeacher.args = {
+    dataType: "teacherSelctionMainMode",
+    questions: selectionUtils_1.comprehensionSelectionData,
+    isReviewScreen: true,
+    isFactual: false,
+    taskType: "learn",
+    // journeyData: journeyData,
+    commonProps: reviewUtils_1.commonprops,
+    onAnswerChanged: () => { },
+    onAnswerSubmitted: () => { },
+    learningStepData: {},
+    selectionStatus: [true, true, false, false, false, false],
+    moreQuestionsAvaialable: [true, true, false, false, false, false],
+    setSelectionStatus: () => { },
+    // enableBox: true,
+    // childComponent: <div style={{ backgroundColor: "red", border: "1px solid black", minWidth: "100px", minHeight: "100px" }}>Hello</div>,
+    // activeQuestionIndex: 1,
+};
+exports.ComprehensionBasedExampleTeacher2 = Template.bind({});
+exports.ComprehensionBasedExampleTeacher2.args = {
+    dataType: "teacherSelctionMainMode",
+    questions: selectionUtils_1.comprehensionSelectionData,
+    isReviewScreen: true,
+    isFactual: false,
+    taskType: "learn",
+    // journeyData: journeyData,
+    commonProps: reviewUtils_1.commonprops,
+    onAnswerChanged: () => { },
+    onAnswerSubmitted: () => { },
+    learningStepData: {},
+    selectionStatus: [true, true, false, false, false, false],
+    moreQuestionsAvaialable: [false, false, false, false, false, true],
+    setSelectionStatus: () => { },
+    enableBox: true,
+    childComponent: ((0, jsx_runtime_1.jsx)("div", { style: {
+            backgroundColor: "red",
+            border: "1px solid black",
+            minWidth: "100px",
+            minHeight: "100px",
+        }, children: (0, jsx_runtime_1.jsx)(ReviewPresenter_1.default, { dataType: "teacherSelctionSubMode", questions: selectionUtils_1.subSelectionData, isReviewScreen: true, onAnswerChanged: () => { }, onAnswerSubmitted: () => { }, isFactual: false, commonProps: testingutils_1.commonProps, handleNavigate: () => { }, questionRefs: [], journeyData: [], learningStepData: [], selectionStatus: [true, false], setSelectionStatus: () => { }, handleSelectionStatus: () => { } }) })),
+    activeQuestionIndex: 5,
+};
 //# sourceMappingURL=ReviewPresenter.stories.js.map

package/dist/components/Organisms/ReviewComponent/ReviewPresenter.stories.js.map

@@ -1 +1 @@
-{"version":3,"file":"ReviewPresenter.stories.js","sourceRoot":"","sources":["../../../../src/components/Organisms/ReviewComponent/ReviewPresenter.stories.jsx"],"names":[],"mappings":";;;;;;;AAAA,kDAA0B;AAC1B,6EAAqD;AACrD,wEAAgD;AAChD,+CAKuB;AACvB,
+{"version":3,"file":"ReviewPresenter.stories.js","sourceRoot":"","sources":["../../../../src/components/Organisms/ReviewComponent/ReviewPresenter.stories.jsx"],"names":[],"mappings":";;;;;;;AAAA,kDAA0B;AAC1B,6EAAqD;AACrD,wEAAgD;AAChD,+CAKuB;AACvB,qDAI0B;AAC1B,wGAA2E;AAC3E,8DAA0D;AAE1D,kBAAe;IACb,KAAK,EAAE,sCAAsC;IAC7C,SAAS,EAAE,yBAAe;CAC3B,CAAC;AAEF,MAAM,QAAQ,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,uBAAC,yBAAe,OAAK,IAAI,GAAI,CAAC;AAEzD,MAAM,oBAAoB,GAAG;IAC3B,WAAW,EAAE,EAAE;IACf,kBAAkB,EAAE,CAAC;IACrB,uBAAuB,EAAE,YAAY;IACrC,YAAY,EAAE,OAAO;CACtB,CAAC;AAEW,QAAA,QAAQ,GAAG,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC1C,gBAAQ,CAAC,IAAI,GAAG;IACd,QAAQ,EAAE,oBAAoB;IAC9B,SAAS,EAAE,0BAAY;IACvB,cAAc,EAAE,IAAI;IACpB,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,YAAY;IACtB,4BAA4B;IAC5B,WAAW,EAAE,yBAAW;IACxB,eAAe,EAAE,GAAG,EAAE,GAAE,CAAC;IACzB,iBAAiB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC3B,gBAAgB,EAAE,EAAE;IACpB,YAAY,EAAE,4BAAa;IAC3B,kBAAkB,EAAE,GAAG,EAAE;QACvB,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAC/B,CAAC;IACD,kBAAkB,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,EAAE;QACtC,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;QAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACpC,CAAC;IACD,oBAAoB,EAAE,oBAAoB;IAC1C,SAAS,EAAE,CAAC;IACZ,eAAe,EAAE,EAAE;IACnB,cAAc,EAAE,CAAC,QAAQ,EAAE,EAAE;QAC3B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACpC,CAAC;IACD,0BAA0B;CAC3B,CAAC;AAEW,QAAA,QAAQ,GAAG,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC1C,gBAAQ,CAAC,IAAI,GAAG;IACd,QAAQ,EAAE,EAAE;IACZ,SAAS,EAAE,0BAAY;IACvB,cAAc,EAAE,IAAI;IACpB,SAAS,EAAE,IAAI;IACf,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,UAAU;IACpB,4BAA4B;IAC5B,WAAW,EAAE,yBAAW;IACxB,eAAe,EAAE,GAAG,EAAE,GAAE,CAAC;IACzB,iBAAiB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC3B,gBAAgB,EAAE,EAAE;IACpB,YAAY,EAAE,4BAAa;CAC5B,CAAC;AAEW,QAAA,QAAQ,GAAG,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC1C,gBAAQ,CAAC,IAAI,GAAG;IACd,QAAQ,EAAE,yBAAyB;IACnC,SAAS,EAAE,sCAAqB;IAChC,cAAc,EAAE,IAAI;IACpB,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,OAAO;IACjB,mBAAmB,EAAE,IAAI;IACzB,4BAA4B;IAC5B,WAAW,EAAE,yBAAW;IACxB,eAAe,EAAE,GAAG,EAAE,GAAE,CAAC;IACzB,iBAAiB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC3B,gBAAgB,EAAE,EAAE;IACpB,eAAe,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;IAClD,uBAAuB,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;IAC1D,kBAAkB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC5B,mBAAmB;IACnB,yIAAyI;IACzI,0BAA0B;CAC3B,CAAC;AAEW,QAAA,yBAAyB,GAAG,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3D,iCAAyB,CAAC,IAAI,GAAG;IAC/B,QAAQ,EAAE,oBAAoB;IAC9B,SAAS,EAAE,4CAA8B;IACzC,cAAc,EAAE,IAAI;IACpB,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,YAAY;IACtB,WAAW,EAAE,yBAAW;IACxB,eAAe,EAAE,GAAG,EAAE,GAAE,CAAC;IACzB,iBAAiB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC3B,gBAAgB,EAAE,EAAE;IACpB,YAAY,EAAE,4BAAa;IAC3B,kBAAkB,EAAE,GAAG,EAAE;QACvB,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAC/B,CAAC;IACD,kBAAkB,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,EAAE;QACtC,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;QAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACpC,CAAC;IACD,oBAAoB,EAAE,oBAAoB;IAC1C,SAAS,EAAE,CAAC;IACZ,eAAe,EAAE,EAAE;IACnB,cAAc,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,EAAE;QACzC,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,QAAQ,EAAE,YAAY,CAAC,CAAC;IAClD,CAAC;IACD,eAAe,EAAE,KAAK;IACtB,0BAA0B;CAC3B,CAAC;AAEW,QAAA,gCAAgC,GAAG,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAClE,wCAAgC,CAAC,IAAI,GAAG;IACtC,QAAQ,EAAE,yBAAyB;IACnC,SAAS,EAAE,2CAA0B;IACrC,cAAc,EAAE,IAAI;IACpB,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,OAAO;IACjB,4BAA4B;IAC5B,WAAW,EAAE,yBAAW;IACxB,eAAe,EAAE,GAAG,EAAE,GAAE,CAAC;IACzB,iBAAiB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC3B,gBAAgB,EAAE,EAAE;IACpB,eAAe,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;IACzD,uBAAuB,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;IACjE,kBAAkB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC5B,mBAAmB;IACnB,yIAAyI;IACzI,0BAA0B;CAC3B,CAAC;AAEW,QAAA,iCAAiC,GAAG,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACnE,yCAAiC,CAAC,IAAI,GAAG;IACvC,QAAQ,EAAE,yBAAyB;IACnC,SAAS,EAAE,2CAA0B;IACrC,cAAc,EAAE,IAAI;IACpB,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,OAAO;IACjB,4BAA4B;IAC5B,WAAW,EAAE,yBAAW;IACxB,eAAe,EAAE,GAAG,EAAE,GAAE,CAAC;IACzB,iBAAiB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC3B,gBAAgB,EAAE,EAAE;IACpB,eAAe,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;IACzD,uBAAuB,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC;IAClE,kBAAkB,EAAE,GAAG,EAAE,GAAE,CAAC;IAC5B,SAAS,EAAE,IAAI;IACf,cAAc,EAAE,CACd,gCACE,KAAK,EAAE;YACL,eAAe,EAAE,KAAK;YACtB,MAAM,EAAE,iBAAiB;YACzB,QAAQ,EAAE,OAAO;YACjB,SAAS,EAAE,OAAO;SACnB,YAGD,uBAAC,yBAAe,IACd,QAAQ,EAAE,wBAAwB,EAClC,SAAS,EAAE,iCAAgB,EAC3B,cAAc,EAAE,IAAI,EACpB,eAAe,EAAE,GAAG,EAAE,GAAE,CAAC,EACzB,iBAAiB,EAAE,GAAG,EAAE,GAAE,CAAC,EAC3B,SAAS,EAAE,KAAK,EAChB,WAAW,EAAE,0BAAW,EACxB,cAAc,EAAE,GAAG,EAAE,GAAE,CAAC,EACxB,YAAY,EAAE,EAAE,EAChB,WAAW,EAAE,EAAE,EACf,gBAAgB,EAAE,EAAE,EACpB,eAAe,EAAE,CAAC,IAAI,EAAE,KAAK,CAAC,EAC9B,kBAAkB,EAAE,GAAG,EAAE,GAAE,CAAC,EAC5B,qBAAqB,EAAE,GAAG,EAAE,GAAE,CAAC,GAC/B,GACE,CACP;IACD,mBAAmB,EAAE,CAAC;CACvB,CAAC"}

package/dist/components/Organisms/ReviewComponent/reviewUtils.d.ts

@@ -1164,11 +1164,15 @@ export const comprehensionBasedQuestionData: ({
         showGreen: boolean;
         dialog_entry_script?: undefined;
         solution_layout?: undefined;
+        sub_questions?: undefined;
+        sub_questions_count?: undefined;
     };
+    session_id?: undefined;
+    tutor_mode?: undefined;
 } | {
     count: string;
     history: ({
-        "
+        "acb6dffd-a5f3-43bc-9eaa-0db3bac348ea": ({
             role: string;
             content: string;
             descriptive_answer: {
@@ -1220,10 +1224,9 @@ export const comprehensionBasedQuestionData: ({
             is_user_concluded?: undefined;
             mode?: undefined;
         })[];
-        "
-        "5d98d17a-9f8b-4091-b236-6b295e5f3420"?: undefined;
+        "b7464c81-81e7-4352-9144-8ae13d97c197"?: undefined;
     } | {
-        "
+        "b7464c81-81e7-4352-9144-8ae13d97c197": ({
             role: string;
             content: string;
             descriptive_answer: {
@@ -1270,15 +1273,7 @@ export const comprehensionBasedQuestionData: ({
             is_user_concluded?: undefined;
             mode?: undefined;
         })[];
-        "
-        "5d98d17a-9f8b-4091-b236-6b295e5f3420"?: undefined;
-    } | {
-        "5d98d17a-9f8b-4091-b236-6b295e5f3420": {
-            role: string;
-            content: string;
-        }[];
-        "958c8f70-630e-48ce-b860-baea9fbe70d0"?: undefined;
-        "7eba34ce-8c75-416c-8b20-2af9fc7c23e1"?: undefined;
+        "acb6dffd-a5f3-43bc-9eaa-0db3bac348ea"?: undefined;
     })[];
     question_sequence: string;
     ref: {
@@ -1287,21 +1282,12 @@ export const comprehensionBasedQuestionData: ({
     summary: string;
     userAnswer: {
         overAllResult: boolean;
-        sub_questions:
+        sub_questions: {
             question_id: string;
             instance_id: string;
             user_answer: string;
-            result: boolean;
-            time_taken: number;
             chat_id: string;
-        }
-            question_id: string;
-            instance_id: string;
-            user_answer: string;
-            result: boolean;
-            time_taken: number;
-            chat_id: null;
-        })[];
+        }[];
         answerResult?: undefined;
     };
     questionObject: {
@@ -1314,61 +1300,6 @@ export const comprehensionBasedQuestionData: ({
         options_layout: never[];
         answers_layout: never[];
         sub_question: ({
-            question_type: string;
-            solution: string;
-            uuid: string;
-            question_layout: string;
-            options_layout: string[];
-            answers_layout: string[];
-            sub_question: never[];
-            version_uuid: string;
-            images_details: never[];
-            option_image_details: never[];
-            solution_layout: never[];
-            rubric: {
-                rubricId?: undefined;
-                curriculum?: undefined;
-                grade?: undefined;
-                subject?: undefined;
-                rubricType?: undefined;
-                name?: undefined;
-                metrics?: undefined;
-                totalMarks?: undefined;
-                createdAt?: undefined;
-                updatedAt?: undefined;
-                __v?: undefined;
-            };
-            hintandexplanations: never[];
-            answers_index_hashed_keys: never[];
-            syllabusMapping: {
-                curriculum: {
-                    id: string;
-                    title: string;
-                };
-                grade: {
-                    id: string;
-                    title: string;
-                };
-                subject: {
-                    id: string;
-                    title: string;
-                };
-                chapter: {
-                    id: string;
-                    title: string;
-                };
-                topic: {
-                    id: string;
-                };
-                difficulty: {
-                    id: string;
-                };
-                assessmentType: string;
-                cognitivLevel: string;
-            }[];
-            answer_schema: string;
-            showGreen: boolean;
-        } | {
             question_type: string;
             solution: string;
             uuid: string;
@@ -1463,6 +1394,7 @@ export const comprehensionBasedQuestionData: ({
                 __v?: undefined;
             };
             hintandexplanations: never[];
+            answers_index_hashed_keys: string[];
             syllabusMapping: {
                 curriculum: {
                     id: string;
@@ -1491,7 +1423,6 @@ export const comprehensionBasedQuestionData: ({
             }[];
             answer_schema: string;
             showGreen: boolean;
-            answers_index_hashed_keys?: undefined;
         })[];
         version_uuid: string;
         images_details: never[];
@@ -1527,7 +1458,17 @@ export const comprehensionBasedQuestionData: ({
         }[];
         answer_schema: string;
         showGreen: boolean;
+        sub_questions: {
+            question: string;
+            session_id: string;
+            tutor_mode: boolean;
+            user_answer: string;
+            result: null;
+        }[];
+        sub_questions_count: number;
     };
+    session_id: string;
+    tutor_mode: boolean;
 })[];
 import mcqStyle from "*.module.css";
 import fibStyle from "*.module.css";

package/dist/components/Organisms/ReviewComponent/reviewUtils.js

@@ -6,7 +6,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.comprehensionBasedQuestionData = exports.dummyOptionImages = exports.commonprops = exports.journeyData = exports.questionData = void 0;
 const Mcq_module_css_1 = __importDefault(require("../../../styles/QuestionLayout/Mcq.module.css"));
 const Fib_module_css_1 = __importDefault(require("../../../styles/QuestionLayout/Fib.module.css"));
-const
+const Scq_module_css_1 = __importDefault(require("../../../styles/QuestionLayout/Scq.module.css"));
 const QuestionLayout_module_css_1 = __importDefault(require("../../../styles/QuestionLayout/QuestionLayout.module.css"));
 const assistant_svg_1 = __importDefault(require("../../../assests/ai-tutor/assistant.svg"));
 const assistant_module_css_1 = __importDefault(require("../../Molecules/Tutor/Assistant/assistant.module.css"));
@@ -2369,7 +2369,7 @@ exports.journeyData = [
 exports.commonprops = {
     mcqStyle: Mcq_module_css_1.default,
     fibStyle: Fib_module_css_1.default,
-    scqStyle:
+    scqStyle: Scq_module_css_1.default,
     questionLayoutStyle: QuestionLayout_module_css_1.default,
     assistantStyle: assistant_module_css_1.default,
     factualStyle: factual_module_css_1.default,
@@ -2523,10 +2523,10 @@ exports.comprehensionBasedQuestionData = [
         count: "2",
         history: [
             {
-                "
+                "acb6dffd-a5f3-43bc-9eaa-0db3bac348ea": [
                     {
                         role: "user",
-                        content: "
+                        content: "test",
                         descriptive_answer: {
                             sample_answer: 'The friend’s suggestion to get an otter from the Tigris marshes shows he knew about local traditions. In Southern Iraq, otters were common and often kept as pets by Arabs, making it a normal and easy thing to do in that culture.<span data-checked="" style="border-color: var(--chakra-colors-chakra-border-color); margin-inline-start: 0.5rem; user-select: none; font-size: var(--chakra-fontSizes-md);"></span>',
                             rubricId: "68467d20730dcb509180b572",
@@ -2551,22 +2551,22 @@ exports.comprehensionBasedQuestionData = [
                             ],
                             result: [
                                 {
-                                    metric_description: "
+                                    metric_description: "Did the student interpret the significance of the friend's advice in the passage?",
                                     metric_name: "Interpret the observation",
                                     student_weight_score: 0,
-                                    student_weight_score_explanation: "The student
+                                    student_weight_score_explanation: "The student response ('test') does not address or interpret the significance of the friend's advice regarding where to obtain an otter.",
                                     weight: 1,
                                 },
                                 {
-                                    metric_description: "
+                                    metric_description: "Did the student explain what this reveals about local customs, using correct reasoning?",
                                     metric_name: "Explain correct process/reasoning",
                                     student_weight_score: 0,
-                                    student_weight_score_explanation: "The student
+                                    student_weight_score_explanation: "The student response does not provide any explanation or reasoning about local customs or the process described in the passage.",
                                     weight: 1,
                                 },
                             ],
-                            student_answer_missing: "You did not
-                            student_answer_well_done: "
+                            student_answer_missing: "You did not answer the question. Your response does not address the passage, the friend's advice, or what it reveals about local customs. Please provide a relevant answer that interprets the significance of the advice and explains its connection to local customs.",
+                            student_answer_well_done: "You did not provide any relevant content for evaluation. Please attempt the question to receive feedback on your answer.",
                             status: "wrong",
                             bandInfo: {
                                 _id: "6780f687b655707672e015ac",
@@ -2580,20 +2580,20 @@ exports.comprehensionBasedQuestionData = [
                         },
                         is_user_concluded: false,
                         mode: "Descriptive Answer",
-                        chatID: "
+                        chatID: "acb6dffd-a5f3-43bc-9eaa-0db3bac348ea",
                     },
                     {
                         role: "assistant",
-                        content: "You did not
-                        chatID: "
+                        content: "You did not answer the question. Your response does not address the passage, the friend's advice, or what it reveals about local customs. Please provide a relevant answer that interprets the significance of the advice and explains its connection to local customs.",
+                        chatID: "acb6dffd-a5f3-43bc-9eaa-0db3bac348ea",
                     },
                 ],
             },
             {
-                "
+                "b7464c81-81e7-4352-9144-8ae13d97c197": [
                     {
                         role: "user",
-                        content: "
+                        content: "test 2",
                         descriptive_answer: {
                             sample_answer: "The friend’s suggestion to get an otter from the Tigris marshes shows he knew about local traditions. In Southern Iraq, otters were common and often kept as pets by Arabs, making it a normal and easy thing to do in that culture.",
                             rubricId: "68467d48730dcb509180b57d",
@@ -2618,22 +2618,22 @@ exports.comprehensionBasedQuestionData = [
                             ],
                             result: [
                                 {
-                                    metric_description: "
+                                    metric_description: "Did the student interpret the significance of the friend's advice?",
                                     metric_name: "Interpret the observation",
                                     student_weight_score: 0,
-                                    student_weight_score_explanation: "
+                                    student_weight_score_explanation: "You did not attempt to interpret the significance of the friend's advice regarding where to obtain an otter. There is no relevant content addressing this part of the question.",
                                     weight: 0.5,
                                 },
                                 {
-                                    metric_description: "
+                                    metric_description: "Did the student explain what this reveals about local customs?",
                                     metric_name: "Explain correct process/reasoning",
                                     student_weight_score: 0,
-                                    student_weight_score_explanation: "
+                                    student_weight_score_explanation: "You did not provide any explanation or reasoning about what the advice reveals about local customs. The response is unrelated to the question.",
                                     weight: 0.5,
                                 },
                             ],
-                            student_answer_missing: "
-                            student_answer_well_done: "
+                            student_answer_missing: "You did not attempt to answer the question. Your response ('test 2') does not address the significance of the friend's advice or what it reveals about local customs. Please ensure your answer is relevant to the question asked.",
+                            student_answer_well_done: "There is nothing correct in your response as it does not address any part of the question.",
                             status: "partially_correct",
                             bandInfo: {
                                 status: "partially_correct",
@@ -2642,21 +2642,12 @@ exports.comprehensionBasedQuestionData = [
                         },
                         is_user_concluded: false,
                         mode: "Descriptive Answer",
-                        chatID: "
+                        chatID: "b7464c81-81e7-4352-9144-8ae13d97c197",
                     },
                     {
                         role: "assistant",
-                        content: "
-                        chatID: "
-                    },
-                ],
-            },
-            {
-                "5d98d17a-9f8b-4091-b236-6b295e5f3420": [
-                    { role: "user", content: "64" },
-                    {
-                        role: "assistant",
-                        content: "Well done! The smallest multiple of 4 that is also a perfect cube is indeed 64. This is because \\(64 = 4 \\times 16\\) and it can also be expressed as \\(4^3\\), satisfying both conditions of being a multiple of 4 and a perfect cube.#*#*#",
+                        content: "You did not attempt to answer the question. Your response ('test 2') does not address the significance of the friend's advice or what it reveals about local customs. Please ensure your answer is relevant to the question asked.",
+                        chatID: "b7464c81-81e7-4352-9144-8ae13d97c197",
                     },
                 ],
             },
@@ -2670,34 +2661,26 @@ exports.comprehensionBasedQuestionData = [
                 {
                     question_id: "d4c40dcd-c1b7-4979-bebf-81d596e72be8",
                     instance_id: "d4c40dcd-c1b7-4979-bebf-81d596e72be8",
-                    user_answer: '{"answerResult":[{"actualAnswer":"Because it was surrounded by water","givenAnswer":"Because it was
-
-                    time_taken: 34,
-                    chat_id: "5d98d17a-9f8b-4091-b236-6b295e5f3420",
+                    user_answer: '{"answerResult":[{"actualAnswer":"Because it was surrounded by water","givenAnswer":"Because it was close to a market","isCorrect":false,"time_taken":21}],"overAllResult":false}',
+                    chat_id: "0c20e1f4-570c-41a1-abc2-edcabeab7b3d",
                 },
                 {
                     question_id: "f3779228-dc94-402a-82c1-91db89378845",
                     instance_id: "f3779228-dc94-402a-82c1-91db89378845",
-                    user_answer: '{"answerResult":[{"actualAnswer":"na","givenAnswer":"
-
-                    time_taken: 20,
-                    chat_id: "958c8f70-630e-48ce-b860-baea9fbe70d0",
+                    user_answer: '{"answerResult":[{"actualAnswer":"na","givenAnswer":"ert","isCorrect":false,"time_taken":4}],"overAllResult":false}',
+                    chat_id: "e974c6dd-228c-4096-a6c2-5fec31e3ec0b",
                 },
                 {
                     question_id: "8a744457-b350-466c-8c03-22540103ca90",
                     instance_id: "8a744457-b350-466c-8c03-22540103ca90",
-                    user_answer: '{"answerResult":[{"actualAnswer":"na","givenAnswer":"
-
-                    time_taken: 11,
-                    chat_id: "7eba34ce-8c75-416c-8b20-2af9fc7c23e1",
+                    user_answer: '{"answerResult":[{"actualAnswer":"na","givenAnswer":"wer","isCorrect":false,"time_taken":5}],"overAllResult":false}',
+                    chat_id: "f6d6463c-cfcd-4ffa-b908-320936b26207",
                 },
                 {
                     question_id: "858a2884-a99d-498e-80eb-058e491bf65d",
                    instance_id: "858a2884-a99d-498e-80eb-058e491bf65d",
-                    user_answer: '{"answerResult":[{"actualAnswer":"
-
-                    time_taken: 4,
-                    chat_id: null,
+                    user_answer: '{"answerResult":[{"actualAnswer":"de7d1f7af851068bd0b33d49dc9de5e2","givenAnswer":"1f14bfadf3b73e3162b3c5142eaa8dd2","isCorrect":false,"time_taken":8}],"overAllResult":false}',
+                    chat_id: "63d05b21-cf57-47aa-ac27-30f3a05a1a7d",
                 },
             ],
         },
@@ -2934,7 +2917,7 @@ exports.comprehensionBasedQuestionData = [
                     uuid: "858a2884-a99d-498e-80eb-058e491bf65d",
                     question_layout: "An element has an atomic number of 17 and a mass number of 35. How many neutrons are present in its nucleus?",
                     options_layout: ["17", "18", "19", "35"],
-                    answers_layout: ["
+                    answers_layout: ["de7d1f7af851068bd0b33d49dc9de5e2"],
                     sub_question: [],
                     version_uuid: "858a2884-a99d-498e-80eb-058e491bf65d",
                     images_details: [],
@@ -2942,7 +2925,7 @@ exports.comprehensionBasedQuestionData = [
                     solution_layout: [],
                     rubric: {},
                     hintandexplanations: [],
-
+                    answers_index_hashed_keys: ["de7d1f7af851068bd0b33d49dc9de5e2"],
                     syllabusMapping: [
                         {
                             curriculum: {
@@ -3011,7 +2994,40 @@ exports.comprehensionBasedQuestionData = [
             ],
             answer_schema: "1",
             showGreen: true,
+            sub_questions: [
+                {
+                    question: "d4c40dcd-c1b7-4979-bebf-81d596e72be8",
+                    session_id: "68db992fea58d122e2bd1e5a",
+                    tutor_mode: true,
+                    user_answer: '{"chatId":"ccea69f7-e22a-40ec-a6bd-76a0b4bd8720","userAnswer":{"answerResult":[{"actualAnswer":"Because it was surrounded by water","givenAnswer":"Because it was close to a market","isCorrect":false,"time_taken":11}],"overAllResult":false},"sub_question_id":"d4c40dcd-c1b7-4979-bebf-81d596e72be8"}',
+                    result: null,
+                },
+                {
+                    question: "f3779228-dc94-402a-82c1-91db89378845",
+                    session_id: "68db992fea58d122e2bd1e5a",
+                    tutor_mode: false,
+                    user_answer: '{"chatId":"acb6dffd-a5f3-43bc-9eaa-0db3bac348ea","userAnswer":{"answerResult":[{"actualAnswer":"na","givenAnswer":"test","isCorrect":false,"time_taken":6}],"overAllResult":false},"sub_question_id":"f3779228-dc94-402a-82c1-91db89378845"}',
+                    result: null,
+                },
+                {
+                    question: "8a744457-b350-466c-8c03-22540103ca90",
+                    session_id: "68db992fea58d122e2bd1e5a",
+                    tutor_mode: true,
+                    user_answer: '{"chatId":"b7464c81-81e7-4352-9144-8ae13d97c197","userAnswer":{"answerResult":[{"actualAnswer":"na","givenAnswer":"test 2","isCorrect":false,"time_taken":4}],"overAllResult":false},"sub_question_id":"8a744457-b350-466c-8c03-22540103ca90"}',
+                    result: null,
+                },
+                {
+                    question: "858a2884-a99d-498e-80eb-058e491bf65d",
+                    session_id: "68db992fea58d122e2bd1e5a",
+                    tutor_mode: true,
+                    user_answer: '{"chatId":"63fa15ac-97ef-49ae-8fc9-e634e89d813b","userAnswer":{"answerResult":[{"actualAnswer":"de7d1f7af851068bd0b33d49dc9de5e2","givenAnswer":"1f14bfadf3b73e3162b3c5142eaa8dd2","isCorrect":false,"time_taken":4}],"overAllResult":false},"sub_question_id":"858a2884-a99d-498e-80eb-058e491bf65d"}',
+                    result: null,
+                },
+            ],
+            sub_questions_count: 4,
         },
+        session_id: "68d282271bcb2065c52941ce",
+        tutor_mode: true,
     },
 ];
 //# sourceMappingURL=reviewUtils.js.map