
refactor: test only peer-steps instead of peer-self
mariajgrimaldi committed May 7, 2024
1 parent d323dc9 commit 0d8975b
Showing 3 changed files with 16 additions and 14 deletions.
1 change: 0 additions & 1 deletion openassessment/xblock/test/data/peer_assessment_mean_grading_strategy_scenario.xml
@@ -44,6 +44,5 @@
     </rubric>
     <assessments>
         <assessment name="peer-assessment" must_grade="5" must_be_graded_by="3" enable_flexible_grading="false" grading_strategy="mean"/>
-        <assessment name="self-assessment" />
     </assessments>
 </openassessment>
1 change: 0 additions & 1 deletion openassessment/xblock/test/data/peer_assessment_median_grading_strategy_scenario.xml
@@ -44,6 +44,5 @@
     </rubric>
     <assessments>
         <assessment name="peer-assessment" must_grade="5" must_be_graded_by="3" enable_flexible_grading="false" grading_strategy="median"/>
-        <assessment name="self-assessment" />
     </assessments>
 </openassessment>
28 changes: 16 additions & 12 deletions openassessment/xblock/test/test_grade_explanation.py
@@ -69,18 +69,22 @@ def test_render_explanation_grade_staff_only(self, xblock, assessment_score_priority):
         mock_send_staff_notification.assert_called_once()
 
     @scenario("data/peer_assessment_mean_grading_strategy_scenario.xml", user_id='Bernard')
-    @data(*assessment_score_priority)
-    def test_render_grade_explanation_peer_mean_calculation(self, xblock, assessment_score_priority):
-        with patch(
-            'openassessment.workflow.models.AssessmentWorkflow.ASSESSMENT_SCORE_PRIORITY',
-            assessment_score_priority
-        ):
-            self.create_submission_and_assessments(
-                xblock, self.SUBMISSION, self.PEERS, PEER_ASSESSMENTS, None
-            )
-            resp = self.request(xblock, 'render_grade', json.dumps({}))
-
-            self.assertIn(self.second_sentences_options["peer_median_default"], resp.decode('utf-8'))
+    def test_render_grade_explanation_peer_only_mean_calculation(self, xblock):
+        self.create_submission_and_assessments(
+            xblock, self.SUBMISSION, self.PEERS, PEER_ASSESSMENTS, None
+        )
+        resp = self.request(xblock, 'render_grade', json.dumps({}))
+
+        self.assertIn(self.second_sentences_options["peer_mean"], resp.decode('utf-8'))
+
+    @scenario("data/peer_assessment_median_grading_strategy_scenario.xml", user_id='Bernard')
+    def test_render_grade_explanation_peer_only_median_calculation(self, xblock):
+        self.create_submission_and_assessments(
+            xblock, self.SUBMISSION, self.PEERS, PEER_ASSESSMENTS, None
+        )
+        resp = self.request(xblock, 'render_grade', json.dumps({}))
+
+        self.assertIn(self.second_sentences_options["peer_mean"], resp.decode('utf-8'))
 
     @scenario('data/grade_scenario_peer_only.xml', user_id='Bernard')
     @data(*assessment_score_priority)
