add multi_target_input tests
JakaKokosar committed Mar 10, 2022
1 parent 63b7129 commit 50eb9a0
Showing 2 changed files with 57 additions and 1 deletion.
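
Both new tests build the same kind of fixture: a domain with a single attribute and two class variables of different types (one continuous, one discrete), which is what makes the input multi-target. A minimal standalone sketch of that fixture, outside the widget test harness (the comment about data.Y is an assumption about how Orange lays out multi-target tables):

from Orange.data import Domain, ContinuousVariable, DiscreteVariable, Table

# One feature, two targets of mixed type: a multi-target domain.
domain = Domain(
    [ContinuousVariable('var1')],
    class_vars=[
        ContinuousVariable('c1'),
        DiscreteVariable('c2', values=('no', 'yes')),
    ],
)
data = Table.from_list(domain, [[1, 5, 0], [2, 10, 1]])

print(len(domain.class_vars))  # 2 -> multi-target
print(data.Y.shape)            # expected (2, 2): one column per target
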
21 changes: 21 additions & 0 deletions Orange/widgets/evaluate/tests/test_owpredictions.py
@@ -877,6 +877,27 @@ def test_change_target(self):
        self.assertEqual(table.model.columnCount(), 4)
        self.assertEqual(float(table.model.data(table.model.index(0, 3))),
                         idx)

    def test_multi_target_input(self):
        widget = self.widget

        domain = Domain([ContinuousVariable('var1')],
                        class_vars=[
                            ContinuousVariable('c1'),
                            DiscreteVariable('c2', values=('no', 'yes'))
                        ])
        data = Table.from_list(domain, [[1, 5, 0], [2, 10, 1]])

        mock_model = Mock(spec=Model, return_value=np.asarray([0.2, 0.1]))
        mock_model.name = 'Mockery'
        mock_model.domain = domain
        mock_learner = Mock(return_value=mock_model)
        model = mock_learner(data)

        self.send_signal(widget.Inputs.data, data)
        self.send_signal(widget.Inputs.predictors, model, 1)
        pred = self.get_output(widget.Outputs.predictions)
        self.assertIsInstance(pred, Table)

    def test_report(self):
        widget = self.widget
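The predictor sent to the widget above is faked with unittest.mock: Mock(spec=Model, return_value=...) gives an object that passes isinstance checks against Model and returns the canned array whenever it is called, so no real learner needs to be trained. A small illustration of that pattern on its own (illustrative only, not part of the commit):

import numpy as np
from unittest.mock import Mock
from Orange.base import Model

# A stand-in model: calling it returns fixed 'predictions'.
mock_model = Mock(spec=Model, return_value=np.asarray([0.2, 0.1]))
mock_model.name = 'Mockery'

print(isinstance(mock_model, Model))  # True, because of spec=Model
print(mock_model(None))               # [0.2 0.1], the canned return value
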
37 changes: 36 additions & 1 deletion Orange/widgets/evaluate/tests/test_owtestandscore.py
@@ -16,7 +16,7 @@
from Orange.evaluation import Results, TestOnTestData, scoring
from Orange.evaluation.scoring import ClassificationScore, RegressionScore, \
    Score
-from Orange.base import Learner
+from Orange.base import Learner, Model
from Orange.modelling import ConstantLearner
from Orange.regression import MeanLearner
from Orange.widgets.evaluate.owtestandscore import (
@@ -720,6 +720,41 @@ def test_copy_to_clipboard(self):
                                   for i in (0, 3, 4, 5, 6, 7)]) + "\r\n"
        self.assertEqual(clipboard_text, view_text)

    def test_multi_target_input(self):
        class NewScorer(Score):
            class_types = (
                ContinuousVariable,
                DiscreteVariable,
            )
            problem_type = 'new_problem_type'

            def compute_score(self, results):
                return [0.75]

        domain = Domain([ContinuousVariable('var1')],
                        class_vars=[
                            ContinuousVariable('c1'),
                            DiscreteVariable('c2', values=('no', 'yes'))
                        ])
        data = Table.from_list(domain, [[1, 5, 0], [2, 10, 1], [2, 10, 1]])
        data.attributes = {'problem_type': 'new_problem_type'}

        mock_model = Mock(spec=Model, return_value=np.asarray([[0.2, 0.1, 0.2]]))
        mock_model.name = 'Mockery'
        mock_model.domain = domain
        mock_learner = Mock(spec=Learner, return_value=mock_model)
        mock_learner.name = 'Mockery'

        self.widget.resampling = OWTestAndScore.TestOnTrain
        self.send_signal(self.widget.Inputs.train_data, data)
        self.send_signal(self.widget.Inputs.learner, MajorityLearner(), 0)
        self.send_signal(self.widget.Inputs.learner, mock_learner, 1)
        _ = self.get_output(self.widget.Outputs.evaluations_results, wait=5000)
        self.assertEqual(len(self.widget.scorers), 1)
        self.assertIn(NewScorer, self.widget.scorers)
        self.assertEqual(len(self.widget._successful_slots()), 1)


class TestHelpers(unittest.TestCase):
    def test_results_one_vs_rest(self):
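The OWTestAndScore test above hinges on a problem_type handshake: the table advertises the tag through data.attributes, and the throwaway Score subclass declares the same tag along with the class types it supports, so the widget is expected to end up with NewScorer as its only scorer. A sketch of just that handshake (the widget's scorer-selection internals are not part of this diff, so the final comparison only mirrors what the test asserts):

from Orange.data import ContinuousVariable, DiscreteVariable
from Orange.evaluation.scoring import Score

class NewScorer(Score):
    # Target types the scorer handles, plus the tag the widget matches
    # against data.attributes['problem_type'].
    class_types = (ContinuousVariable, DiscreteVariable)
    problem_type = 'new_problem_type'

    def compute_score(self, results):
        return [0.75]

table_attributes = {'problem_type': 'new_problem_type'}  # as set on the test table
print(NewScorer.problem_type == table_attributes['problem_type'])  # True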
