Skip to content

Commit

Permalink
softmax and argmax/argmin no longer xfail
Browse files Browse the repository at this point in the history
  • Loading branch information
hughperkins committed Nov 25, 2016
1 parent 51f3295 commit 3ae3317
Show file tree
Hide file tree
Showing 4 changed files with 3 additions and 6 deletions.
1 change: 0 additions & 1 deletion tensorflow/stream_executor/cl/test/test_gradients.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
learning_rate = 0.1


@pytest.mark.xfail
def test_gradients():
# lets learn or
# we'll use one-hot, with 2 binary inputs, so 4 input neurons in total
Expand Down
4 changes: 2 additions & 2 deletions tensorflow/stream_executor/cl/test/test_softmax.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ def softmax(x):

@pytest.mark.parametrize('size', [
(1, 3),
pytest.mark.xfail((2, 3)),
pytest.mark.xfail((3, 10))
(2, 3),
(3, 10)
])
def test_softmax(size):
print('size', size)
Expand Down
2 changes: 0 additions & 2 deletions tensorflow/stream_executor/cl/test/test_unary_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,6 @@ def get_test_params():
if dtype == 'uint8' and tf_func in ['abs', 'square', 'neg']:
continue
mark = nop
if tf_func in ['argmax', 'argmin']:
mark = pytest.mark.xfail
tests.append({'mark': mark, 'dtype': dtype, 'tf_func': tf_func, 'py_func': py_func})
return tests

Expand Down
2 changes: 1 addition & 1 deletion third_party/cuda-on-cl

0 comments on commit 3ae3317

Please sign in to comment.