Merge branch 'Project-MONAI:dev' into 3781-downsampling-augmentation-transform

aaronkujawa authored Jul 31, 2023
2 parents 774b6f1 + c4ff70b commit 4090c36
Showing 4 changed files with 6 additions and 3 deletions.

.github/workflows/codeql-analysis.yml (3 additions, 0 deletions)

@@ -64,6 +64,9 @@ jobs:
     - name: Build
       run: |
+        rm -rf /opt/hostedtoolcache/{node,go,Ruby,Java*}
+        ls -al /opt/hostedtoolcache
+        rm -rf /usr/share/dotnet/
         python -m pip install -U pip wheel
         python -m pip install -r requirements-dev.txt
         BUILD_MONAI=1 ./runtests.sh --build
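
The three lines added above delete preinstalled toolchains (Node, Go, Ruby, Java, .NET) from the GitHub-hosted runner, presumably to free disk space before the MONAI build; the commit does not state the motivation, so that reading is an assumption. A minimal Python sketch, separate from the workflow, for checking how much space such directories occupy on a runner:

import shutil
from pathlib import Path

def size_gib(root: str) -> float:
    """Total size of regular files under `root`, in GiB (best effort)."""
    total = 0
    for f in Path(root).rglob("*"):
        try:
            if f.is_file() and not f.is_symlink():
                total += f.stat().st_size
        except OSError:
            continue  # skip unreadable entries
    return total / 2**30

# Paths are specific to GitHub-hosted Ubuntu runners; they may not exist locally.
for root in ("/opt/hostedtoolcache", "/usr/share/dotnet"):
    if Path(root).exists():
        print(f"{root}: ~{size_gib(root):.1f} GiB")
print(f"free on /: ~{shutil.disk_usage('/').free / 2**30:.1f} GiB")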

tests/test_selfattention.py (1 addition, 1 deletion)

@@ -62,7 +62,7 @@ def test_access_attn_matrix(self):
         # be not able to access the matrix
         no_matrix_acess_blk = SABlock(hidden_size=hidden_size, num_heads=num_heads, dropout_rate=dropout_rate)
         no_matrix_acess_blk(torch.randn(input_shape))
-        assert type(no_matrix_acess_blk.att_mat) == torch.Tensor
+        assert isinstance(no_matrix_acess_blk.att_mat, torch.Tensor)
         # no of elements is zero
         assert no_matrix_acess_blk.att_mat.nelement() == 0
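
The one-line change above, repeated in the two test files below, swaps an exact type comparison for isinstance. type(x) == torch.Tensor rejects torch.Tensor subclasses such as MONAI's MetaTensor, and it is the kind of comparison flake8's E721 rule flags, whereas isinstance(x, torch.Tensor) accepts the base class and its subclasses alike. A minimal sketch of the difference, using a hypothetical TaggedTensor subclass:

import torch

class TaggedTensor(torch.Tensor):
    """Hypothetical torch.Tensor subclass (stands in for e.g. monai.data.MetaTensor)."""

x = torch.randn(2, 3).as_subclass(TaggedTensor)

print(type(x) == torch.Tensor)      # False: exact-type comparison rejects the subclass
print(isinstance(x, torch.Tensor))  # True: isinstance also accepts subclasses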

tests/test_transformerblock.py (1 addition, 1 deletion)

@@ -66,7 +66,7 @@ def test_access_attn_matrix(self):
             hidden_size=hidden_size, mlp_dim=mlp_dim, num_heads=num_heads, dropout_rate=dropout_rate
         )
         no_matrix_acess_blk(torch.randn(input_shape))
-        assert type(no_matrix_acess_blk.attn.att_mat) == torch.Tensor
+        assert isinstance(no_matrix_acess_blk.attn.att_mat, torch.Tensor)
         # no of elements is zero
         assert no_matrix_acess_blk.attn.att_mat.nelement() == 0

tests/test_vit.py (1 addition, 1 deletion)

@@ -160,7 +160,7 @@ def test_access_attn_matrix(self):
         # no data in the matrix
         no_matrix_acess_blk = ViT(in_channels=in_channels, img_size=img_size, patch_size=patch_size)
         no_matrix_acess_blk(torch.randn(in_shape))
-        assert type(no_matrix_acess_blk.blocks[0].attn.att_mat) == torch.Tensor
+        assert isinstance(no_matrix_acess_blk.blocks[0].attn.att_mat, torch.Tensor)
         # no of elements is zero
         assert no_matrix_acess_blk.blocks[0].attn.att_mat.nelement() == 0
