Commit 00aa1ca

Merge pull request #42 from NeurodataWithoutBorders/fix-tests
Fix tests and add sanitizer to workflows
stephprince authored Jul 31, 2024
2 parents 8354798 + 58d68e0 commit 00aa1ca
Showing 9 changed files with 127 additions and 111 deletions.
42 changes: 40 additions & 2 deletions .github/workflows/tests.yml
@@ -11,7 +11,7 @@ on:
workflow_dispatch:

jobs:
tests:
test:
defaults:
run:
shell: bash
@@ -65,8 +65,46 @@ jobs:
path: |
build/tests/data/*.nwb
  sanitize:
    needs: test

    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y libhdf5-dev libboost-all-dev
          git clone https://github.com/catchorg/Catch2.git
          cd Catch2
          git checkout "v3.5.3"
          cmake -Bbuild -H. -DBUILD_TESTING=OFF
          sudo cmake --build build/ --target install
      - name: Configure
        run: cmake --preset=ci-sanitize

      - name: Build
        run: cmake --build build/sanitize -j 2

      - name: Test
        working-directory: build/sanitize
        env:
          ASAN_OPTIONS: "strict_string_checks=1:\
            detect_stack_use_after_return=1:\
            check_initialization_order=1:\
            strict_init_order=1:\
            detect_leaks=1:\
            halt_on_error=1"
          UBSAN_OPTIONS: "print_stacktrace=1:\
            halt_on_error=1"
        run: ctest --output-on-failure --no-tests=error -j 2

validate:
needs: tests
needs: [test, sanitize]
defaults:
run:
shell: bash
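
The new sanitize job builds with the ci-sanitize CMake preset and runs the tests under AddressSanitizer and UndefinedBehaviorSanitizer. As a rough, standalone illustration (not code from this repository) of the kind of defect detect_leaks=1 reports:

// leak_example.cpp -- illustrative only; compile with
//   g++ -fsanitize=address,undefined -g leak_example.cpp
#include <cstddef>

int* makeBuffer(std::size_t n)
{
  return new int[n];  // allocation is never freed
}

int main()
{
  int* buffer = makeBuffer(16);
  buffer[0] = 42;
  return 0;  // with detect_leaks=1, ASan prints a leak report at exit
}

With halt_on_error=1 such a report fails the corresponding ctest run, which is why the validate job below now depends on both test and sanitize.
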
18 changes: 8 additions & 10 deletions src/BaseIO.hpp
@@ -2,6 +2,7 @@

#include <cstdint>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

@@ -246,17 +247,19 @@ class BaseIO
* @param path The location in the file of the new dataset.
* @return A pointer to the created dataset.
*/
virtual BaseRecordingData* createArrayDataSet(const BaseDataType& type,
const SizeArray& size,
const SizeArray& chunking,
const std::string& path) = 0;
virtual std::unique_ptr<BaseRecordingData> createArrayDataSet(
const BaseDataType& type,
const SizeArray& size,
const SizeArray& chunking,
const std::string& path) = 0;

/**
* @brief Returns a pointer to a dataset at a given path.
* @param path The location in the file of the dataset.
* @return A pointer to the dataset.
*/
virtual BaseRecordingData* getDataSet(const std::string& path) = 0;
virtual std::unique_ptr<BaseRecordingData> getDataSet(
const std::string& path) = 0;

/**
* @brief Convenience function for creating NWB related attributes.
@@ -383,11 +386,6 @@ class BaseRecordingData
const void* data) = 0;

protected:
/**
* @brief The current position in the x dimension.
*/
SizeType xPos;

/**
* @brief The size of the dataset in each dimension.
*/
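
With the interface change above, callers receive owning smart pointers instead of raw pointers. A minimal caller-side sketch, assuming the include path and the helper name shown here (the path string is hypothetical):

#include <memory>
#include <string>

#include "BaseIO.hpp"  // assumed include for AQNWB::BaseIO / BaseRecordingData

// Hypothetical helper: fetch a dataset and let RAII manage its lifetime.
void inspectDataset(AQNWB::BaseIO& io, const std::string& path)
{
  std::unique_ptr<AQNWB::BaseRecordingData> dset = io.getDataSet(path);
  if (dset == nullptr) {
    return;  // dataset missing or of an unexpected type
  }
  // ... read or write through dset ...
}  // dset is destroyed here; no manual delete as with the old raw-pointer API
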
22 changes: 14 additions & 8 deletions src/hdf5/HDF5IO.cpp
@@ -377,7 +377,8 @@ Status HDF5IO::createStringDataSet(const std::string& path,
return Status::Success;
}

AQNWB::BaseRecordingData* HDF5IO::getDataSet(const std::string& path)
std::unique_ptr<AQNWB::BaseRecordingData> HDF5IO::getDataSet(
const std::string& path)
{
std::unique_ptr<DataSet> data;

@@ -386,7 +387,7 @@ AQNWB::BaseRecordingData* HDF5IO::getDataSet(const std::string& path)

try {
data = std::make_unique<H5::DataSet>(file->openDataSet(path));
return new HDF5RecordingData(data.release());
return std::make_unique<HDF5RecordingData>(std::move(data));
} catch (DataSetIException error) {
error.printErrorStack();
return nullptr;
@@ -399,10 +400,11 @@ AQNWB::BaseRecordingData* HDF5IO::getDataSet(const std::string& path)
}
}

AQNWB::BaseRecordingData* HDF5IO::createArrayDataSet(const BaseDataType& type,
const SizeArray& size,
const SizeArray& chunking,
const std::string& path)
std::unique_ptr<AQNWB::BaseRecordingData> HDF5IO::createArrayDataSet(
const BaseDataType& type,
const SizeArray& size,
const SizeArray& chunking,
const std::string& path)
{
std::unique_ptr<DataSet> data;
DSetCreatPropList prop;
@@ -415,6 +417,9 @@ AQNWB::BaseRecordingData* HDF5IO::createArrayDataSet(const BaseDataType& type,
if (dimension < 1) // Check for at least one dimension
return nullptr;

// Ensure chunking is properly allocated and has at least 'dimension' elements
assert(chunking.size() >= dimension);

// Use vectors to support an arbitrary number of dimensions
std::vector<hsize_t> dims(dimension), chunk_dims(dimension),
max_dims(dimension);
@@ -435,7 +440,8 @@ AQNWB::BaseRecordingData* HDF5IO::createArrayDataSet(const BaseDataType& type,

data = std::make_unique<H5::DataSet>(
file->createDataSet(path, H5type, dSpace, prop));
return new HDF5RecordingData(data.release());

return std::make_unique<HDF5RecordingData>(std::move(data));
}

H5O_type_t HDF5IO::getObjectType(const std::string& path)
@@ -558,7 +564,7 @@ H5::DataType HDF5IO::getH5Type(BaseDataType type)
}

// HDF5RecordingData
HDF5RecordingData::HDF5RecordingData(H5::DataSet* data)
HDF5RecordingData::HDF5RecordingData(std::unique_ptr<H5::DataSet> data)
{
DataSpace dSpace = data->getSpace();
DSetCreatPropList prop = data->getCreatePlist();
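
The constructor and both factory methods now transfer ownership of the underlying H5::DataSet explicitly instead of pairing data.release() with new. A standalone sketch of that pattern using stand-in types, not the library's classes:

#include <memory>
#include <utility>

struct Handle {};  // stand-in for H5::DataSet

class RecordingData  // stand-in for HDF5RecordingData
{
public:
  explicit RecordingData(std::unique_ptr<Handle> data)
      : m_data(std::move(data))
  {
  }

private:
  std::unique_ptr<Handle> m_data;  // wrapper now owns the dataset handle
};

std::unique_ptr<RecordingData> makeRecordingData()
{
  auto handle = std::make_unique<Handle>();
  // Before: return new RecordingData(handle.release());
  // After: ownership is transferred explicitly and the result is itself
  //        an owning pointer, so nothing leaks on early-return error paths.
  return std::make_unique<RecordingData>(std::move(handle));
}
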
14 changes: 8 additions & 6 deletions src/hdf5/HDF5IO.hpp
@@ -185,17 +185,19 @@ class HDF5IO : public BaseIO
* @param path The location in the file of the new dataset.
* @return A pointer to the created dataset.
*/
BaseRecordingData* createArrayDataSet(const BaseDataType& type,
const SizeArray& size,
const SizeArray& chunking,
const std::string& path) override;
std::unique_ptr<BaseRecordingData> createArrayDataSet(
const BaseDataType& type,
const SizeArray& size,
const SizeArray& chunking,
const std::string& path) override;

/**
* @brief Returns a pointer to a dataset at a given path.
* @param path The location in the file of the dataset.
* @return A pointer to the dataset.
*/
BaseRecordingData* getDataSet(const std::string& path) override;
std::unique_ptr<BaseRecordingData> getDataSet(
const std::string& path) override;

/**
* @brief Returns the HDF5 type of object at a given path.
@@ -246,7 +248,7 @@ class HDF5RecordingData : public BaseRecordingData
* @brief Constructs an HDF5RecordingData object.
* @param data A pointer to the HDF5 dataset.
*/
HDF5RecordingData(H5::DataSet* data);
HDF5RecordingData(std::unique_ptr<H5::DataSet> data);

/**
* @brief Deleted copy constructor to prevent construction-copying.
2 changes: 1 addition & 1 deletion src/nwb/NWBFile.cpp
@@ -115,7 +115,7 @@ Status NWBFile::createElectricalSeries(
"Stores continuously sampled voltage data from an "
"extracellular ephys recording",
SizeArray {0, channelVector.size()},
SizeArray {CHUNK_XSIZE});
SizeArray {CHUNK_XSIZE, 0});
electricalSeries->initialize();
recordingContainers->addData(std::move(electricalSeries));

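
The one-line change above supplies a chunking entry for every dataset dimension: the ElectricalSeries dataset is 2-D (samples x channels), so a single-element SizeArray would trip the assert added to HDF5IO::createArrayDataSet. A hedged sketch of the constraint, treating SizeArray as std::vector<SizeType> and using 4096 as a stand-in for CHUNK_XSIZE (whose real value is defined elsewhere in the repository):

#include <cassert>
#include <cstddef>
#include <vector>

using SizeType = std::size_t;
using SizeArray = std::vector<SizeType>;

// Mirrors the added sanity check: one chunk extent per dataset dimension.
void checkChunking(const SizeArray& size, const SizeArray& chunking)
{
  const SizeType dimension = size.size();
  assert(chunking.size() >= dimension);
}

int main()
{
  const SizeType numChannels = 32;
  // Old call site: SizeArray {CHUNK_XSIZE}     -> one entry for a 2-D dataset
  // New call site: SizeArray {CHUNK_XSIZE, 0}  -> one entry per dimension
  checkChunking(SizeArray {0, numChannels}, SizeArray {4096, 0});
  return 0;
}
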
44 changes: 10 additions & 34 deletions tests/testBase.cpp
@@ -37,48 +37,24 @@ TEST_CASE("TimeSeries", "[base]")

// Read data back from file
double* tsBuffer = new double[numSamples];
BaseRecordingData* tsDset = io->getDataSet(dataPath + "/timestamps");
readH5DataBlock(static_cast<HDF5::HDF5RecordingData*>(tsDset)->getDataSet(),
timestampsType,
tsBuffer);
std::unique_ptr<BaseRecordingData> tsDset =
io->getDataSet(dataPath + "/timestamps");
std::unique_ptr<HDF5::HDF5RecordingData> tsH5Dataset(
dynamic_cast<HDF5::HDF5RecordingData*>(tsDset.release()));
readH5DataBlock(tsH5Dataset->getDataSet(), timestampsType, tsBuffer);
std::vector<double> tsRead(tsBuffer, tsBuffer + numSamples);
delete[] tsBuffer;
REQUIRE(tsRead == timestamps);

// Read data back from file
float* dataBuffer = new float[numSamples];
BaseRecordingData* dataDset = io->getDataSet(dataPath + "/data");
readH5DataBlock(
static_cast<HDF5::HDF5RecordingData*>(dataDset)->getDataSet(),
dataType,
dataBuffer);
std::unique_ptr<BaseRecordingData> dataDset =
io->getDataSet(dataPath + "/data");
std::unique_ptr<HDF5::HDF5RecordingData> dataH5Dataset(
dynamic_cast<HDF5::HDF5RecordingData*>(dataDset.release()));
readH5DataBlock(dataH5Dataset->getDataSet(), dataType, dataBuffer);
std::vector<float> dataRead(dataBuffer, dataBuffer + numSamples);
delete[] dataBuffer;
REQUIRE_THAT(dataRead, Catch::Matchers::Approx(data).margin(1));
}

SECTION("test writing timeseries without timestamps")
{
// setup timeseries object
std::string path = getTestFilePath("testTimeseriesNoTimestamps.h5");
std::shared_ptr<BaseIO> io = createIO("HDF5", path);
io->open();
NWB::TimeSeries ts = NWB::TimeSeries(dataPath, io, dataType, "unit");
ts.initialize();

// Write data to file
Status writeStatus = ts.writeData(dataShape, positionOffset, data.data());
REQUIRE(writeStatus == Status::Success);

// Read data back from file
double* tsBuffer = new double[numSamples];
BaseRecordingData* tsDset = io->getDataSet(dataPath + "/timestamps");
readH5DataBlock(static_cast<HDF5::HDF5RecordingData*>(tsDset)->getDataSet(),
timestampsType,
tsBuffer);
std::vector<double> tsRead(tsBuffer, tsBuffer + numSamples);
delete[] tsBuffer;
std::vector<double> zeros(numSamples, 0.0);
REQUIRE(tsRead == zeros);
}
}
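
The tests now move the unique_ptr returned by getDataSet into an HDF5-specific handle by releasing the base pointer and re-wrapping the dynamic_cast result. A generic sketch of that idiom with stand-in types; checking the cast before release() is a slightly defensive variant, whereas the test code casts the released pointer directly because the concrete type is known:

#include <memory>

struct Base { virtual ~Base() = default; };
struct Derived : Base {};

std::unique_ptr<Derived> downcast(std::unique_ptr<Base> base)
{
  Derived* raw = dynamic_cast<Derived*>(base.get());
  if (raw == nullptr) {
    return nullptr;  // wrong dynamic type; base still owns and frees the object
  }
  base.release();  // hand over ownership only after the cast succeeds
  return std::unique_ptr<Derived>(raw);
}
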
6 changes: 4 additions & 2 deletions tests/testEcephys.cpp
@@ -63,7 +63,8 @@ TEST_CASE("ElectricalSeries", "[ecephys]")
elecTable.getPath(),
"volts",
"no description",
SizeArray {0, mockArrays[0].size()});
SizeArray {0, mockArrays[0].size()},
SizeArray {1, 1});
es.initialize();

// write channel data
@@ -118,7 +119,8 @@ TEST_CASE("ElectricalSeries", "[ecephys]")
elecTable.getPath(),
"volts",
"no description",
SizeArray {0, mockArrays[0].size()});
SizeArray {0, mockArrays[0].size()},
SizeArray {1, 1});
es.initialize();

// write channel data in segments
9 changes: 4 additions & 5 deletions tests/testFile.cpp
@@ -35,12 +35,11 @@ TEST_CASE("ElectrodeTable", "[ecephys]")

// Check if id datasets are created correctly
SizeType numChannels = 3;
BaseRecordingData* id_data = io->getDataSet(path + "id");
std::unique_ptr<BaseRecordingData> id_data = io->getDataSet(path + "id");
std::unique_ptr<HDF5::HDF5RecordingData> idDataset(
dynamic_cast<HDF5::HDF5RecordingData*>(id_data.release()));
int* buffer = new int[numChannels];
readH5DataBlock(
static_cast<HDF5::HDF5RecordingData*>(id_data)->getDataSet(),
BaseDataType::I32,
buffer);
readH5DataBlock(idDataset->getDataSet(), BaseDataType::I32, buffer);
std::vector<SizeType> read_channels(buffer, buffer + numChannels);
delete[] buffer;
REQUIRE(channelIDs == read_channels);