/*****************************************************************************************
 *                                                                                       *
 * OpenSpace                                                                             *
 *                                                                                       *
 * Copyright (c) 2014-2016                                                               *
 *                                                                                       *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this  *
 * software and associated documentation files (the "Software"), to deal in the Software *
 * without restriction, including without limitation the rights to use, copy, modify,    *
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to    *
 * permit persons to whom the Software is furnished to do so, subject to the following   *
 * conditions:                                                                           *
 *                                                                                       *
 * The above copyright notice and this permission notice shall be included in all copies *
 * or substantial portions of the Software.                                              *
 *                                                                                       *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,   *
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A         *
 * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT    *
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF  *
 * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE  *
 * OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                                         *
 ****************************************************************************************/

//#include
#include <modules/multiresvolume/rendering/tsp.h>

// ghoul
#include <ghoul/glm.h>
#include <ghoul/logging/logmanager.h>
#include <ghoul/filesystem/filesystem.h>
#include <ghoul/filesystem/cachemanager.h>

// std
#include <algorithm>
#include <cmath>
#include <queue>

namespace {
    const std::string _loggerCat = "TSP";
}

namespace openspace {

TSP::TSP(const std::string& filename)
    : _filename(filename)
    , _dataSSBO(0)
    , paddedBrickDim_(0)
    , numTotalNodes_(0)
    , numBSTLevels_(0)
    , numBSTNodes_(0)
    , numOTLevels_(0)
    , numOTNodes_(0)
    , minSpatialError_(0.0f)
    , maxSpatialError_(0.0f)
    , medianSpatialError_(0.0f)
    , minTemporalError_(0.0f)
    , maxTemporalError_(0.0f)
    , medianTemporalError_(0.0f)
{
    _file.open(_filename, std::ios::in | std::ios::binary);
}

TSP::~TSP() {
    if (_file.is_open())
        _file.close();
}

bool TSP::load() {
    if (!readHeader()) {
        LERROR("Could not read header");
        return false;
    }

    if (readCache()) {
    //if (false) {
        LINFO("Using cache");
    }
    else {
        if (!construct()) {
            LERROR("Could not construct");
            return false;
        }

        if (false) {
            if (!calculateSpatialError()) {
                LERROR("Could not calculate spatial error");
                return false;
            }
            if (!calculateTemporalError()) {
                LERROR("Could not calculate temporal error");
                return false;
            }
            if (!writeCache()) {
                LERROR("Could not write cache");
                return false;
            }
        }
    }

    initalizeSSO();
    return true;
}

bool TSP::readHeader() {
    if (!_file.good())
        return false;

    _file.seekg(_file.beg);

    _file.read(reinterpret_cast<char*>(&_header), sizeof(Header));

    /*
    file.read(reinterpret_cast<char*>(&gridType_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&numOrigTimesteps_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&numTimesteps_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&xBrickDim_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&yBrickDim_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&zBrickDim_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&xNumBricks_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&yNumBricks_), sizeof(unsigned int));
    file.read(reinterpret_cast<char*>(&zNumBricks_), sizeof(unsigned int));
    */

    LDEBUG("Grid type: " << _header.gridType_);
    LDEBUG("Brick dimensions: " << _header.xBrickDim_ << " "
                                << _header.yBrickDim_ << " "
                                << _header.zBrickDim_);
    LDEBUG("Num bricks: " << _header.xNumBricks_ << " "
                          << _header.yNumBricks_ << " "
                          << _header.zNumBricks_);

    paddedBrickDim_ = _header.xBrickDim_ + 2 * paddingWidth_;
    // TODO: support dimensions of different size
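    // Illustrative example of the tree-size formulas below (they assume a
    // power-of-two number of bricks per axis and of timesteps):
    // with xNumBricks_ = 8 and numTimesteps_ = 4,
    //   numOTLevels_   = log2(8) + 1   = 4
    //   numOTNodes_    = (8^4 - 1) / 7 = 585   (full octree node count)
    //   numBSTLevels_  = log2(4) + 1   = 3
    //   numBSTNodes_   = 2 * 4 - 1     = 7     (full binary tree node count)
    //   numTotalNodes_ = 585 * 7       = 4095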
    numOTLevels_ = static_cast<unsigned int>(log((int)_header.xNumBricks_) / log(2) + 1);
    numOTNodes_ = static_cast<unsigned int>((pow(8, numOTLevels_) - 1) / 7);
    numBSTLevels_ = static_cast<unsigned int>(log((int)_header.numTimesteps_) / log(2) + 1);
    numBSTNodes_ = static_cast<unsigned int>(_header.numTimesteps_ * 2 - 1);
    numTotalNodes_ = numOTNodes_ * numBSTNodes_;

    LDEBUG("Num OT levels: " << numOTLevels_);
    LDEBUG("Num OT nodes: " << numOTNodes_);
    LDEBUG("Num BST levels: " << numBSTLevels_);
    LDEBUG("Num BST nodes: " << numBSTNodes_);
    LDEBUG("Num total nodes: " << numTotalNodes_);

    // Allocate space for TSP structure
    data_.resize(numTotalNodes_ * NUM_DATA);
    LDEBUG("data size: " << data_.size());

    return true;
}

bool TSP::construct() {
    LDEBUG("Constructing TSP tree");

    // Loop over the OTs (one per BST node)
    for (unsigned int OT = 0; OT < numBSTNodes_; ++OT) {

        // Start at the root of each OT
        unsigned int OTNode = OT * numOTNodes_;

        // Calculate BST level (first level is level 0)
        unsigned int BSTLevel = static_cast<unsigned int>(log(OT + 1) / log(2));

        // Traverse OT
        unsigned int OTChild = 1;
        unsigned int OTLevel = 0;
        while (OTLevel < numOTLevels_) {

            unsigned int OTNodesInLevel = static_cast<unsigned int>(pow(8, OTLevel));
            for (unsigned int i = 0; i < OTNodesInLevel; ++i) {

                // Brick index
                data_[OTNode*NUM_DATA + BRICK_INDEX] = static_cast<int>(OTNode);

                // Error metrics
                data_[OTNode*NUM_DATA + TEMPORAL_ERR] =
                    static_cast<int>(numBSTLevels_ - 1 - BSTLevel);
                data_[OTNode*NUM_DATA + SPATIAL_ERR] =
                    static_cast<int>(numOTLevels_ - 1 - OTLevel);

                if (BSTLevel == 0) {
                    // Calculate OT child index (-1 if node is leaf)
                    int OTChildIndex = (OTChild < numOTNodes_) ?
                        static_cast<int>(OT*numOTNodes_ + OTChild) : -1;
                    data_[OTNode*NUM_DATA + CHILD_INDEX] = OTChildIndex;
                }
                else {
                    // Calculate BST child index (-1 if node is BST leaf)

                    // First BST node of current level
                    int firstNode =
                        static_cast<int>((2 * pow(2, BSTLevel - 1) - 1) * numOTNodes_);
                    // First BST node of next level
                    int firstChild =
                        static_cast<int>((2 * pow(2, BSTLevel) - 1) * numOTNodes_);
                    // Difference between first nodes between levels
                    int levelGap = firstChild - firstNode;
                    // How many nodes away from the first node are we?
                    int offset = (OTNode - firstNode) / numOTNodes_;

                    // Use level gap and offset to calculate child index
                    int BSTChildIndex = (BSTLevel < numBSTLevels_ - 1) ?
                        static_cast<int>(OTNode + levelGap + (offset*numOTNodes_)) : -1;

                    data_[OTNode*NUM_DATA + CHILD_INDEX] = BSTChildIndex;
                }

                OTNode++;
                OTChild += 8;
            }
            OTLevel++;
        }
    }

    return true;
}
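/*
 * The node data built above is uploaded to an OpenGL shader storage buffer in
 * initalizeSSO() below. A minimal, illustrative sketch of how a caller could
 * expose that buffer to a GLSL raycaster (not part of this file; the dataset
 * filename and binding point 0 are assumptions):
 *
 *   TSP tsp("volume.tsp");
 *   if (tsp.load()) {
 *       // Bind the TSP node data to SSBO binding point 0 so a shader can
 *       // declare: layout(std430, binding = 0) buffer TspData { int data[]; };
 *       glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 0, tsp.ssbo());
 *   }
 */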
bool TSP::initalizeSSO() {
    if (!_dataSSBO)
        glGenBuffers(1, &_dataSSBO);

    const size_t size = sizeof(GLint) * data_.size();
    glBindBuffer(GL_SHADER_STORAGE_BUFFER, _dataSSBO);
    //glBufferData(GL_SHADER_STORAGE_BUFFER, size, data_.data(), GL_DYNAMIC_READ);
    glBufferData(GL_SHADER_STORAGE_BUFFER, size, data_.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);

    glFinish();
    return true;
}

const TSP::Header& TSP::header() const {
    return _header;
}

long long TSP::dataPosition() {
    return sizeof(Header);
}

std::ifstream& TSP::file() {
    return _file;
}

unsigned int TSP::numTotalNodes() const {
    return numTotalNodes_;
}

unsigned int TSP::numValuesPerNode() const {
    return NUM_DATA;
}

unsigned int TSP::numBSTNodes() const {
    return numBSTNodes_;
}

unsigned int TSP::numBSTLevels() const {
    return numBSTLevels_;
}

unsigned int TSP::numOTNodes() const {
    return numOTNodes_;
}

unsigned int TSP::numOTLevels() const {
    return numOTLevels_;
}

unsigned int TSP::brickDim() const {
    return _header.xBrickDim_;
}

unsigned int TSP::paddedBrickDim() const {
    return paddedBrickDim_;
}

unsigned int TSP::numBricksPerAxis() const {
    return _header.xNumBricks_;
}

GLuint TSP::ssbo() const {
    return _dataSSBO;
}
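/*
 * Spatial error, per brick b, is computed below as the standard deviation of
 * the voxels of the leaf bricks covered by b with respect to b's own mean
 * value (illustrative formula; the pow(err, 0.5) applied afterwards is the
 * implementation's own normalization tweak):
 *
 *   err(b) = sqrt( (1 / (L * N)) * sum over covered leaves l, voxels v of
 *                  (value(l, v) - mean(b))^2 )
 *
 * where L is the number of covered leaf bricks and N the number of voxels in
 * a padded brick. Bricks that are themselves leaves get the sentinel -0.1.
 */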
bool TSP::calculateSpatialError() {
    unsigned int numBrickVals = paddedBrickDim_ * paddedBrickDim_ * paddedBrickDim_;

    if (!_file.is_open())
        return false;

    std::vector<float> buffer(numBrickVals);
    std::vector<float> averages(numTotalNodes_);
    std::vector<float> stdDevs(numTotalNodes_);

    // First pass: Calculate average color for each brick
    LDEBUG("Calculating spatial error, first pass");
    for (unsigned int brick = 0; brick < numTotalNodes_; ++brick) {

        // Offset in file where the brick is
        std::streampos offset = dataPosition() +
            static_cast<long long>(brick*numBrickVals*sizeof(float));
        _file.seekg(offset);

        _file.read(reinterpret_cast<char*>(&buffer[0]),
            static_cast<size_t>(numBrickVals)*sizeof(float));

        double average = 0.0;
        for (auto it = buffer.begin(); it != buffer.end(); ++it) {
            average += *it;
        }

        averages[brick] = average / static_cast<double>(numBrickVals);
    }

    // Spatial SNR stats
    float minError = 1e20f;
    float maxError = 0.f;
    std::vector<float> medianArray(numTotalNodes_);

    // Second pass: For each brick, compare the covered leaf voxels with
    // the brick average
    LDEBUG("Calculating spatial error, second pass");
    for (unsigned int brick = 0; brick < numTotalNodes_; ++brick) {

        float brickAvg = averages[brick];
        float stdDev = 0.f;

        // Get a list of the leaf bricks that this brick covers
        std::list<unsigned int> coveredLeafBricks = CoveredLeafBricks(brick);

        // If the brick is already a leaf, assign a negative error.
        // Ad hoc "hack" to distinguish leaves from other nodes that happen
        // to get a zero error due to rounding errors or other reasons.
        if (coveredLeafBricks.size() == 1) {
            stdDev = -0.1f;
        }
        else {
            // Calculate "standard deviation" corresponding to leaves
            for (auto lb = coveredLeafBricks.begin(); lb != coveredLeafBricks.end(); ++lb) {

                // Read brick
                std::streampos offset = dataPosition() +
                    static_cast<long long>((*lb)*numBrickVals*sizeof(float));
                _file.seekg(offset);

                _file.read(reinterpret_cast<char*>(&buffer[0]),
                    static_cast<size_t>(numBrickVals)*sizeof(float));

                // Add to sum
                for (auto v = buffer.begin(); v != buffer.end(); ++v) {
                    stdDev += pow(*v - brickAvg, 2.f);
                }
            }

            // Finish calculation
            if (sizeof(float) != sizeof(int)) {
                LERROR("Float and int sizes don't match, can't reinterpret");
                return false;
            }

            stdDev /= static_cast<float>(coveredLeafBricks.size()*numBrickVals);
            stdDev = sqrt(stdDev);
        } // if not leaf

        if (stdDev < minError) {
            minError = stdDev;
        }
        else if (stdDev > maxError) {
            maxError = stdDev;
        }

        stdDevs[brick] = stdDev;
        medianArray[brick] = stdDev;
    }

    std::sort(medianArray.begin(), medianArray.end());
    //float medError = medianArray[medianArray.size()/2];

    /*
    LDEBUG("\nMin spatial std dev: " << minError);
    LDEBUG("Max spatial std dev: " << maxError);
    LDEBUG("Median spatial std dev: " << medError);
    LDEBUG("");
    */

    // "Normalize" errors
    float minNorm = 1e20f;
    float maxNorm = 0.f;
    for (unsigned int i = 0; i < numTotalNodes_; ++i) {
        if (stdDevs[i] > 0.f) {
            stdDevs[i] = pow(stdDevs[i], 0.5f);
        }
        //data_[i*NUM_DATA + SPATIAL_ERR] = *reinterpret_cast<int*>(&stdDevs[i]);
        data_[i*NUM_DATA + SPATIAL_ERR] = glm::floatBitsToInt(stdDevs[i]);
        if (stdDevs[i] < minNorm) {
            minNorm = stdDevs[i];
        }
        else if (stdDevs[i] > maxNorm) {
            maxNorm = stdDevs[i];
        }
    }

    std::sort(stdDevs.begin(), stdDevs.end());
    float medNorm = stdDevs[stdDevs.size() / 2];

    minSpatialError_ = minNorm;
    maxSpatialError_ = maxNorm;
    medianSpatialError_ = medNorm;

    LDEBUG("Min normalized spatial std dev: " << minNorm);
    LDEBUG("Max normalized spatial std dev: " << maxNorm);
    LDEBUG("Median normalized spatial std dev: " << medNorm);

    return true;
}
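/*
 * Temporal error, per brick b, is computed below as the per-voxel standard
 * deviation across the BST leaf bricks (individual timesteps) that b covers,
 * averaged over all voxels of the brick (illustrative formula; the
 * pow(err, 0.25) applied afterwards is the implementation's own tweak):
 *
 *   err(b) = (1 / N) * sum over voxels v of
 *            sqrt( (1 / T) * sum over covered timestep leaves t of
 *                  (value(t, v) - value(b, v))^2 )
 *
 * where N is the number of voxels in a padded brick and T the number of
 * covered BST leaves. As for the spatial error, BST leaves get the
 * sentinel -0.1.
 */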
bool TSP::calculateTemporalError() {

    if (!_file.is_open())
        return false;

    LDEBUG("Calculating temporal error");

    // Statistics
    //float minErr = 1e20f;
    //float maxErr = 0.f;
    std::vector<float> meanArray(numTotalNodes_);

    // Save errors
    std::vector<float> errors(numTotalNodes_);

    // Calculate temporal error for one brick at a time
    for (unsigned int brick = 0; brick < numTotalNodes_; ++brick) {

        unsigned int numBrickVals = paddedBrickDim_ * paddedBrickDim_ * paddedBrickDim_;

        // Save the individual voxel's average over timesteps
        std::vector<float> voxelAverages(numBrickVals);
        std::vector<float> voxelStdDevs(numBrickVals);

        // Read the whole brick to fill the averages
        std::streampos offset = dataPosition() +
            static_cast<long long>(brick*numBrickVals*sizeof(float));
        _file.seekg(offset);

        _file.read(reinterpret_cast<char*>(&voxelAverages[0]),
            static_cast<size_t>(numBrickVals)*sizeof(float));

        // Build a list of the BST leaf bricks (within the same octree level) that
        // this brick covers
        std::list<unsigned int> coveredBricks = CoveredBSTLeafBricks(brick);

        // If the brick is at the lowest BST level, automatically set the error
        // to -0.1 (enables using -1 as a marker for "no error accepted");
        // Somewhat ad hoc to get around the fact that the error could be
        // 0.0 higher up in the tree
        if (coveredBricks.size() == 1) {
            errors[brick] = -0.1f;
        }
        else {
            // Calculate standard deviation per voxel, average over brick
            float avgStdDev = 0.f;
            for (unsigned int voxel = 0; voxel < numBrickVals; ++voxel) {

                float stdDev = 0.f;
                for (auto leaf = coveredBricks.begin(); leaf != coveredBricks.end(); ++leaf) {

                    // Sample the covered leaf bricks at the same voxel position
                    std::streampos offset = dataPosition() +
                        static_cast<long long>((*leaf*numBrickVals + voxel)*sizeof(float));
                    _file.seekg(offset);

                    float sample;
                    _file.read(reinterpret_cast<char*>(&sample), sizeof(float));

                    stdDev += pow(sample - voxelAverages[voxel], 2.f);
                }
                stdDev /= static_cast<float>(coveredBricks.size());
                stdDev = sqrt(stdDev);

                avgStdDev += stdDev;
            } // for voxel

            avgStdDev /= static_cast<float>(numBrickVals);
            meanArray[brick] = avgStdDev;
            errors[brick] = avgStdDev;
        }

        /*
        if (avgStdDev < minErr) {
            minErr = avgStdDev;
        }
        else if (avgStdDev > maxErr) {
            maxErr = avgStdDev;
        }
        */

    } // for all bricks

    std::sort(meanArray.begin(), meanArray.end());
    //float medErr = meanArray[meanArray.size()/2];

    /*
    LDEBUG("\nMin temporal error: " << minErr);
    LDEBUG("Max temporal error: " << maxErr);
    LDEBUG("Median temporal error: " << medErr);
    */

    // Adjust errors using user-provided exponents
    float minNorm = 1e20f;
    float maxNorm = 0.f;
    for (unsigned int i = 0; i < numTotalNodes_; ++i) {
        if (errors[i] > 0.f) {
            errors[i] = pow(errors[i], 0.25f);
        }
        //data_[i*NUM_DATA + TEMPORAL_ERR] = *reinterpret_cast<int*>(&errors[i]);
        data_[i*NUM_DATA + TEMPORAL_ERR] = glm::floatBitsToInt(errors[i]);
        if (errors[i] < minNorm) {
            minNorm = errors[i];
        }
        else if (errors[i] > maxNorm) {
            maxNorm = errors[i];
        }
    }

    std::sort(errors.begin(), errors.end());
    float medNorm = errors[errors.size() / 2];

    minTemporalError_ = minNorm;
    maxTemporalError_ = maxNorm;
    medianTemporalError_ = medNorm;

    LDEBUG("Min normalized temporal std dev: " << minNorm);
    LDEBUG("Max normalized temporal std dev: " << maxNorm);
    LDEBUG("Median normalized temporal std dev: " << medNorm);

    return true;
}

bool TSP::readCache() {
    if (!FileSys.cacheManager())
        return false;

    ghoul::filesystem::File f = _filename;
    std::string cacheFilename = FileSys.cacheManager()->cachedFilename(
        f.baseName(), "", ghoul::filesystem::CacheManager::Persistent::Yes);

    std::ifstream file(cacheFilename, std::ios::in | std::ios::binary);
    if (!file.is_open()) {
        LWARNING("Failed to open " << cacheFilename);
        return false;
    }

    file.read(reinterpret_cast<char*>(&minSpatialError_), sizeof(float));
    file.read(reinterpret_cast<char*>(&maxSpatialError_), sizeof(float));
    file.read(reinterpret_cast<char*>(&medianSpatialError_), sizeof(float));
    file.read(reinterpret_cast<char*>(&minTemporalError_), sizeof(float));
    file.read(reinterpret_cast<char*>(&maxTemporalError_), sizeof(float));
    file.read(reinterpret_cast<char*>(&medianTemporalError_), sizeof(float));

    size_t dataSize = static_cast<size_t>(numTotalNodes_*NUM_DATA) * sizeof(int);
    file.read(reinterpret_cast<char*>(&data_[0]), dataSize);

    file.close();

    LDEBUG("Cached errors:");
    LDEBUG("Min spatial error: " << minSpatialError_);
    LDEBUG("Max spatial error: " << maxSpatialError_);
    LDEBUG("Median spatial error: " << medianSpatialError_);
    LDEBUG("Min temporal error: " << minTemporalError_);
    LDEBUG("Max temporal error: " << maxTemporalError_);
    LDEBUG("Median temporal error: " << medianTemporalError_);

    return true;
}

bool TSP::writeCache() {
    if (!FileSys.cacheManager())
        return false;

    ghoul::filesystem::File f = _filename;
    std::string cacheFilename = FileSys.cacheManager()->cachedFilename(
        f.baseName(), "", ghoul::filesystem::CacheManager::Persistent::Yes);

    std::ofstream file(cacheFilename, std::ios::out | std::ios::binary);
    if (!file.is_open()) {
        LWARNING("Failed to open " << cacheFilename);
        return false;
    }
    LINFO("Writing cache to " << cacheFilename);

    file.write(reinterpret_cast<char*>(&minSpatialError_), sizeof(float));
    file.write(reinterpret_cast<char*>(&maxSpatialError_), sizeof(float));
    file.write(reinterpret_cast<char*>(&medianSpatialError_), sizeof(float));
    file.write(reinterpret_cast<char*>(&minTemporalError_), sizeof(float));
    file.write(reinterpret_cast<char*>(&maxTemporalError_), sizeof(float));
    file.write(reinterpret_cast<char*>(&medianTemporalError_), sizeof(float));

    file.write(reinterpret_cast<char*>(&data_[0]), data_.size()*sizeof(int));

    file.close();

    /*
    LDEBUG("\nData:");
    for (unsigned i = 0; i < numTotalNodes_; ++i) {
        LDEBUG("Spatial err " << *reinterpret_cast<float*>(&data_[i*NUM_DATA + SPATIAL_ERR]));
        LDEBUG("Temporal err " << *reinterpret_cast<float*>(&data_[i*NUM_DATA + TEMPORAL_ERR]));
    }
    */

    return true;
}
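/*
 * The error values are stored bit-wise inside the int node-data array (see
 * glm::floatBitsToInt above), so reading them back requires reinterpreting
 * the bits rather than converting the value. Minimal illustrative round trip
 * (variable names are only for this sketch):
 *
 *   float err = 0.42f;
 *   int bits = glm::floatBitsToInt(err);     // what is stored in data_
 *   float back = glm::intBitsToFloat(bits);  // back == 0.42f exactly
 */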
float TSP::getSpatialError(unsigned int _brickIndex) {
    return reinterpret_cast<float&>(data_[_brickIndex*NUM_DATA + SPATIAL_ERR]);
}

float TSP::getTemporalError(unsigned int _brickIndex) {
    return reinterpret_cast<float&>(data_[_brickIndex*NUM_DATA + TEMPORAL_ERR]);
}

unsigned int TSP::getFirstOctreeChild(unsigned int _brickIndex) {
    unsigned int otNode = _brickIndex % numOTNodes_;
    unsigned int bstOffset = _brickIndex - otNode;

    unsigned int depth = log(7 * otNode + 1) / log(8);
    unsigned int firstInLevel = (pow(8, depth) - 1) / 7;
    unsigned int levelOffset = otNode - firstInLevel;

    unsigned int firstInChildLevel = (pow(8, depth + 1) - 1) / 7;
    unsigned int childIndex = firstInChildLevel + 8*levelOffset;

    return bstOffset + childIndex;
}

unsigned int TSP::getBstLeft(unsigned int _brickIndex) {
    unsigned int bstNode = _brickIndex / numOTNodes_;
    unsigned int otOffset = _brickIndex % numOTNodes_;

    unsigned int depth = log(bstNode + 1) / log(2);
    unsigned int firstInLevel = pow(2, depth) - 1;
    unsigned int levelOffset = bstNode - firstInLevel;

    unsigned int firstInChildLevel = pow(2, depth + 1) - 1;
    unsigned int childIndex = firstInChildLevel + 2*levelOffset;

    return otOffset + childIndex * numOTNodes_;
}

unsigned int TSP::getBstRight(unsigned int _brickIndex) {
    return getBstLeft(_brickIndex) + numOTNodes_;
}

bool TSP::isBstLeaf(unsigned int _brickIndex) {
    unsigned int bstNode = _brickIndex / numOTNodes_;
    return bstNode >= numBSTNodes_ / 2;
}

bool TSP::isOctreeLeaf(unsigned int _brickIndex) {
    unsigned int otNode = _brickIndex % numOTNodes_;
    unsigned int depth = log(7 * otNode + 1) / log(8);
    return depth == numOTLevels_ - 1;
}

std::list<unsigned int> TSP::CoveredLeafBricks(unsigned int _brickIndex) {
    std::list<unsigned int> out;

    // Find what octree skeleton node the index belongs to
    unsigned int OTNode = _brickIndex % numOTNodes_;

    // Find what BST node the index corresponds to using int division
    unsigned int BSTNode = _brickIndex / numOTNodes_;

    // Calculate BST offset (to translate to root octree)
    unsigned int BSTOffset = BSTNode * numOTNodes_;

    // Traverse root octree structure to leaves
    // When visiting the leaves, translate back to correct BST level and save
    std::queue<unsigned int> queue;
    queue.push(OTNode);
    do {
        // Get front of queue and pop it
        unsigned int toVisit = queue.front();
        queue.pop();

        // See if the node has children
        int child = data_[toVisit*NUM_DATA + CHILD_INDEX];
        if (child == -1) {
            // Translate back and save
            out.push_back(toVisit + BSTOffset);
        }
        else {
            // Queue the eight children
            for (int i = 0; i < 8; ++i) {
                queue.push(child + i);
            }
        }
    } while (!queue.empty());

    return out;
}

std::list<unsigned int> TSP::CoveredBSTLeafBricks(unsigned int _brickIndex) {
    std::list<unsigned int> out;

    // Traverse the BST from this node down to its leaves
    std::queue<unsigned int> queue;
    queue.push(_brickIndex);
    do {
        unsigned int toVisit = queue.front();
        queue.pop();

        bool BSTRoot = toVisit < numOTNodes_;
        if (BSTRoot) {
            if (numBSTLevels_ == 1) {
                out.push_back(toVisit);
            }
            else {
                queue.push(toVisit + numOTNodes_);
                queue.push(toVisit + numOTNodes_ * 2);
            }
        }
        else {
            int child = data_[toVisit*NUM_DATA + CHILD_INDEX];
            if (child == -1) {
                // Save leaf brick to list
                out.push_back(toVisit);
            }
            else {
                // Queue children
                queue.push(child);
                queue.push(child + numOTNodes_);
            }
        }
    } while (!queue.empty());

    return out;
}

} // namespace openspace
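/*
 * Worked example of the index arithmetic above (illustrative only; it assumes
 * the sizes from the example in readHeader(), i.e. numOTNodes_ = 585 and
 * numBSTNodes_ = 7):
 *
 *   Brick index 1170 = 2 * 585, i.e. the octree root stored under BST node 2.
 *   getFirstOctreeChild(1170) = 1170 + 1   = 1171  (octree children 1171..1178)
 *   getBstLeft(1170)          = 5 * 585    = 2925  (BST child node 5)
 *   getBstRight(1170)         = 2925 + 585 = 3510  (BST child node 6)
 */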