Solve merge conflict in KameleonPlane

This commit is contained in:
Michael Nilsson
2016-06-03 09:20:27 -04:00
16 changed files with 159 additions and 877 deletions

View File

@@ -0,0 +1,15 @@
${SCENE}/iswa/tfs/red.jpg
${SCENE}/iswa/tfs/green.jpg
${SCENE}/iswa/tfs/blue.jpg
${SCENE}/iswa/tfs/red.jpg
${SCENE}/iswa/tfs/green.jpg
${SCENE}/iswa/tfs/blue.jpg
${SCENE}/iswa/tfs/red.jpg
${SCENE}/iswa/tfs/green.jpg
${SCENE}/iswa/tfs/blue.jpg
${SCENE}/iswa/tfs/red.jpg
${SCENE}/iswa/tfs/green.jpg
${SCENE}/iswa/tfs/blue.jpg
${SCENE}/iswa/tfs/colormap_hot.jpg
${SCENE}/iswa/tfs/colormap_hot.jpg
${SCENE}/iswa/tfs/colormap_hot.jpg

View File

@@ -0,0 +1 @@
${SCENE}/iswa/tfs/colormap_hot.jpg

View File

@@ -144,11 +144,8 @@ void DataCygnet::setTextureUniforms(){
}
}
//Set Transfer Functions
if(activeTextures > 0){
if(selectedOptions.back()>=activeTransferfunctions)
if(activeTextures > 0 && selectedOptions.back()>=(int)_transferFunctions.size())
activeTransferfunctions = 1;
}
ghoul::opengl::TextureUnit tfUnits[MAX_TEXTURES];
j = 0;

View File

@@ -37,7 +37,7 @@ DataPlane::DataPlane(const ghoul::Dictionary& dictionary)
,_autoFilter("autoFilter", "Auto Filter", false)
,_normValues("normValues", "Normalize Values", glm::vec2(1.0,1.0), glm::vec2(0), glm::vec2(5.0))
,_backgroundValues("backgroundValues", "Background Values", glm::vec2(0.0), glm::vec2(0), glm::vec2(1.0))
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/hot.tf")
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/default.tf")
{
addProperty(_useLog);
@@ -136,9 +136,9 @@ bool DataPlane::createGeometry() {
// x y z w s t
-x, -y, -z, w, 0, 1,
x, y, z, w, 1, 0,
-x, ((x>0)?y:-y), z, w, 0, 0,
-x, ((x>1)?y:-y), z, w, 0, 0,
-x, -y, -z, w, 0, 1,
x, ((x>0)?-y:y), -z, w, 1, 1,
x, ((x>1)?-y:y), -z, w, 1, 1,
x, y, z, w, 1, 0,
};

View File

@@ -39,7 +39,7 @@ DataSphere::DataSphere(const ghoul::Dictionary& dictionary)
,_autoFilter("autoFilter", "Auto Filter", false)
,_normValues("normValues", "Normalize Values", glm::vec2(1.0,1.0), glm::vec2(0), glm::vec2(5.0))
,_backgroundValues("backgroundValues", "Background Values", glm::vec2(0.0), glm::vec2(0), glm::vec2(1.0))
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/hot.tf")
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/default.tf")
,_sphere(nullptr)
{
float radius;

View File

@@ -43,11 +43,12 @@ namespace openspace{
IswaDataGroup::IswaDataGroup(std::string name, std::string type)
:IswaBaseGroup(name, type)
,_useLog("useLog","Use Logarithm", false)
,_useHistogram("useHistogram", "Auto Contrast", false)
,_autoFilter("autoFilter", "Auto Filter", false)
,_autoFilter("autoFilter", "Auto Filter", true)
,_normValues("normValues", "Normalize Values", glm::vec2(1.0,1.0), glm::vec2(0), glm::vec2(5.0))
,_backgroundValues("backgroundValues", "Background Values", glm::vec2(0.0), glm::vec2(0), glm::vec2(1.0))
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/hot.tf")
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/default.tf")
,_dataOptions("dataOptions", "Data Options")
{
addProperty(_useLog);
@@ -68,11 +69,14 @@ void IswaDataGroup::registerProperties(){
OsEng.gui()._iswa.registerProperty(&_useLog);
OsEng.gui()._iswa.registerProperty(&_useHistogram);
OsEng.gui()._iswa.registerProperty(&_autoFilter);
OsEng.gui()._iswa.registerProperty(&_backgroundValues);
if(!_autoFilter.value())
OsEng.gui()._iswa.registerProperty(&_backgroundValues);
// OsEng.gui()._iswa.registerProperty(&_autoFilter);
OsEng.gui()._iswa.registerProperty(&_normValues);
OsEng.gui()._iswa.registerProperty(&_transferFunctionsFile);
OsEng.gui()._iswa.registerProperty(&_dataOptions);
_useLog.onChange([this]{
LDEBUG("Group " + name() + " published useLogChanged");
_groupEvent->publish("useLogChanged", ghoul::Dictionary({{"useLog", _useLog.value()}}));

View File

@@ -42,9 +42,12 @@ namespace {
namespace openspace{
IswaKameleonGroup::IswaKameleonGroup(std::string name, std::string type)
:IswaDataGroup(name, type)
,_resolution("resolution", "Resolution%", 1.0f, 0.1, 2.0f)
,_fieldlines("fieldlineSeedsIndexFile", "Fieldline Seedpoints")
,_fieldlineIndexFile("")
,_kameleonPath("")
{
addProperty(_resolution);
addProperty(_fieldlines);
registerProperties();
}
@@ -75,8 +78,14 @@ void IswaKameleonGroup::setFieldlineInfo(std::string fieldlineIndexFile, std::st
void IswaKameleonGroup::registerProperties(){
OsEng.gui()._iswa.registerProperty(&_resolution);
OsEng.gui()._iswa.registerProperty(&_fieldlines);
_resolution.onChange([this]{
LDEBUG("Group " + name() + " published resolutionChanged");
_groupEvent->publish("resolutionChanged", ghoul::Dictionary({{"resolution", _resolution.value()}}));
});
_fieldlines.onChange([this]{
updateFieldlineSeeds();
});
@@ -102,7 +111,7 @@ void IswaKameleonGroup::readFieldlinePaths(std::string indexFile){
int i = 0;
for (json::iterator it = fieldlines.begin(); it != fieldlines.end(); ++it) {
_fieldlines.addOption({i, name()+"/"+it.key()});
_fieldlines.addOption({i, it.key()});
_fieldlineState[i] = std::make_tuple(name()+"/"+it.key(), it.value(), false);
i++;
}
@@ -143,4 +152,12 @@ void IswaKameleonGroup::clearFieldlines(){
}
}
void IswaKameleonGroup::changeCdf(std::string path){
_kameleonPath = path;
clearFieldlines();
updateFieldlineSeeds();
_groupEvent->publish("cdfChanged", ghoul::Dictionary({{"path", path}}));
}
}//namespace openspace

View File

@@ -35,14 +35,16 @@ public:
std::vector<int> fieldlineValue();
void setFieldlineInfo(std::string fieldlineIndexFile, std::string kameleonPath);
void changeCdf(std::string path);
protected:
void registerProperties();
void readFieldlinePaths(std::string indexFile);
void readFieldlinePaths(std::string indexFile);
void updateFieldlineSeeds();
void clearFieldlines();
properties::FloatProperty _resolution;
properties::SelectionProperty _fieldlines;
std::string _fieldlineIndexFile;

View File

@@ -44,9 +44,9 @@ KameleonPlane::KameleonPlane(const ghoul::Dictionary& dictionary)
,_autoFilter("autoFilter", "Auto Filter", true)
,_normValues("normValues", "Normalize Values", glm::vec2(1.0,1.0), glm::vec2(0), glm::vec2(5.0))
,_backgroundValues("backgroundValues", "Background Values", glm::vec2(0.0), glm::vec2(0), glm::vec2(1.0))
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/hot.tf")
,_transferFunctionsFile("transferfunctions", "Transfer Functions", "${SCENE}/iswa/tfs/default.tf")
,_fieldlines("fieldlineSeedsIndexFile", "Fieldline Seedpoints")
,_resolution("resolution", "Resolutionx100", 1, 1, 5)
,_resolution("resolution", "Resolution%", 1.0f, 0.1, 2.0f)
,_slice("slice", "Slice", 0.0, 0.0, 1.0)
{
@@ -70,25 +70,20 @@ KameleonPlane::KameleonPlane(const ghoul::Dictionary& dictionary)
OsEng.gui()._iswa.registerProperty(&_slice);
if(axis == "x"){
_scale = _data->scale.x;
_data->scale.x = 0;
_data->offset.x = 0;
_slice.setValue(0.8);
}else if(axis == "y"){
_scale = _data->scale.y;
_data->scale.y = 0;
// _data->offset.y = 0;
if(axis == "x") _cut = 0;
else if (axis == "y") _cut = 1;
else _cut = 2;
_slice.setValue((_data->offset.y -_data->gridMin.y)/_scale);
}else{
_scale = _data->scale.z;
_data->scale.z = 0;
// _data->offset.z = 0;
_origOffset = _data->offset;
_slice.setValue((_data->offset.z - _data->gridMin.z)/_scale);
}
_scale = _data->scale[_cut];
_data->scale[_cut] = 0;
_data->offset[_cut] = 0;
_slice.setValue((_data->offset[_cut] -_data->gridMin[_cut])/_scale);
setDimensions();
_programName = "DataPlaneProgram";
_vsPath = "${MODULE_ISWA}/shaders/dataplane_vs.glsl";
@@ -169,8 +164,10 @@ bool KameleonPlane::initialize(){
for(int i=0; i<_textures.size(); i++){
_textures[i] = std::move(nullptr);
}
_dataProcessor->clear();
updateTextureResource();
setDimensions();
});
_slice.onChange([this](){
@@ -181,22 +178,6 @@ bool KameleonPlane::initialize(){
updateFieldlineSeeds();
});
_dimensions = glm::size3_t(_resolution.value()*100);
if(_data->scale.x == 0){
_dimensions.x = 1;
_dimensions.z = (int) _dimensions.y * (_data->scale.y/_data->scale.z);
_textureDimensions = glm::size3_t(_dimensions.y, _dimensions.z, 1);
}else if(_data->scale.y == 0){
_dimensions.y = 1;
_dimensions.z = (int) _dimensions.x * (_data->scale.x/_data->scale.z);
_textureDimensions = glm::size3_t(_dimensions.x, _dimensions.z, 1);
}else{
_dimensions.z = 1;
_dimensions.y = (int) _dimensions.x * (_data->scale.x/_data->scale.y);
_textureDimensions = glm::size3_t(_dimensions.x, _dimensions.y, 1);
}
fillOptions(_kwPath);
// Has to be done after fillOptions
_dataOptions.onChange([this](){
@@ -268,20 +249,14 @@ std::vector<float*> KameleonPlane::textureData() {
bool KameleonPlane::updateTextureResource(){
if (_data->scale.x == 0){
_data->offset.x = _data->gridMin.x+_slice.value()*_scale;
} else if (_data->scale.y == 0){
_data->offset.y = _data->gridMin.y+_slice.value()*_scale;
} else {
_data->offset.z = _data->gridMin.z+_slice.value()*_scale;
}
_data->offset[_cut] = _data->gridMin[_cut]+_slice.value()*_scale;
_textureDirty = true;
return true;
}
void KameleonPlane::setUniforms(){
setTextureUniforms();
_shader->setUniform("backgroundValues", _backgroundValues.value());
_shader->setUniform("transparency", _alpha.value());
@@ -328,8 +303,7 @@ void KameleonPlane::readFieldlinePaths(std::string indexFile){
std::string fullName = name();
std::string partName = fullName.substr(0,fullName.find_last_of("-"));
for (json::iterator it = fieldlines.begin(); it != fieldlines.end(); ++it) {
_fieldlines.addOption({i, name()+"/"+it.key()});
_fieldlines.addOption({i, it.key()});
_fieldlineState[i] = std::make_tuple(partName+"/"+it.key(), it.value(), false);
i++;
}
@@ -393,6 +367,46 @@ void KameleonPlane::subscribeToGroup(){
LDEBUG(name() + " Event updateGroup");
updateTexture();
});
groupEvent->subscribe(name(), "resolutionChanged", [&](ghoul::Dictionary dict){
LDEBUG(name() + " Event resolutionChanged");
float resolution;
bool success = dict.getValue("resolution", resolution);
if(success){
_resolution.setValue(resolution);
}
});
groupEvent->subscribe(name(), "cdfChanged", [&](ghoul::Dictionary dict){
LDEBUG(name() + " Event cdfChanged");
std::string path;
bool success = dict.getValue("path", path);
if(success){
changeKwPath(path);
}
updateTexture();
});
}
void KameleonPlane::setDimensions(){
// the cdf files has an offset of 0.5 in normali resolution.
// with lower resolution the offset increases.
_data->offset = _origOffset - 0.5f*(1.0f/_resolution.value());
_dimensions = glm::size3_t(_data->scale*(float)_resolution.value());
_dimensions[_cut] = 1;
if(_cut == 0){
_textureDimensions = glm::size3_t(_dimensions.y, _dimensions.z, 1);
}else if(_cut == 1){
_textureDimensions = glm::size3_t(_dimensions.x, _dimensions.z, 1);
}else{
_textureDimensions = glm::size3_t(_dimensions.x, _dimensions.y, 1);
}
}
void KameleonPlane::changeKwPath(std::string kwPath){
_kwPath = kwPath;
}
}// namespace openspace

View File

@@ -59,6 +59,7 @@ private:
void setUniforms() override;
std::vector<float*> textureData() override;
void setDimensions();
/**
* Given a path to the json index of seedpoints file, this
* method reads, parses and adds them as checkbox options
@@ -75,9 +76,10 @@ private:
void updateFieldlineSeeds();
void subscribeToGroup();
void changeKwPath(std::string path);
static int id();
properties::IntProperty _resolution;
properties::FloatProperty _resolution;
properties::FloatProperty _slice;
properties::StringProperty _transferFunctionsFile;
@@ -90,17 +92,15 @@ private:
properties::BoolProperty _useHistogram;
properties::BoolProperty _autoFilter;
std::shared_ptr<KameleonWrapper> _kw;
std::string _kwPath;
glm::size3_t _dimensions;
float* _dataSlice;
std::string _var;
std::vector<float*> _dataSlices;
float _scale;
glm::vec3 _origOffset;
/**
* _fieldlineState maps the checkbox value of each fieldline seedpoint file to a tuple
* containing information that is needed to either add or remove a fieldline from the scenegraph.
@@ -108,10 +108,10 @@ private:
*/
std::map<int, std::tuple<std::string, std::string, bool> > _fieldlineState;
std::string _fieldlineIndexFile;
int _cut;
GLuint _quad;
GLuint _vertexPositionBuffer;
};
} // namespace openspace

View File

@@ -61,7 +61,8 @@ Fragment getFragment() {
vec4 color = texture(transferFunctions[0], vec2(v,0));
if((v<(x+y)) && v>(x-y))
color = mix(transparent, color, clamp(1,0,abs(v-x)));
color = transparent;
// color = mix(transparent, color, clamp(1,0,abs(v-x)));
diffuse = color;
}else{
@@ -69,7 +70,8 @@ Fragment getFragment() {
v = texture(textures[i], vec2(vs_st.s, 1-vs_st.t)).r;
vec4 color = texture(transferFunctions[i], vec2(v,0));
if((v<(x+y)) && v>(x-y))
color = mix(transparent, color, clamp(1,0,abs(v-x)));
color = transparent;
// color = mix(transparent, color, clamp(1,0,abs(v-x)));
diffuse += color;
}
}

View File

@@ -38,7 +38,7 @@
#include <modules/iswa/ext/json/json.hpp>
namespace {
const std::string _loggerCat = "DataPlane";
const std::string _loggerCat = "DataProcessor";
using json = nlohmann::json;
}
@@ -52,760 +52,37 @@ DataProcessor::DataProcessor()
_coordinateVariables = {"x", "y", "z", "phi", "theta"};
}
DataProcessor::DataProcessor(bool useLog, bool useHistogram, glm::vec2 normValues)
:_useLog(useLog)
,_useHistogram(useHistogram)
,_normValues(normValues)
,_filterValues(glm::vec2(0))
{
_coordinateVariables = {"x", "y", "z", "phi", "theta"};
};
DataProcessor::~DataProcessor(){};
std::vector<std::string> DataProcessor::readHeader(std::string& dataBuffer){
std::vector<std::string> options = std::vector<std::string>();
if(!dataBuffer.empty()){
std::stringstream memorystream(dataBuffer);
std::string line;
while(getline(memorystream,line)){
if(line.find("#") == 0){
if(line.find("# Output data:") == 0){
line = line.substr(26);
std::stringstream ss(line);
std::string token;
getline(ss, token, 'x');
int x = std::stoi(token);
getline(ss, token, '=');
int y = std::stoi(token);
_dimensions = glm::size3_t(x, y, 1);
getline(memorystream, line);
line = line.substr(1);
ss = std::stringstream(line);
std::string option;
while(ss >> option){
if(_coordinateVariables.find(option) == _coordinateVariables.end()){
options.push_back(option);
}
}
}
}else{
break;
}
}
}
return options;
void DataProcessor::useLog(bool useLog){
_useLog = useLog;
}
std::vector<std::string> DataProcessor::readJSONHeader(std::string& dataBuffer){
std::vector<std::string> options = std::vector<std::string>();
if(!dataBuffer.empty()){
json j = json::parse(dataBuffer);
json var = j["variables"];
for (json::iterator it = var.begin(); it != var.end(); ++it) {
std::string option = it.key();
if(option == "x"){
json lon = it.value();
json lat = lon.at(0);
_dimensions = glm::size3_t(lat.size(), lon.size(), 1);
}
if(_coordinateVariables.find(option) == _coordinateVariables.end()){
options.push_back(option);
}
}
}
return options;
void DataProcessor::useHistogram(bool useHistogram){
_useHistogram = useHistogram;
}
void DataProcessor::addValues(std::string& dataBuffer, properties::SelectionProperty dataOptions){
int numOptions = dataOptions.options().size();
if(_min.empty()) _min = std::vector<float>(numOptions, std::numeric_limits<float>::max());
if(_max.empty()) _max = std::vector<float>(numOptions, std::numeric_limits<float>::min());
if(_sum.empty()) _sum = std::vector<float>(numOptions, 0.0f);
if(_standardDeviation.empty()) _standardDeviation = std::vector<float>(numOptions, 0.0f);
if(_numValues.empty()) _numValues= std::vector<float>(numOptions, 0.0f);
if(_histograms.empty())_histograms = std::vector<std::shared_ptr<Histogram>>(numOptions, nullptr);
if(!dataBuffer.empty()){
std::stringstream memorystream(dataBuffer);
std::string line;
std::vector<float> sum(numOptions, 0.0f);
std::vector<std::vector<float>> values(numOptions, std::vector<float>());
int numValues = 0;
while(getline(memorystream, line)){
if(line.find("#") == 0) continue;
std::stringstream ss(line);
std::vector<float> value;
float v;
while(ss >> v){
value.push_back(v);
}
if(value.size()){
for(int i=0; i<numOptions; i++){
float v = value[i+3];
values[i].push_back(v);
_min[i] = std::min(_min[i], v);
_max[i] = std::max(_max[i], v);
sum[i] += v;
}
numValues++;
}
}
for(int i=0; i<numOptions; i++){
if(!_histograms[i]){
_histograms[i] = std::make_shared<Histogram>(_min[i], _max[i], 512);
}else{
_histograms[i]->changeRange(_min[i], _max[i]);
}
int numValues = values[i].size();
float mean = (1.0/numValues)*sum[i];
float var = 0;
for(int j=0; j<numValues; j++){
var += pow(values[i][j] - mean, 2);
_histograms[i]->add(values[i][j], 1);
}
float sd = sqrt(var / numValues);
_sum[i] += sum[i];
_standardDeviation[i] = sqrt(pow(_standardDeviation[i],2) + pow(sd, 2));
_numValues[i] += numValues;
_histograms[i]->generateEqualizer();
}
}
void DataProcessor::normValues(glm::vec2 normValues){
_normValues = normValues;
}
std::vector<float*> DataProcessor::readData(std::string& dataBuffer, properties::SelectionProperty dataOptions){
if(!dataBuffer.empty()){
std::stringstream memorystream(dataBuffer);
std::string line;
std::vector<int> selectedOptions = dataOptions.value();
int numSelected = selectedOptions.size();
std::vector<float> min(numSelected, std::numeric_limits<float>::max());
std::vector<float> max(numSelected, std::numeric_limits<float>::min());
std::vector<float> sum(numSelected, 0.0f);
std::vector<std::vector<float>> optionValues(numSelected, std::vector<float>());
std::vector<float*> data(dataOptions.options().size(), nullptr);
for(int option : selectedOptions){
data[option] = new float[_dimensions.x*_dimensions.y]{0.0f};
}
int numValues = 0;
while(getline(memorystream, line)){
if(line.find("#") == 0){ //part of the header
continue;
}
std::stringstream ss(line);
std::vector<float> value;
float v;
while(ss >> v){
value.push_back(v);
}
if(value.size()){
for(int i=0; i<numSelected; i++){
float v = value[selectedOptions[i]+3]; //+3 because "options" x, y and z.
if(_useLog){
int sign = (v>0)? 1:-1;
v = sign*log(fabs(v) + 1);
}
optionValues[i].push_back(v);
min[i] = std::min(min[i], v);
max[i] = std::max(max[i], v);
sum[i] += v;
}
numValues++;
}
}
// std::cout << "Actual size: " << numValues << " Expected: " << _dimensions.x*_dimensions.y << std::endl;
if(numValues != _dimensions.x*_dimensions.y){
LWARNING("Number of values read and expected are not the same");
return std::vector<float*>();
}
// FOR TESTING
// ===========
// std::chrono::time_point<std::chrono::system_clock> start, end;
// start = std::chrono::system_clock::now();
// ===========
for(int i=0; i<numSelected; i++){
processData(data[ selectedOptions[i] ], optionValues[i], min[i], max[i], sum[i]);
}
// FOR TESTING
// ===========
// end = std::chrono::system_clock::now();
// _numOfBenchmarks++;
// std::chrono::duration<double> elapsed_seconds = end-start;
// _avgBenchmarkTime = ( (_avgBenchmarkTime * (_numOfBenchmarks-1)) + elapsed_seconds.count() ) / _numOfBenchmarks;
// std::cout << " readData():" << std::endl;
// std::cout << "avg elapsed time: " << _avgBenchmarkTime << "s\n";
// std::cout << "num Benchmarks: " << _numOfBenchmarks << "\n";
// ===========
return data;
}
else {
// LWARNING("Nothing in memory buffer, are you connected to the information super highway?");
return std::vector<float*>();
}
glm::size3_t DataProcessor::dimensions(){
return _dimensions;
}
std::vector<float*> DataProcessor::readData2(std::string& dataBuffer, properties::SelectionProperty dataOptions){
if(!dataBuffer.empty()){
std::stringstream memorystream(dataBuffer);
std::string line;
std::vector<int> selectedOptions = dataOptions.value();
int numSelected = selectedOptions.size();
std::vector<std::vector<float>> values(selectedOptions.size(), std::vector<float>());
std::vector<float*> data(dataOptions.options().size(), nullptr);
for(int option : selectedOptions){
data[option] = new float[_dimensions.x*_dimensions.y]{0.0f};
}
int numValues = 0;
while(getline(memorystream, line)){
if(line.find("#") == 0){ //part of the header
continue;
}
std::stringstream ss(line);
std::vector<float> value;
float v;
while(ss >> v){
value.push_back(v);
}
if(value.size()){
for(int option : selectedOptions){
float v = value[option+3]; //+3 because "options" x, y and z.
data[option][numValues] = processDataPoint(v, option);
}
}
numValues++;
}
if(numValues != _dimensions.x*_dimensions.y){
LWARNING("Number of values read and expected are not the same");
return std::vector<float*>();
}
_filterValues = glm::vec2(0.0f);
if(!_histograms.empty()){
for(int option : selectedOptions){
std::shared_ptr<Histogram> histogram = _histograms[option];
float mean = (1.0 / _numValues[option]) * _sum[option];
float sd = _standardDeviation[option];
float filterMid = histogram->highestBinValue(_useHistogram);
float filterWidth = mean+histogram->binWidth();
if(_useHistogram) {
sd = histogram->equalize(sd);
mean = histogram->equalize(mean);
filterWidth = mean+1.0;
}
filterMid = normalizeWithStandardScore(filterMid, mean, sd);
filterWidth = fabs(0.5-normalizeWithStandardScore(filterWidth, mean, sd));
_filterValues += glm::vec2(filterMid, filterWidth);
}
}
if(numSelected>0){
_filterValues.x /= numSelected;
_filterValues.y /= numSelected;
}else{
_filterValues = glm::vec2(0.0, 1.0);
}
return data;
}else{
return std::vector<float*>();
}
glm::vec2 DataProcessor::filterValues(){
return _filterValues;
}
std::vector<float*> DataProcessor::readJSONData(std::string& dataBuffer, properties::SelectionProperty dataOptions){
if(!dataBuffer.empty()){
json j = json::parse(dataBuffer);
json var = j["variables"];
std::vector<int> selectedOptions = dataOptions.value();
int numSelected = selectedOptions.size();
std::vector<float> min(numSelected, std::numeric_limits<float>::max());
std::vector<float> max(numSelected, std::numeric_limits<float>::min());
std::vector<float> sum(numSelected, 0.0f);
std::vector<std::vector<float>> optionValues(numSelected, std::vector<float>());
auto options = dataOptions.options();
std::vector<float*> data(options.size(), nullptr);
int i = 0;
for(int option : selectedOptions){
data[option] = new float[_dimensions.x*_dimensions.y]{0.0f};
std::string optionName = options[option].description;
json valueArray = var[optionName];
int ySize = valueArray.size();
for(int y=0; y<valueArray.size(); y++){
json values = valueArray.at(y);
for(int x=0; x<values.size(); x++){
float v = values.at(x);
if(_useLog){
int sign = (v>0)? 1:-1;
if(v != 0){
v = sign*log(fabs(v));
}
}
optionValues[i].push_back(v);
min[i] = std::min(min[i], v);
max[i] = std::max(max[i], v);
sum[i] += v;
}
}
i++;
}
for(int i=0; i<numSelected; i++){
processData(data[ selectedOptions[i] ], optionValues[i], min[i], max[i], sum[i]);
}
return data;
}
else {
// LWARNING("Nothing in memory buffer, are you connected to the information super highway?");
return std::vector<float*>();
}
void DataProcessor::clear(){
_min.clear();
_max.clear();
_sum.clear();
_standardDeviation.clear();
_histograms.clear();
_numValues.clear();
}
void DataProcessor::addValuesFromJSON(std::string& dataBuffer, properties::SelectionProperty dataOptions){
int numOptions = dataOptions.options().size();
if(_min.empty()) _min = std::vector<float>(numOptions, std::numeric_limits<float>::max());
if(_max.empty()) _max = std::vector<float>(numOptions, std::numeric_limits<float>::min());
if(_sum.empty()) _sum = std::vector<float>(numOptions, 0.0f);
if(_standardDeviation.empty()) _standardDeviation = std::vector<float>(numOptions, 0.0f);
if(_numValues.empty()) _numValues= std::vector<float>(numOptions, 0.0f);
if(_histograms.empty())_histograms = std::vector<std::shared_ptr<Histogram>>(numOptions, nullptr);
if(!dataBuffer.empty()){
json j = json::parse(dataBuffer);
json var = j["variables"];
std::vector<int> selectedOptions = dataOptions.value();
int numSelected = selectedOptions.size();
std::vector<float> sum(numOptions, 0.0f);
std::vector<std::vector<float>> values(numOptions, std::vector<float>());
auto options = dataOptions.options();
std::vector<float*> data(options.size(), nullptr);
int i = 0;
for(int i=0; i<numOptions; i++){
// std::stringstream memorystream();
std::string optionName = options[i].description;
// getline(memorystream, optionName, '/');
// getline(memorystream, optionName, '/');
json valueArray = var[optionName];
int ySize = valueArray.size();
for(int y=0; y<valueArray.size(); y++){
json value = valueArray.at(y);
for(int x=0; x<value.size(); x++){
float v = value.at(x);
values[i].push_back(v);
_min[i] = std::min(_min[i],v);
_max[i] = std::max(_max[i],v);
sum[i] += v;
}
}
}
// // // for(int i=0; i<numOptions; i++){
// // // if(!_histograms[i]){
// // // _histograms[i] = std::make_shared<Histogram>(_min[i], _max[i], 512);
// // // }else{
// // // //_histogram[option]->changeRange();
// // // }
// // // int numValues = values[i].size();
// // // float mean = (1.0/numValues)*sum[i];
// // // float var = 0;
// // // for(int j=0; j<numValues; j++){
// // // var += pow(values[i][j] - mean, 2);
// // // _histograms[i]->add(values[i][j], 1);
// // // }
// // // float sd = sqrt(var / numValues);
// // // _sum[i] += sum[i];
// // // _standardDeviation[i] = sqrt(pow(_standardDeviation[i],2) + pow(sd, 2));
// // // _numValues[i] += numValues;
// // // _histograms[i]->generateEqualizer();
// // // }
for(int i=0; i<numOptions; i++){
if(!_histograms[i]){
_histograms[i] = std::make_shared<Histogram>(_min[i], _max[i], 512);
}else{
_histograms[i]->changeRange(_min[i], _max[i]);
}
int numValues = values[i].size();
float mean = (1.0/numValues)*sum[i];
float var = 0;
for(int j=0; j<numValues; j++){
var += pow(values[i][j] - mean, 2);
_histograms[i]->add(values[i][j], 1);
}
float sd = sqrt(var / numValues);
_sum[i] += sum[i];
_standardDeviation[i] = sqrt(pow(_standardDeviation[i],2) + pow(sd, 2));
_numValues[i] += numValues;
_histograms[i]->generateEqualizer();
}
}
}
std::vector<float*> DataProcessor::readJSONData2(std::string& dataBuffer, properties::SelectionProperty dataOptions){
if(!dataBuffer.empty()){
json j = json::parse(dataBuffer);
json var = j["variables"];
std::vector<int> selectedOptions = dataOptions.value();
int numSelected = selectedOptions.size();
std::vector<float> sum(numSelected, 0.0f);
std::vector<std::vector<float>> values(numSelected, std::vector<float>());
auto options = dataOptions.options();
std::vector<float*> data(options.size(), nullptr);
_filterValues = glm::vec2(0.0f);
for(int option : selectedOptions){
data[option] = new float[_dimensions.x*_dimensions.y]{0.0f};
// std::stringstream memorystream();
std::string optionName = options[option].description;
// getline(memorystream, optionName, '/');
// getline(memorystream, optionName, '/');
json yArray = var[optionName];
for(int y=0; y<yArray.size(); y++){
json xArray = yArray.at(y);
for(int x=0; x<xArray.size(); x++){
int i = x + y*xArray.size();
// std::cout << _dimensions.x*_dimensions.y << " " << i << std::endl;
float v = xArray.at(x);
data[option][i] = processDataPoint(v, option);
}
}
if(!_histograms.empty()){
float mean = (1.0 / _numValues[option]) * _sum[option];
float sd = _standardDeviation[option];
std::shared_ptr<Histogram> histogram = _histograms[option];
float filterMid = histogram->highestBinValue(_useHistogram);
float filterWidth = mean+histogram->binWidth();
if(_useHistogram) {
sd = histogram->equalize(sd);
mean = histogram->equalize(mean);
filterWidth = mean+1.0;
}
filterMid = normalizeWithStandardScore(filterMid, mean, sd);
filterWidth = fabs(0.5-normalizeWithStandardScore(filterWidth, mean, sd));
_filterValues += glm::vec2(filterMid, filterWidth);
}
}
if(numSelected>0){
_filterValues.x /= numSelected;
_filterValues.y /= numSelected;
}else{
_filterValues = glm::vec2(0.0, 1.0);
}
return data;
}
else {
// LWARNING("Nothing in memory buffer, are you connected to the information super highway?");
return std::vector<float*>();
}
}
void DataProcessor::addValuesFromKameleonData(float* kdata, glm::size3_t dimensions, int numOptions, int option){
if(_min.empty()) _min = std::vector<float>(numOptions, std::numeric_limits<float>::max());
if(_max.empty()) _max = std::vector<float>(numOptions, std::numeric_limits<float>::min());
if(_sum.empty()) _sum= std::vector<float>(numOptions, 0.0f);
if(_standardDeviation.empty()) _standardDeviation= std::vector<float>(numOptions, 0.0f);
if(_numValues.empty()) _numValues= std::vector<float>(numOptions, 0.0f);
if(_histograms.empty())_histograms = std::vector<std::shared_ptr<Histogram>>(numOptions, nullptr);
int numValues = dimensions.x*dimensions.y*dimensions.z;
float sum = 0;
for(int i=0; i<numValues; i++){
float v = kdata[i];
_min[option] = std::min(_min[option],v);
_max[option] = std::max(_max[option],v);
sum += v;
}
int i = option;
// for(int i=0; i<numOptions; i++){
if(!_histograms[i]){
_histograms[i] = std::make_shared<Histogram>(_min[i], _max[i], 512);
}else{
_histograms[i]->changeRange(_min[i], _max[i]);
}
// int numValues = values[i].size();
float mean = (1.0/numValues)*sum;
float var = 0;
for(int j=0; j<numValues; j++){
var += pow(kdata[j] - mean, 2);
_histograms[i]->add(kdata[j], 1);
}
float sd = sqrt(var / numValues);
_sum[i] += sum;
_standardDeviation[i] = sqrt(pow(_standardDeviation[i],2) + pow(sd, 2));
_numValues[i] += numValues;
_histograms[i]->generateEqualizer();
}
std::vector<float*> DataProcessor::processKameleonData2(std::vector<float*> kdata, glm::size3_t dimensions, properties::SelectionProperty dataOptions){
std::vector<int> selectedOptions = dataOptions.value();
int numSelected = selectedOptions.size();
std::vector<std::vector<float>> values(selectedOptions.size(), std::vector<float>());
std::vector<float*> data(dataOptions.options().size(), nullptr);
int numValues = dimensions.x*dimensions.y*dimensions.z;
_filterValues = glm::vec2(0.0f);
for(int option : selectedOptions){
data[option] = new float[numValues]{0.0f};
float mean = (1.0 / _numValues[option]) * _sum[option];
float sd = _standardDeviation[option];
for(int i=0; i<numValues; i++){
float v = kdata[option][i];
data[option][i] = processDataPoint(v, option);
}
std::shared_ptr<Histogram> histogram = _histograms[option];
float filterMid = histogram->highestBinValue(_useHistogram);
float filterWidth = mean+histogram->binWidth();
if(_useHistogram) {
sd = histogram->equalize(sd);
mean = histogram->equalize(mean);
filterWidth = mean+1.0;
}
filterMid = normalizeWithStandardScore(filterMid, mean, sd);
filterWidth = fabs(0.5-normalizeWithStandardScore(filterWidth, mean, sd));
_filterValues += glm::vec2(filterMid, filterWidth);
}
if(numSelected>0){
_filterValues.x /= numSelected;
_filterValues.y /= numSelected;
}else{
_filterValues = glm::vec2(0.0, 1.0);
}
return data;
}
// Processes raw Kameleon slice buffers for each selected data option:
// applies the optional signed-log transform, accumulates per-option
// min/max/sum statistics, and runs processData() over the gathered values.
//
// kdata       - one raw value buffer per option, indexed by option id
// dimensions  - slice dimensions; dimensions.x*y*z values are read per option
// dataOptions - selection property whose value() lists the active option ids
//
// Returns a vector indexed by option id; each selected slot holds a newly
// allocated processed buffer (ownership passes to the caller), all other
// slots stay nullptr.
std::vector<float*> DataProcessor::processKameleonData(std::vector<float*> kdata, glm::size3_t dimensions, properties::SelectionProperty dataOptions){
    std::vector<int> selectedOptions = dataOptions.value();
    int numSelected = selectedOptions.size();

    auto options = dataOptions.options();
    int numOptions = options.size();

    // Lazily initialize the cached per-option statistics. The "not yet
    // computed" sentinel for a maximum must be lowest() (most negative
    // float): numeric_limits<float>::min() is the smallest POSITIVE float
    // and would silently break slices containing only negative values.
    if(_min.empty()){
        _min = std::vector<float>(numOptions, std::numeric_limits<float>::max());
    }
    if(_max.empty()){
        _max = std::vector<float>(numOptions, std::numeric_limits<float>::lowest());
    }
    if(_sum.empty()){
        _sum = std::vector<float>(numOptions, 0.0f);
    }
    if(_standardDeviation.empty()){
        _standardDeviation = std::vector<float>(numOptions, 0.0f);
    }
    if(_histograms.empty()){
        _histograms = std::vector<std::shared_ptr<Histogram>>(numOptions, nullptr);
    }

    // Per-pass statistics for the selected options only.
    std::vector<float> min(numSelected, std::numeric_limits<float>::max());
    std::vector<float> max(numSelected, std::numeric_limits<float>::lowest());
    std::vector<float> sum(numSelected, 0.0f);
    std::vector<std::vector<float>> optionValues(numSelected, std::vector<float>());
    std::vector<float*> data(options.size(), nullptr);

    int numValues = dimensions.x*dimensions.y*dimensions.z;
    int i = 0;
    for(int option : selectedOptions){
        // Only fill in the cached statistics the first time an option is
        // seen. The max check also accepts the legacy min() sentinel, in
        // case the caches were initialized elsewhere with the old value.
        bool calculateMin = (_min[option] == std::numeric_limits<float>::max());
        bool calculateMax = (_max[option] == std::numeric_limits<float>::lowest() ||
                             _max[option] == std::numeric_limits<float>::min());
        bool calculateSum = (_sum[option] == 0.0f);

        data[option] = new float[numValues]{0.0f};
        for(int j=0; j<numValues; j++){
            float v = kdata[option][j];

            if(_useLog){
                // Signed log transform: preserves sign, compresses magnitude.
                int sign = (v>0)? 1:-1;
                if(v != 0){
                    v = sign*log(fabs(v));
                }
            }

            optionValues[i].push_back(v);
            min[i] = std::min(min[i], v);
            max[i] = std::max(max[i], v);
            sum[i] += v;

            if(calculateMin)
                _min[option] = std::min(_min[option], v);
            if(calculateMax)
                _max[option] = std::max(_max[option], v);
            if(calculateSum)
                _sum[option] += v;
        }
        i++;
    }

    // Normalize/equalize each selected option into its output buffer.
    for(int i=0; i<numSelected; i++){
        int selected = selectedOptions[i];
        processData(data[selected], optionValues[i], _min[selected], _max[selected], _sum[selected], selected);
    }

    return data;
}
// Normalizes inputData (optionally histogram-equalized first) and
// accumulates the result into outputData, which must hold
// inputData.size() floats and is expected to be zero-initialized by the
// caller (values are ADDED with "+=", so multiple passes accumulate).
//
// min/max  - value range of inputData, used to size the histogram bins
// sum      - precomputed sum of inputData, used for the mean
// selected - option id of this data set; currently unused here but kept
//            for interface stability with the declared signature
void DataProcessor::processData(float* outputData, std::vector<float>& inputData, float min, float max,float sum, int selected){
    const int numValues = static_cast<int>(inputData.size());
    // Guard against an empty slice: the mean/variance math below would
    // otherwise divide by zero and poison everything with NaN.
    if(numValues == 0){
        return;
    }

    Histogram histogram(min, max, 512);

    // Mean of the raw values.
    float mean = sum / numValues;

    // Standard deviation of the raw values (diff*diff beats pow(x, 2)).
    float var = 0;
    for(auto dataValue : inputData){
        float diff = dataValue - mean;
        var += diff * diff;
    }
    float standardDeviation = sqrt(var / numValues);

    // Optionally equalize the value distribution with a histogram; mean
    // and standard deviation must be mapped through the same equalizer so
    // the standard-score normalization below stays consistent.
    if(_useHistogram){
        for(auto dataValue : inputData){
            histogram.add(dataValue, 1);
        }
        histogram.generateEqualizer();
        standardDeviation = histogram.equalize(standardDeviation);
        mean = histogram.equalize(mean);
    }

    // Normalize (and equalize) every value into the output buffer.
    for(int i=0; i < numValues; i++){
        float v = inputData[i];
        if(_useHistogram){
            v = histogram.equalize(v);
        }
        v = normalizeWithStandardScore(v, mean, standardDeviation);
        outputData[i] += v;
    }

    // Derive default filter values (midpoint, width) from the most
    // populated histogram bin.
    if(_useHistogram){
        float val = histogram.highestBinValue(_useHistogram);
        val = normalizeWithStandardScore(val, mean, standardDeviation);
        float width = normalizeWithStandardScore(1, mean, standardDeviation);
        _filterValues = glm::vec2( val, width);
    }
}
float DataProcessor::processDataPoint(float value, int option){
if(_numValues.empty()) return 0.0f;
@@ -814,13 +91,11 @@ float DataProcessor::processDataPoint(float value, int option){
float sd = _standardDeviation[option];
if(_useHistogram){
// std::cout << sd << " " <<
sd = histogram->equalize(sd);
mean = histogram->equalize(mean);
value = histogram->equalize(value);
}
float v = normalizeWithStandardScore(value, mean, sd);
return v;
}
@@ -836,20 +111,6 @@ float DataProcessor::normalizeWithStandardScore(float value, float mean, float s
return ( standardScore + zScoreMin )/(zScoreMin + zScoreMax );
}
// Accessor for the most recently computed filter values
// (midpoint in x, width in y).
glm::vec2 DataProcessor::filterValues(){
    const glm::vec2 currentFilter = _filterValues;
    return currentFilter;
}
void DataProcessor::clear(){
_min.clear();
_max.clear();
_sum.clear();
_standardDeviation.clear();
_histograms.clear();
_numValues.clear();
}
void DataProcessor::initializeVectors(int numOptions){
if(_min.empty()) _min = std::vector<float>(numOptions, std::numeric_limits<float>::max());
if(_max.empty()) _max = std::vector<float>(numOptions, std::numeric_limits<float>::min());
@@ -867,7 +128,6 @@ void DataProcessor::calculateFilterValues(std::vector<int> selectedOptions){
_filterValues = glm::vec2(0.0);
if(numSelected <= 0) return;
if(!_histograms.empty()){
for(int option : selectedOptions){
histogram = _histograms[option];

View File

@@ -38,56 +38,20 @@ class DataProcessor{
friend class IswaBaseGroup;
public:
DataProcessor();
DataProcessor(bool useLog, bool useHistogram, glm::vec2 normValues);
~DataProcessor();
void useLog(bool useLog){
_useLog = useLog;
}
void useHistogram(bool useHistogram){
_useHistogram = useHistogram;
}
void normValues(glm::vec2 normValues){
_normValues = normValues;
}
glm::size3_t dimensions(){
return _dimensions;
}
std::vector<std::string> readHeader(std::string& dataBuffer);
std::vector<float*> readData(std::string& dataBuffer, properties::SelectionProperty dataOptions);
std::vector<float*> readData2(std::string& dataBuffer, properties::SelectionProperty dataOptions);
void addValues(std::string& dataBuffer, properties::SelectionProperty dataOptions);
std::vector<std::string> readJSONHeader(std::string& dataBuffer);
std::vector<float*> readJSONData(std::string& dataBuffer, properties::SelectionProperty dataOptions);
std::vector<float*> readJSONData2(std::string& dataBuffer, properties::SelectionProperty dataOptions);
void addValuesFromJSON(std::string& dataBuffer, properties::SelectionProperty dataOptions);
std::vector<float*> processKameleonData(std::vector<float*> kdata, glm::size3_t dimensions, properties::SelectionProperty dataOptions);
std::vector<float*> processKameleonData2(std::vector<float*> kdata, glm::size3_t dimensions, properties::SelectionProperty dataOptions);
void addValuesFromKameleonData(float* kdata, glm::size3_t dimensions, int numOptions, int option);
void clear();
virtual std::vector<std::string> readMetadata(std::string data) = 0;
virtual void addDataValues(std::string data, properties::SelectionProperty& dataOptions) = 0;
virtual std::vector<float*> processData(std::string data, properties::SelectionProperty& dataOptions) = 0;
void useLog(bool useLog);
void useHistogram(bool useHistogram);
void normValues(glm::vec2 normValues);
glm::size3_t dimensions();
glm::vec2 filterValues();
virtual std::vector<std::string> readMetadata(std::string data){};
virtual void addDataValues(std::string data, properties::SelectionProperty& dataOptions){};
virtual std::vector<float*> processData(std::string data, properties::SelectionProperty& dataOptions){};
void clear();
protected:
void processData(
float* outputData, // Where you want your processed data to go
std::vector<float>& inputData, //data that needs processing
float min, // min value of the input data
float max, // max valye of the input data
float sum, // sum of the input data
int selected = 0
);
float processDataPoint(float value, int option);
float normalizeWithStandardScore(float value, float mean, float sd);
@@ -107,7 +71,6 @@ protected:
std::vector<float> _standardDeviation;
std::vector<float> _numValues;
std::vector<std::shared_ptr<Histogram>> _histograms;
// int _numValues;
std::set<std::string> _coordinateVariables;
};

View File

@@ -54,7 +54,7 @@ std::vector<std::string> DataProcessorKameleon::readMetadata(std::string path){
opts.erase( std::remove_if(
opts.begin(),
opts.end(),
[](std::string opt){ return (opt.size() > 3 || opt == "x" || opt == "y" || opt == "z");}
[this](std::string opt){ return (opt.size() > 3 || _coordinateVariables.find(opt) != _coordinateVariables.end());}
),
opts.end()
);
@@ -82,6 +82,7 @@ void DataProcessorKameleon::addDataValues(std::string path, properties::Selectio
float value;
for(int i=0; i<numOptions; i++){
//0.5 to gather interesting values for the normalization/histograms.
values = _kw->getUniformSliceValues(options[i].description, _dimensions, 0.5f);
for(int j=0; j<numValues; j++){

View File

@@ -164,12 +164,18 @@ void IswaManager::addIswaCygnet(int id, std::string type, std::string group){
// }
}
void IswaManager::addKameleonCdf(std::string group, int pos){
void IswaManager::addKameleonCdf(std::string groupName, int pos){
// auto info = _cdfInformation[group][pos];
// std::cout << group << " " << pos << std::endl;
createKameleonPlane(_cdfInformation[group][pos], "z");
createKameleonPlane(_cdfInformation[group][pos], "y");
createKameleonPlane(_cdfInformation[group][pos], "x");
auto group = iswaGroup(groupName);
if(group){
std::dynamic_pointer_cast<IswaKameleonGroup>(group)->changeCdf(_cdfInformation[groupName][pos].path);
return;
}
createKameleonPlane(_cdfInformation[groupName][pos], "z");
createKameleonPlane(_cdfInformation[groupName][pos], "y");
createKameleonPlane(_cdfInformation[groupName][pos], "x");
}
std::future<DownloadManager::MemoryFile> IswaManager::fetchImageCygnet(int id){

View File

@@ -137,8 +137,8 @@ void GuiIswaComponent::render() {
if(cdfOptionValue != cdfOption){
if(cdfOptionValue >= 0){
groupName = cdfs[cdfOptionValue].group;
std::cout << groupName << std::endl;
OsEng.scriptEngine().queueScript("openspace.iswa.removeGroup('"+groupName+"');");
// std::cout << groupName << std::endl;
// OsEng.scriptEngine().queueScript("openspace.iswa.removeGroup('"+groupName+"');");
}
std::string path = cdfs[cdfOption].path;