diff --git a/src/Lib/Array/array.cxx b/src/Lib/Array/array.cxx index 2550d67f..c2aa2fcf 100644 --- a/src/Lib/Array/array.cxx +++ b/src/Lib/Array/array.cxx @@ -78,23 +78,33 @@ TGArray::TGArray( const string &file ) { } -// open an Array file -bool -TGArray::open( const string& file ) { +// open an Array file (and fitted file if it exists) +bool TGArray::open( const string& file_base ) { // open input file (or read from stdin) - if ( file == "-" ) { + if ( file_base == "-" ) { cout << " Opening array data pipe from stdin" << endl; // fd = stdin; // fd = gzdopen(STDIN_FILENO, "r"); cout << " Not yet ported ..." << endl; return false; } else { - in = new sg_gzifstream( file ); - if ( ! in->is_open() ) { - cout << " Cannot open " << file << endl; + // open array data file + string array_name = file_base + ".arr.gz"; + array_in = new sg_gzifstream( array_name ); + if ( ! array_in->is_open() ) { + cout << " Cannot open " << array_name << endl; return false; } - cout << " Opening array data file: " << file << endl; + cout << " Opening array data file: " << array_name << endl; + + // open fitted data file (missing .fit is not fatal; parse() fits on the fly) + string fitted_name = file_base + ".fit.gz"; + fitted_in = new sg_gzifstream( fitted_name ); + if ( ! fitted_in->is_open() ) { + cout << " Cannot open " << fitted_name << endl; + } else { + cout << " Opening fitted data file: " << fitted_name << endl; + } } return true; @@ -106,7 +116,8 @@ bool TGArray::close() { // the sg_gzifstream doesn't seem to have a close() - delete in; + delete array_in; + delete fitted_in; return true; } @@ -116,11 +127,12 @@ TGArray::close() { // the file wasn't found. 
bool TGArray::parse( SGBucket& b ) { - if ( in->is_open() ) { + // Parse/load the array data file + if ( array_in->is_open() ) { // file open, parse - *in >> originx >> originy; - *in >> cols >> col_step; - *in >> rows >> row_step; + *array_in >> originx >> originy; + *array_in >> cols >> col_step; + *array_in >> rows >> row_step; cout << " origin = " << originx << " " << originy << endl; cout << " cols = " << cols << " rows = " << rows << endl; @@ -129,7 +141,7 @@ TGArray::parse( SGBucket& b ) { for ( int i = 0; i < cols; i++ ) { for ( int j = 0; j < rows; j++ ) { - *in >> in_data[i][j]; + *array_in >> in_data[i][j]; } } @@ -238,6 +250,25 @@ TGArray::parse( SGBucket& b ) { cout << " File not open, so using zero'd data" << endl; } + // Parse/load the fitted data file + if ( fitted_in->is_open() ) { + fit_on_the_fly = false; + int fitted_size; + double x, y, z, error; + *fitted_in >> fitted_size; + for ( int i = 0; i < fitted_size; ++i ) { + *fitted_in >> x >> y >> z >> error; + if ( i < 4 ) { + // skip first 4 corner nodes + } else { + fitted_list.push_back( Point3D(x, y, z) ); + cout << " loading fitted = " << Point3D(x, y, z) << endl; + } + } + } else { + fit_on_the_fly = true; + } + return true; } @@ -258,13 +289,20 @@ void TGArray::add_fit_node( int i, int j, double val ) { double x = (originx + i * col_step) / 3600.0; double y = (originy + j * row_step) / 3600.0; // cout << Point3D(x, y, val) << endl; - node_list.push_back( Point3D(x, y, val) ); + fitted_list.push_back( Point3D(x, y, val) ); } // Use least squares to fit a simpler data set to dem data. Return -// the number of fitted nodes +// the number of fitted nodes. This is a horrible approach that +// doesn't really work, but it's better than nothing if you've got +// nothing. Using src/Prep/ArrayFit to create .fit files from the +// .arr files is a *much* better approach, but it is slower which is +// why it needs to be done "offline". int TGArray::fit( double error ) { + if ( ! 
fit_on_the_fly ) { + return fitted_list.size(); + } double x[ARRAY_SIZE_1], y[ARRAY_SIZE_1]; double m, b, max_error, error_sq; double x1, y1; @@ -279,7 +317,7 @@ int TGArray::fit( double error ) { cout << " Initializing fitted node list" << endl; corner_list.clear(); - node_list.clear(); + fitted_list.clear(); // determine dimensions colmin = 0; @@ -415,7 +453,7 @@ int TGArray::fit( double error ) { // outputmesh_output_nodes(fg_root, p); // return fit nodes + 4 corners - return node_list.size() + 4; + return fitted_list.size() + 4; } diff --git a/src/Lib/Array/array.hxx b/src/Lib/Array/array.hxx index a821a39f..ad0c9b6d 100644 --- a/src/Lib/Array/array.hxx +++ b/src/Lib/Array/array.hxx @@ -53,9 +53,11 @@ class TGArray { private: - // file pointer for input - // gzFile fd; - sg_gzifstream *in; + // array file pointer + sg_gzifstream *array_in; + + // fitted file pointer + sg_gzifstream *fitted_in; // coordinates (in arc seconds) of south west corner double originx, originy; @@ -72,7 +74,10 @@ private: // output nodes point_list corner_list; - point_list node_list; + point_list fitted_list; + + // true if no .fit file was loaded, so fit() must compute the nodes itself + bool fit_on_the_fly; public: @@ -84,7 +89,7 @@ public: ~TGArray( void ); // open an Array file (use "-" if input is coming from stdin) - bool open ( const string& file ); + bool open ( const string& file_base ); // close a Array file bool close(); @@ -93,7 +98,11 @@ public: bool parse( SGBucket& b ); // Use least squares to fit a simpler data set to dem data. - // Return the number of fitted nodes + // Return the number of fitted nodes. This is a horrible approach + // that doesn't really work, but it's better than nothing if + // you've got nothing. Using src/Prep/ArrayFit to create .fit + // files from the .arr files is a *much* better approach, but it + // is slower which is why it needs to be done "offline". 
int fit( double error ); // add a node to the output corner node list @@ -115,12 +124,15 @@ public: inline double get_col_step() const { return col_step; } inline double get_row_step() const { return row_step; } - inline point_list get_corner_node_list() const { return corner_list; } - inline point_list get_fit_node_list() const { return node_list; } + inline point_list get_corner_list() const { return corner_list; } + inline point_list get_fitted_list() const { return fitted_list; } - inline int get_point( int col, int row ) { + inline int get_array_elev( int col, int row ) { return in_data[col][row]; } + inline Point3D get_fitted_pt( int i ) { + return fitted_list[i]; + } }; diff --git a/src/Lib/Array/testarray.cxx b/src/Lib/Array/testarray.cxx index e10a5eb2..973f5fc4 100644 --- a/src/Lib/Array/testarray.cxx +++ b/src/Lib/Array/testarray.cxx @@ -22,10 +22,10 @@ int main( int argc, char **argv ) { string base = b.gen_base_path(); string path = work_dir + "/" + base; - string arrayfile = path + "/" + b.gen_index_str() + ".dem"; - cout << "arrayfile = " << arrayfile << endl; + string arraybase = path + "/" + b.gen_index_str(); + cout << "arraybase = " << arraybase << endl; - TGArray a(arrayfile); + TGArray a(arraybase); a.parse( b ); lon *= 3600; diff --git a/src/Lib/Array/testgts.cxx b/src/Lib/Array/testgts.cxx index 8a3eb118..bb5bf286 100644 --- a/src/Lib/Array/testgts.cxx +++ b/src/Lib/Array/testgts.cxx @@ -7,7 +7,7 @@ // Essentially start with two triangles forming the bounding surface. // Then add the point that has the greatest error. Retriangulate. // Recalcuate errors for each remaining point, add the one with the -// greatest error. +// greatest error. Lather, rinse, repeat. 
// // Outputs to a file that can be visualized with gtsview (available // from http://gts.sf.net) @@ -120,7 +120,7 @@ int main( int argc, char **argv ) { } else { x = basex + i * dx; y = basey + j * dy; - z = a.get_point( i, j ); + z = a.get_array_elev( i, j ); pending.push_back( Point3D(x, y, z) ); } }