1
0
Fork 0

- multithreaded tgconstruct

This commit is contained in:
Peter Sadrozinski 2012-12-22 09:16:14 -05:00
parent 6d72319a68
commit b007979a38
10 changed files with 537 additions and 626 deletions

View file

@ -117,6 +117,8 @@ int main(int argc, char **argv) {
}
}
int num_threads = 8;
if ( share_dir == "" ) {
share_dir = work_dir + "/Shared";
}
@ -149,125 +151,118 @@ int main(int argc, char **argv) {
exit(-1);
}
// main construction data management class : Stage 1
// three identical work queues
SGLockedQueue<SGBucket> wq[3];
// First generate the workqueue of buckets to construct
if (tile_id == -1) {
// build all the tiles in an area
SG_LOG(SG_GENERAL, SG_ALERT, "Building tile(s) within given bounding box");
SGBucket b_min( min );
SGBucket b_max( max );
if ( b_min == b_max ) {
TGConstruct* all_stages;
all_stages = new TGConstruct();
all_stages->set_cover( cover );
all_stages->set_paths( work_dir, share_dir, output_dir, load_dirs );
all_stages->set_options( ignoreLandmass, nudge );
all_stages->set_bucket( b_min );
all_stages->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
all_stages->ConstructBucketStage1();
all_stages->ConstructBucketStage2();
all_stages->ConstructBucketStage3();
delete all_stages;
for (unsigned int q=0; q<3; q++) {
wq[q].push( b_min );
}
} else {
SGBucket b_cur;
int dx, dy, i, j;
int total_buckets, cur_bucket;
int dx, dy;
int i, j;
sgBucketDiff(b_min, b_max, &dx, &dy);
SG_LOG(SG_GENERAL, SG_ALERT, " construction area spans tile boundaries");
SG_LOG(SG_GENERAL, SG_ALERT, " dx = " << dx << " dy = " << dy);
// construct stage 1
total_buckets = (dx+1) * (dy + 1);
cur_bucket = 0;
for ( j = 0; j <= dy; j++ ) {
for ( i = 0; i <= dx; i++ ) {
b_cur = sgBucketOffset(min.getLongitudeDeg(), min.getLatitudeDeg(), i, j);
TGConstruct* stage1;
stage1 = new TGConstruct();
stage1->set_cover( cover );
stage1->set_paths( work_dir, share_dir, output_dir, load_dirs );
stage1->set_options( ignoreLandmass, nudge );
stage1->set_bucket( b_cur );
stage1->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
SG_LOG(SG_GENERAL, SG_ALERT, "STAGE 1: Construct bucket " << cur_bucket++ << " of " << total_buckets );
stage1->ConstructBucketStage1();
stage1->SaveToIntermediateFiles(1);
delete stage1;
}
}
// construct stage 2
cur_bucket = 0;
for ( j = 0; j <= dy; j++ ) {
for ( i = 0; i <= dx; i++ ) {
b_cur = sgBucketOffset(min.getLongitudeDeg(), min.getLatitudeDeg(), i, j);
TGConstruct* stage2;
stage2 = new TGConstruct();
stage2->set_cover( cover );
stage2->set_paths( work_dir, share_dir, output_dir, load_dirs );
stage2->set_options( ignoreLandmass, nudge );
stage2->set_bucket( b_cur );
stage2->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
SG_LOG(SG_GENERAL, SG_ALERT, "STAGE 2: Construct bucket " << cur_bucket++ << " of " << total_buckets );
stage2->LoadFromIntermediateFiles(1);
stage2->ConstructBucketStage2();
stage2->SaveToIntermediateFiles(2);
delete stage2;
}
}
// construct stage 3
cur_bucket = 0;
for ( j = 0; j <= dy; j++ ) {
for ( i = 0; i <= dx; i++ ) {
b_cur = sgBucketOffset(min.getLongitudeDeg(), min.getLatitudeDeg(), i, j);
TGConstruct* stage3;
stage3 = new TGConstruct();
stage3->set_cover( cover );
stage3->set_paths( work_dir, share_dir, output_dir, load_dirs );
stage3->set_options( ignoreLandmass, nudge );
stage3->set_bucket( b_cur );
stage3->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
SG_LOG(SG_GENERAL, SG_ALERT, "STAGE 3: Construct bucket " << cur_bucket++ << " of " << total_buckets );
stage3->LoadFromIntermediateFiles(2);
stage3->ConstructBucketStage3();
delete stage3;
for (unsigned int q=0; q<3; q++) {
wq[q].push( sgBucketOffset(min.getLongitudeDeg(), min.getLatitudeDeg(), i, j) );
}
}
}
}
} else {
// construct the specified tile
SG_LOG(SG_GENERAL, SG_ALERT, "Building tile " << tile_id);
SGBucket b( tile_id );
TGConstruct* all_stages;
all_stages = new TGConstruct();
all_stages->set_cover( cover );
all_stages->set_paths( work_dir, share_dir, output_dir, load_dirs );
all_stages->set_options( ignoreLandmass, nudge );
all_stages->set_bucket( b );
all_stages->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
all_stages->ConstructBucketStage1();
all_stages->ConstructBucketStage2();
all_stages->ConstructBucketStage3();
delete all_stages;
for (unsigned int q=0; q<3; q++) {
wq[q].push( SGBucket( tile_id ) );
}
}
// now create the worker threads for stage 1
std::vector<TGConstruct *> constructs;
for (int i=0; i<num_threads; i++) {
TGConstruct* construct = new TGConstruct( 1, wq[0] );
construct->set_cover( cover );
construct->set_paths( work_dir, share_dir, output_dir, load_dirs );
construct->set_options( ignoreLandmass, nudge );
construct->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
constructs.push_back( construct );
}
// start all threads
for (unsigned int i=0; i<constructs.size(); i++) {
constructs[i]->start();
}
// wait for all threads to complete
for (unsigned int i=0; i<constructs.size(); i++) {
constructs[i]->join();
}
// delete the stage 1 construct objects
for (unsigned int i=0; i<constructs.size(); i++) {
delete constructs[i];
}
constructs.clear();
for (int i=0; i<num_threads; i++) {
TGConstruct* construct = new TGConstruct( 2, wq[1] );
construct->set_cover( cover );
construct->set_paths( work_dir, share_dir, output_dir, load_dirs );
construct->set_options( ignoreLandmass, nudge );
construct->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
constructs.push_back( construct );
}
// start all threads
for (unsigned int i=0; i<constructs.size(); i++) {
constructs[i]->start();
}
// wait for all threads to complete
for (unsigned int i=0; i<constructs.size(); i++) {
constructs[i]->join();
}
// delete the stage 2 construct objects
for (unsigned int i=0; i<constructs.size(); i++) {
delete constructs[i];
}
constructs.clear();
for (int i=0; i<num_threads; i++) {
TGConstruct* construct = new TGConstruct( 3, wq[2] );
construct->set_cover( cover );
construct->set_paths( work_dir, share_dir, output_dir, load_dirs );
construct->set_options( ignoreLandmass, nudge );
construct->set_debug( debug_dir, debug_area_defs, debug_shape_defs );
constructs.push_back( construct );
}
// start all threads
for (unsigned int i=0; i<constructs.size(); i++) {
constructs[i]->start();
}
// wait for all threads to complete
for (unsigned int i=0; i<constructs.size(); i++) {
constructs[i]->join();
}
// delete the stage 3 construct objects
for (unsigned int i=0; i<constructs.size(); i++) {
delete constructs[i];
}
constructs.clear();
SG_LOG(SG_GENERAL, SG_ALERT, "[Finished successfully]");
return 0;
}

View file

@ -33,12 +33,16 @@
const double TGConstruct::gSnap = 0.00000001; // approx 1 mm
// Constructor
TGConstruct::TGConstruct():
TGConstruct::TGConstruct(unsigned int s, SGLockedQueue<SGBucket>& q) :
workQueue(q),
stage(s),
ignoreLandmass(false),
debug_all(false),
ds_id((void*)-1),
isOcean(false)
{ }
{
total_tiles = q.size();
}
// Destructor
@ -66,175 +70,170 @@ void TGConstruct::set_options( bool ignore_lm, double n ) {
nudge = n;
}
// master construction routine
// TODO : Split each step into its own function, and move
// into separate files by major functionality
// loading, clipping, tesselating, normals, and output
// Also, we are still calculating some things more than once
// (like face area - need to move this into superpoly )
void TGConstruct::ConstructBucketStage1() {
SG_LOG(SG_GENERAL, SG_ALERT, "Tile ID " << bucket.gen_index_str() << " in " << bucket.gen_base_path() );
void TGConstruct::run()
{
unsigned int tiles_complete;
/* If we have some debug IDs, create a datasource */
if ( debug_shapes.size() || debug_all ) {
sprintf(ds_name, "%s/constructdbg_%s", debug_path.c_str(), bucket.gen_index_str().c_str() );
SG_LOG(SG_GENERAL, SG_ALERT, "Debug_string: " << ds_name );
} else {
strcpy( ds_name, "" );
}
// as long as we have geometry to parse, do so
while ( !workQueue.empty() ) {
bucket = workQueue.pop();
tiles_complete = total_tiles - workQueue.size();
// STEP 1)
// Load grid of elevation data (Array), and add the nodes
LoadElevationArray( true );
// assume non ocean tile until proven otherwise
isOcean = false;
// STEP 2)
// Clip 2D polygons against one another
SG_LOG(SG_GENERAL, SG_ALERT, " - Loading landclass polys" );
if ( LoadLandclassPolys() == 0 ) {
// don't build the tile if there is no 2d data ... it *must*
// be ocean and the sim can build the tile on the fly.
SetOceanTile();
return;
}
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Construct in " << bucket.gen_base_path() << " tile " << tiles_complete << " of " << total_tiles );
// STEP 3)
// Load the land use polygons if the --cover option was specified
if ( get_cover().size() > 0 ) {
SG_LOG(SG_GENERAL, SG_ALERT, " - Loading landclass raster" );
load_landcover();
}
// STEP 4)
// Clip the Landclass polygons
SG_LOG(SG_GENERAL, SG_ALERT, " - Clipping landclass polys" );
ClipLandclassPolys();
// STEP 5)
// Clean the polys - after this, we shouldn't change their shape (other than slightly to
// fix T-junctions) - as this is the end of the first pass for multicore design
SG_LOG(SG_GENERAL, SG_ALERT, " - Cleaning landclass polys" );
nodes.init_spacial_query();
CleanClippedPolys();
// STEP 6)
// Save the tile boundary info for stage 2 (just x,y coords of points on the boundary)
SaveSharedEdgeData( 1 );
}
void TGConstruct::ConstructBucketStage2() {
if ( !IsOceanTile() ) {
SG_LOG(SG_GENERAL, SG_ALERT, "Tile ID " << bucket.gen_index_str() << " in " << bucket.gen_base_path() );
/* If we have some debug IDs, create a datasource */
if ( debug_shapes.size() || debug_all ) {
sprintf(ds_name, "%s/constructdbg_%s", debug_path.c_str(), bucket.gen_index_str().c_str() );
SG_LOG(SG_GENERAL, SG_ALERT, "Debug_string: " << ds_name );
} else {
strcpy( ds_name, "" );
}
// STEP 7)
// Need the array of elevation data for stage 2, but don't add the nodes - we already have them
LoadElevationArray( false );
// STEP 8)
// Merge in Shared data - should just be x,y nodes on the borders from stage1
LoadSharedEdgeData( 1 );
// STEP 9)
// Fix T-Junctions by finding nodes that lie close to polygon edges, and
// inserting them into the edge
SG_LOG(SG_GENERAL, SG_ALERT, " - Fix T-Junctions" );
nodes.init_spacial_query();
FixTJunctions();
// STEP 10)
// Generate triangles - we can't generate the node-face lookup table
// until all polys are tesselated, as extra nodes can still be generated
SG_LOG(SG_GENERAL, SG_ALERT, " - Tesselate" );
TesselatePolys();
// STEP 12)
// Generate triangle vertex coordinates to node index lists
// NOTE: After this point, no new nodes can be added
SG_LOG(SG_GENERAL, SG_ALERT, " - Lookup Nodes Per Vertex");
LookupNodesPerVertex();
// STEP 13)
// Interpolate elevations, and flatten stuff
SG_LOG(SG_GENERAL, SG_ALERT, " - Calculate Elevation Per Node");
CalcElevations();
// STEP 14)
// Generate face_connected list - needed for saving the edge data
SG_LOG(SG_GENERAL, SG_ALERT, " - Lookup Faces Per Node");
LookupFacesPerNode();
// STEP 15)
// Save the tile boundary info for stage 3
// includes elevation info, and a list of connected triangles
nodes.init_spacial_query();
SaveSharedEdgeData( 2 );
}
}
void TGConstruct::ConstructBucketStage3() {
if ( !IsOceanTile() ) {
SG_LOG(SG_GENERAL, SG_ALERT, "Tile ID " << bucket.gen_index_str() << " in " << bucket.gen_base_path() );
/* If we have some debug IDs, create a datasource */
if ( debug_shapes.size() || debug_all ) {
sprintf(ds_name, "%s/constructdbg_%s", debug_path.c_str(), bucket.gen_index_str().c_str() );
SG_LOG(SG_GENERAL, SG_ALERT, "Debug_string: " << ds_name );
} else {
strcpy( ds_name, "" );
if ( stage > 1 ) {
LoadFromIntermediateFiles( stage-1 );
}
SG_LOG(SG_GENERAL, SG_ALERT, " - Lookup Faces Per Node (again)");
LookupFacesPerNode();
switch( stage ) {
case 1:
// STEP 1)
// Load grid of elevation data (Array), and add the nodes
LoadElevationArray( true );
// STEP 16)
// Load in the neighbor faces and elevation data
LoadSharedEdgeDataStage2();
// STEP 2)
// Clip 2D polygons against one another
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Loading landclass polys" );
if ( LoadLandclassPolys() == 0 ) {
// don't build the tile if there is no 2d data ... it *must*
// be ocean and the sim can build the tile on the fly.
isOcean = true;
break;
}
// STEP 17)
// Average out the elevation for nodes on tile boundaries
SG_LOG(SG_GENERAL, SG_ALERT, " - Average Edge Node Elevations");
AverageEdgeElevations();
// STEP 3)
// Load the land use polygons if the --cover option was specified
if ( get_cover().size() > 0 ) {
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Loading landclass raster" );
load_landcover();
}
// STEP 18)
// Calculate Face Normals
SG_LOG(SG_GENERAL, SG_ALERT, " - Calculate Face Normals");
CalcFaceNormals();
// STEP 4)
// Clip the Landclass polygons
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Clipping landclass polys" );
ClipLandclassPolys();
// STEP 19)
// Calculate Point Normals
SG_LOG(SG_GENERAL, SG_ALERT, " - Calculate Point Normals");
CalcPointNormals();
// STEP 5)
// Clean the polys - after this, we shouldn't change their shape (other than slightly to
// fix T-junctions) - as this is the end of the first pass for multicore design
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Cleaning landclass polys" );
nodes.init_spacial_query();
CleanClippedPolys();
break;
case 2:
if ( !IsOceanTile() ) {
// STEP 6)
// Need the array of elevation data for stage 2, but don't add the nodes - we already have them
LoadElevationArray( false );
// STEP 7)
// Fix T-Junctions by finding nodes that lie close to polygon edges, and
// inserting them into the edge
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Fix T-Junctions" );
nodes.init_spacial_query();
FixTJunctions();
// STEP 8)
// Generate triangles - we can't generate the node-face lookup table
// until all polys are tesselated, as extra nodes can still be generated
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Tesselate" );
TesselatePolys();
// STEP 9)
// Generate triangle vertex coordinates to node index lists
// NOTE: After this point, no new nodes can be added
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Lookup Nodes Per Vertex");
LookupNodesPerVertex();
// STEP 10)
// Interpolate elevations, and flatten stuff
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Calculate Elevation Per Node");
CalcElevations();
// ONLY do this when saving edge nodes...
// STEP 11)
// Generate face-connected list - needed for saving the edge data
// SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Lookup Faces Per Node");
// LookupFacesPerNode();
}
break;
case 3:
if ( !IsOceanTile() ) {
// STEP 12
// Generate face-connected list (again) - it was needed to save faces of the
// edge nodes, but saving the entire tile is i/o intensive - it's faster
// to just recompute the list
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Lookup Faces Per Node (again)");
LookupFacesPerNode();
// STEP 13)
// Average out the elevation for nodes on tile boundaries
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Average Edge Node Elevations");
AverageEdgeElevations();
// STEP 14)
// Calculate Face Normals
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Calculate Face Normals");
CalcFaceNormals();
// STEP 15)
// Calculate Point Normals
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Calculate Point Normals");
CalcPointNormals();
#if 0
// STEP 20)
if ( c.get_cover().size() > 0 ) {
// Now for all the remaining "default" land cover polygons, assign
// each one it's proper type from the land use/land cover
// database.
fix_land_cover_assignments( c );
}
// STEP 16)
if ( c.get_cover().size() > 0 ) {
// Now for all the remaining "default" land cover polygons, assign
// each one it's proper type from the land use/land cover
// database.
fix_land_cover_assignments( c );
}
#endif
// STEP 21)
// Calculate Texture Coordinates
SG_LOG(SG_GENERAL, SG_ALERT, " - Calculate Texture Coordinates");
CalcTextureCoordinates();
// STEP 17)
// Calculate Texture Coordinates
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Calculate Texture Coordinates");
CalcTextureCoordinates();
// STEP 22)
// Generate the btg file
SG_LOG(SG_GENERAL, SG_ALERT, " - Generate BTG File");
WriteBtgFile();
// STEP 18)
// Generate the btg file
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Generate BTG File");
WriteBtgFile();
// STEP 23)
// Write Custom objects to .stg file
SG_LOG(SG_GENERAL, SG_ALERT, " - Generate Custome Objects");
AddCustomObjects();
// STEP 19)
// Write Custom objects to .stg file
SG_LOG(SG_GENERAL, SG_ALERT, bucket.gen_index_str() << " - Generate Custome Objects");
AddCustomObjects();
}
break;
}
if ( ( stage < 3 ) && ( !IsOceanTile() ) ) {
// Save data for next stage
if ( stage == 2 ) {
nodes.init_spacial_query(); // for stage 2 only...
}
SaveSharedEdgeData( stage );
SaveToIntermediateFiles( stage );
}
// Clean up for next work queue item
array.close();
polys_in.clear();
polys_clipped.clear();
nodes.clear();
neighbor_faces.clear();
}
}

View file

@ -30,6 +30,8 @@
# error This library requires C++
#endif
#include <simgear/threads/SGThread.hxx>
#include <simgear/threads/SGQueue.hxx>
#define TG_MAX_AREA_TYPES 128
@ -56,9 +58,109 @@ typedef std::vector < TGNeighborFaces > neighbor_face_list;
typedef neighbor_face_list::iterator neighbor_face_list_iterator;
typedef neighbor_face_list::const_iterator const_neighbor_face_list_iterator;
class TGConstruct {
class TGConstruct : public SGThread
{
public:
// Constructor
TGConstruct(unsigned int s, SGLockedQueue<SGBucket>& q);
// Destructor
~TGConstruct();
// New shared edge matching
void SaveToIntermediateFiles( int stage );
void LoadFromIntermediateFiles( int stage );
int load_landcover ();
double measure_roughness( tgContour &contour );
AreaType get_landcover_type (const LandCover &cover, double xpos, double ypos, double dx, double dy);
void make_area( const LandCover &cover, tgpolygon_list& polys,
double x1, double y1, double x2, double y2,
double half_dx, double half_dy );
// land cover file
inline std::string get_cover () const { return cover; }
inline void set_cover (const std::string &s) { cover = s; }
// paths
void set_paths( const std::string work, const std::string share, const std::string output, const std::vector<std::string> load_dirs );
void set_options( bool ignore_lm, double n );
// TODO : REMOVE
inline TGNodes* get_nodes() { return &nodes; }
// node list in geodetic coords (with fixed elevation)
inline void get_geod_nodes( std::vector<SGGeod>& points ) const { nodes.get_geod_nodes( points ); }
// normal list (for each point) in cart coords (for smooth shading)
inline void get_point_normals( std::vector<SGVec3f>& normals ) const { nodes.get_normals( normals ); }
// Debug
void set_debug( std::string path, std::vector<std::string> area_defs, std::vector<std::string> shape_defs );
private:
virtual void run();
// Ocean tile or not
bool IsOceanTile() { return isOcean; }
// Load Data
void LoadElevationArray( bool add_nodes );
int LoadLandclassPolys( void );
// Clip Data
bool ClipLandclassPolys( void );
// Clip Helpers
// void move_slivers( TGPolygon& in, TGPolygon& out );
// void merge_slivers( TGLandclass& clipped, tgcontour_list& sliver_list );
// Shared edge Matching
void SaveSharedEdgeData( int stage );
void LoadSharedEdgeData( int stage );
void LoadNeighboorEdgeDataStage1( SGBucket& b, std::vector<SGGeod>& north, std::vector<SGGeod>& south, std::vector<SGGeod>& east, std::vector<SGGeod>& west );
void ReadNeighborFaces( gzFile& fp );
void WriteNeighborFaces( gzFile& fp, const SGGeod& pt ) const;
TGNeighborFaces* AddNeighborFaces( const SGGeod& node );
TGNeighborFaces* FindNeighborFaces( const SGGeod& node );
// Polygon Cleaning
void CleanClippedPolys( void );
void FixTJunctions( void );
// Tesselation
void TesselatePolys( void );
// Elevation and Flattening
void CalcElevations( void );
void AverageEdgeElevations( void );
// Normals and texture coords
void LookupNodesPerVertex( void );
void LookupFacesPerNode( void );
void CalcFaceNormals( void );
void CalcPointNormals( void );
void CalcTextureCoordinates( void );
// Helpers
SGVec3f calc_normal( double area, const SGVec3d& p1, const SGVec3d& p2, const SGVec3d& p3 ) const;
// Output
void WriteBtgFile( void );
void AddCustomObjects( void );
// Misc
void calc_normals( std::vector<SGGeod>& geod_nodes, std::vector<SGVec3d>& wgs84_nodes, tgPolygon& sp );
// debug
bool IsDebugShape( unsigned int id );
bool IsDebugArea( unsigned int area );
private:
// construct stage to perform
SGLockedQueue<SGBucket>& workQueue;
unsigned int total_tiles;
unsigned int stage;
// path to land-cover file (if any)
std::string cover;
@ -113,135 +215,6 @@ private:
// Neighbor Faces
neighbor_face_list neighbor_faces;
private:
// Ocean tile or not
void SetOceanTile() { isOcean = true; }
bool IsOceanTile() { return isOcean; }
// Load Data
void LoadElevationArray( bool add_nodes );
int LoadLandclassPolys( void );
// Clip Data
bool ClipLandclassPolys( void );
// Clip Helpers
// void move_slivers( TGPolygon& in, TGPolygon& out );
void merge_slivers( TGLandclass& clipped, tgcontour_list& sliver_list );
// Shared edge Matching
void SaveSharedEdgeDataStage2( void );
void LoadSharedEdgeDataStage2( void );
void LoadSharedEdgeData( int stage );
void LoadNeighboorEdgeDataStage1( SGBucket& b, std::vector<SGGeod>& north, std::vector<SGGeod>& south, std::vector<SGGeod>& east, std::vector<SGGeod>& west );
void SaveSharedEdgeData( int stage );
void ReadNeighborFaces( gzFile& fp );
void WriteNeighborFaces( gzFile& fp, const SGGeod& pt ) const;
TGNeighborFaces* AddNeighborFaces( const SGGeod& node );
TGNeighborFaces* FindNeighborFaces( const SGGeod& node );
// Polygon Cleaning
void CleanClippedPolys( void );
void FixTJunctions( void );
// Tesselation
void TesselatePolys( void );
// Elevation and Flattening
void CalcElevations( void );
void AverageEdgeElevations( void );
// Normals and texture coords
void LookupNodesPerVertex( void );
void LookupFacesPerNode( void );
void CalcFaceNormals( void );
void CalcPointNormals( void );
void CalcTextureCoordinates( void );
// Helpers
SGVec3f calc_normal( double area, const SGVec3d& p1, const SGVec3d& p2, const SGVec3d& p3 ) const;
// Output
void WriteBtgFile( void );
void AddCustomObjects( void );
// Misc
void calc_normals( std::vector<SGGeod>& geod_nodes, std::vector<SGVec3d>& wgs84_nodes, tgPolygon& sp );
// debug
bool IsDebugShape( unsigned int id );
bool IsDebugArea( unsigned int area );
public:
// Constructor
TGConstruct();
// Destructor
~TGConstruct();
void set_bucket( SGBucket b ) { bucket = b; }
// New shared edge matching
void SaveToIntermediateFiles( int stage );
void LoadFromIntermediateFiles( int stage );
// Three stage construct
void ConstructBucketStage1();
void ConstructBucketStage2();
void ConstructBucketStage3();
int load_landcover ();
double measure_roughness( tgContour &contour );
AreaType get_landcover_type (const LandCover &cover, double xpos, double ypos, double dx, double dy);
void make_area( const LandCover &cover, tgpolygon_list& polys,
double x1, double y1, double x2, double y2,
double half_dx, double half_dy );
// land cover file
inline std::string get_cover () const { return cover; }
inline void set_cover (const std::string &s) { cover = s; }
// paths
void set_paths( const std::string work, const std::string share, const std::string output, const std::vector<std::string> load_dirs );
#if 0
inline std::string get_work_base() const { return work_base; }
inline void set_work_base( const std::string s ) { work_base = s; }
inline std::string get_output_base() const { return output_base; }
inline void set_output_base( const std::string s ) { output_base = s; }
inline std::string get_share_base() const { return share_base; }
inline void set_share_base( const std::string s ) { share_base = s; }
inline void set_load_dirs( const std::vector<std::string> ld ) { load_dirs = ld; }
#endif
void set_options( bool ignore_lm, double n );
#if 0
// UK grid flag
inline bool get_useUKGrid() const { return useUKGrid; }
inline void set_useUKGrid( const bool b ) { useUKGrid = b; }
// Nudge
inline void set_nudge( double n ) { nudge = n; }
// ignore landmass flag
inline void set_ignore_landmass( const bool b) { ignoreLandmass = b; }
#endif
// TODO : REMOVE
inline TGNodes* get_nodes() { return &nodes; }
// node list in geodetic coords (with fixed elevation)
inline void get_geod_nodes( std::vector<SGGeod>& points ) const { nodes.get_geod_nodes( points ); }
// normal list (for each point) in cart coords (for smooth
// shading)
inline void get_point_normals( std::vector<SGVec3f>& normals ) const { nodes.get_normals( normals ); }
// Debug
void set_debug( std::string path, std::vector<std::string> area_defs, std::vector<std::string> shape_defs );
};

View file

@ -58,6 +58,7 @@ void TGConstruct::FixTJunctions( void ) {
}
}
#if 0
// Attempt to merge slivers into a list of polygons.
//
// For each sliver contour, see if a union with another polygon yields
@ -78,6 +79,7 @@ void TGConstruct::merge_slivers( TGLandclass& clipped, tgcontour_list& sliver_l
sliver_list.clear();
}
#endif
void TGConstruct::CleanClippedPolys() {
// Clean the polys

View file

@ -73,7 +73,7 @@ int TGConstruct::LoadLandclassPolys( void ) {
} else {
int area;
std::string material;
gzFile fp =gzopen( p.c_str(), "rb" );
gzFile fp = gzopen( p.c_str(), "rb" );
unsigned int count;
sgReadUInt( fp, &count );
@ -107,13 +107,14 @@ int TGConstruct::LoadLandclassPolys( void ) {
}
}
}
gzclose( fp );
gzclose( fp );
SG_LOG(SG_GENERAL, SG_DEBUG, " Loaded " << p.file());
}
} // of directory file children
}
SG_LOG(SG_GENERAL, SG_ALERT, " Total polys read in this tile: " << total_polys_read );
return total_polys_read;

View file

@ -36,11 +36,10 @@ using std::string;
void TGConstruct::SaveSharedEdgeData( int stage )
{
string filepath;
switch( stage ) {
case 1:
{
string filepath;
std::vector<SGGeod> north, south, east, west;
int nCount;
@ -102,12 +101,189 @@ void TGConstruct::SaveSharedEdgeData( int stage )
// neighbors needs to be completed. So after all border nodes' elevations
// are updated, we'll need to traverse all of these point lists, and update
// any border nodes elevation as well
SaveSharedEdgeDataStage2();
string dir;
string file;
std::vector<SGGeod> north, south, east, west;
int nCount;
nodes.get_geod_edge( bucket, north, south, east, west );
dir = share_base + "/stage2/" + bucket.gen_base_path();
SGPath sgp( dir );
sgp.append( "dummy" );
sgp.create_dir( 0755 );
// north edge
file = dir + "/" + bucket.gen_index_str() + "_north_edge";
gzFile fp;
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = north.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
// write the 3d point
sgWriteGeod( fp, north[i] );
WriteNeighborFaces( fp, north[i] );
}
gzclose(fp);
// south edge
file = dir + "/" + bucket.gen_index_str() + "_south_edge";
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = south.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
sgWriteGeod( fp, south[i] );
WriteNeighborFaces( fp, south[i] );
}
gzclose(fp);
// east edge
file = dir + "/" + bucket.gen_index_str() + "_east_edge";
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = east.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
sgWriteGeod( fp, east[i] );
WriteNeighborFaces( fp, east[i] );
}
gzclose(fp);
// west edge
file = dir + "/" + bucket.gen_index_str() + "_west_edge";
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = west.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
sgWriteGeod( fp, west[i] );
WriteNeighborFaces( fp, west[i] );
}
gzclose(fp);
}
break;
}
}
void TGConstruct::LoadSharedEdgeData( int stage )
{
switch( stage ) {
case 1:
{
// we need to read just 4 buckets for stage 1 - 1 for each edge
std::vector<SGGeod> north, south, east, west;
SGBucket nb, sb, eb, wb;
double clon = bucket.get_center_lon();
double clat = bucket.get_center_lat();
// Read North tile and add its southern nodes
nb = sgBucketOffset(clon, clat, 0, 1);
LoadNeighboorEdgeDataStage1( nb, north, south, east, west );
// Add southern nodes from northern tile
for (unsigned int i=0; i<south.size(); i++) {
nodes.unique_add( south[i] );
}
// Read South Tile and add its northern nodes
sb = sgBucketOffset(clon, clat, 0, -1);
LoadNeighboorEdgeDataStage1( sb, north, south, east, west );
for (unsigned int i=0; i<north.size(); i++) {
nodes.unique_add( north[i] );
}
// Read East Tile and add its western nodes
eb = sgBucketOffset(clon, clat, 1, 0);
LoadNeighboorEdgeDataStage1( eb, north, south, east, west );
for (unsigned int i=0; i<west.size(); i++) {
nodes.unique_add( west[i] );
}
// Read West Tile and add its eastern nodes
wb = sgBucketOffset(clon, clat, -1, 0);
LoadNeighboorEdgeDataStage1( wb, north, south, east, west );
for (unsigned int i=0; i<east.size(); i++) {
nodes.unique_add( east[i] );
}
}
break;
case 2:
{
string dir;
string file;
double clon = bucket.get_center_lon();
double clat = bucket.get_center_lat();
gzFile fp;
SGBucket b;
// Read Northern tile and add its southern node faces
b = sgBucketOffset(clon, clat, 0, 1);
dir = share_base + "/stage2/" + b.gen_base_path();
file = dir + "/" + b.gen_index_str() + "_south_edge";
fp = gzopen( file.c_str(), "rb" );
if (fp) {
sgClearReadError();
ReadNeighborFaces( fp );
gzclose( fp );
}
// Read Southern tile and add its northern node faces
b = sgBucketOffset(clon, clat, 0, -1);
dir = share_base + "/stage2/" + b.gen_base_path();
file = dir + "/" + b.gen_index_str() + "_north_edge";
fp = gzopen( file.c_str(), "rb" );
if (fp) {
sgClearReadError();
ReadNeighborFaces( fp );
gzclose( fp );
}
// Read Eastern tile and add its western node faces
b = sgBucketOffset(clon, clat, 1, 0);
dir = share_base + "/stage2/" + b.gen_base_path();
file = dir + "/" + b.gen_index_str() + "_west_edge";
fp = gzopen( file.c_str(), "rb" );
if (fp) {
sgClearReadError();
ReadNeighborFaces( fp );
gzclose( fp );
}
// Read Western tile and add its eastern node faces
b = sgBucketOffset(clon, clat, -1, 0);
dir = share_base + "/stage2/" + b.gen_base_path();
file = dir + "/" + b.gen_index_str() + "_east_edge";
fp = gzopen( file.c_str(), "rb" );
if (fp) {
sgClearReadError();
ReadNeighborFaces( fp );
gzclose( fp );
}
}
break;
}
}
// Neighbor faces
void TGConstruct::WriteNeighborFaces( gzFile& fp, const SGGeod& pt ) const
{
// find all neighbors of this point
@ -211,143 +387,8 @@ void TGConstruct::ReadNeighborFaces( gzFile& fp )
}
}
void TGConstruct::SaveSharedEdgeDataStage2( void )
{
string dir;
string file;
std::vector<SGGeod> north, south, east, west;
int nCount;
nodes.get_geod_edge( bucket, north, south, east, west );
dir = share_base + "/stage2/" + bucket.gen_base_path();
SGPath sgp( dir );
sgp.append( "dummy" );
sgp.create_dir( 0755 );
// north edge
file = dir + "/" + bucket.gen_index_str() + "_north_edge";
gzFile fp;
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = north.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
// write the 3d point
sgWriteGeod( fp, north[i] );
WriteNeighborFaces( fp, north[i] );
}
gzclose(fp);
// south edge
file = dir + "/" + bucket.gen_index_str() + "_south_edge";
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = south.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
sgWriteGeod( fp, south[i] );
WriteNeighborFaces( fp, south[i] );
}
gzclose(fp);
// east edge
file = dir + "/" + bucket.gen_index_str() + "_east_edge";
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = east.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
sgWriteGeod( fp, east[i] );
WriteNeighborFaces( fp, east[i] );
}
gzclose(fp);
// west egde
file = dir + "/" + bucket.gen_index_str() + "_west_edge";
if ( (fp = gzopen( file.c_str(), "wb9" )) == NULL ) {
SG_LOG( SG_GENERAL, SG_INFO,"ERROR: opening " << file.c_str() << " for writing!" );
return;
}
sgClearWriteError();
nCount = west.size();
sgWriteInt( fp, nCount );
for (int i=0; i<nCount; i++) {
sgWriteGeod( fp, west[i] );
WriteNeighborFaces( fp, west[i] );
}
gzclose(fp);
}
// Load the stage2 shared-edge files written by this tile's four
// neighbors: the neighbor's edge that borders us (north neighbor's
// south edge, etc).  Missing files are silently skipped — the
// neighbor may simply not have been built yet.
void TGConstruct::LoadSharedEdgeDataStage2( void )
{
    string dir;
    string file;
    double clon = bucket.get_center_lon();
    double clat = bucket.get_center_lat();
    gzFile fp;
    SGBucket b;

    // neighbor offsets (N, S, E, W) paired with the neighbor's edge
    // file that touches this tile
    const int  dx[4]      = {  0,  0,  1, -1 };
    const int  dy[4]      = {  1, -1,  0,  0 };
    const char* suffix[4] = { "_south_edge", "_north_edge",
                              "_west_edge",  "_east_edge" };

    for ( int n = 0; n < 4; n++ ) {
        b    = sgBucketOffset(clon, clat, dx[n], dy[n]);
        dir  = share_base + "/stage2/" + b.gen_base_path();
        file = dir + "/" + b.gen_index_str() + suffix[n];
        fp   = gzopen( file.c_str(), "rb" );
        if (fp) {
            sgClearReadError();
            ReadNeighborFaces( fp );
            gzclose( fp );
        }
    }
}
// Tile data
void TGConstruct::SaveToIntermediateFiles( int stage )
{
string dir;
@ -474,50 +515,6 @@ void TGConstruct::LoadNeighboorEdgeDataStage1( SGBucket& b, std::vector<SGGeod>&
}
}
void TGConstruct::LoadSharedEdgeData( int stage )
{
switch( stage ) {
case 1:
{
// we need to read just 4 buckets for stage 1 - 1 for each edge
std::vector<SGGeod> north, south, east, west;
SGBucket nb, sb, eb, wb;
double clon = bucket.get_center_lon();
double clat = bucket.get_center_lat();
// Read North tile and add its southern nodes
nb = sgBucketOffset(clon, clat, 0, 1);
LoadNeighboorEdgeDataStage1( nb, north, south, east, west );
// Add southern nodes from northern tile
for (unsigned int i=0; i<south.size(); i++) {
nodes.unique_add( south[i] );
}
// Read South Tile and add its northern nodes
sb = sgBucketOffset(clon, clat, 0, -1);
LoadNeighboorEdgeDataStage1( sb, north, south, east, west );
for (unsigned int i=0; i<north.size(); i++) {
nodes.unique_add( north[i] );
}
// Read East Tile and add its western nodes
eb = sgBucketOffset(clon, clat, 1, 0);
LoadNeighboorEdgeDataStage1( eb, north, south, east, west );
for (unsigned int i=0; i<west.size(); i++) {
nodes.unique_add( west[i] );
}
// Read West Tile and add its eastern nodes
wb = sgBucketOffset(clon, clat, -1, 0);
LoadNeighboorEdgeDataStage1( wb, north, south, east, west );
for (unsigned int i=0; i<east.size(); i++) {
nodes.unique_add( east[i] );
}
}
break;
}
}
void TGConstruct::LoadFromIntermediateFiles( int stage )
{
string dir;
@ -576,6 +573,6 @@ void TGConstruct::LoadFromIntermediateFiles( int stage )
}
if ( !read_ok ) {
SetOceanTile();
isOcean = true;
}
}

View file

@ -45,7 +45,7 @@ class TGLandclass
public:
void clear(void);
inline unsigned int area_size( unsigned int area )
inline unsigned int area_size( unsigned int area ) const
{
return polys[area].size();
}

View file

@ -243,12 +243,6 @@ void TGNodes::Dump( void ) {
}
SG_LOG(SG_GENERAL, SG_ALERT, "Point[" << i << "] is " << node.GetPosition() << fixed );
if ( node.GetFaces().size() ) {
TGFaceList faces = node.GetFaces();
for (unsigned int j=0; j<faces.size(); j++) {
SG_LOG(SG_GENERAL, SG_ALERT, "\tface " << faces[j].area << "," << faces[j].poly << "," << faces[j].tri );
}
}
}
}

View file

@ -58,6 +58,7 @@ public:
// delete all the data out of node_list
inline void clear() {
tg_node_list.clear();
kd_tree_valid = false;
}
@ -122,7 +123,6 @@ public:
return tg_node_list[index];
}
inline void AddFace( int i, unsigned int area, unsigned int poly, unsigned int tri )
{
tg_node_list[i].AddFace( area, poly, tri );
@ -140,61 +140,6 @@ private:
UniqueTGNodeSet tg_node_list;
Tree tg_kd_tree;
bool kd_tree_valid;
#if 0
// return true of the two points are "close enough" as defined by
// FG_PROXIMITY_EPSILON
bool close_enough_2d( const SGGeod& p1, const SGGeod& p2 ) const;
// return true of the two points are "close enough" as defined by
// FG_PROXIMITY_EPSILON
bool close_enough_3d( const SGGeod& p1, const SGGeod& p2 ) const;
// return true of the two points are "close enough" as defined by
// FG_COURSE_EPSILON
bool course_close_enough( const SGGeod& p1, const SGGeod& p2 );
#endif
};
#if 0
// return true of the two points are "close enough" as defined by
// FG_PROXIMITY_EPSILON checking just x and y dimensions
inline bool TGNodes::close_enough_2d( const SGGeod& p1, const SGGeod& p2 )
const
{
if ( ( fabs(p1.getLongitudeDeg() - p2.getLongitudeDeg()) < FG_PROXIMITY_EPSILON ) &&
( fabs(p1.getLatitudeDeg() - p2.getLatitudeDeg()) < FG_PROXIMITY_EPSILON ) ) {
return true;
} else {
return false;
}
}
// return true of the two points are "close enough" as defined by
// FG_PROXIMITY_EPSILON check all three dimensions
inline bool TGNodes::close_enough_3d( const SGGeod& p1, const SGGeod& p2 )
const
{
if ( ( fabs(p1.getLongitudeDeg() - p2.getLongitudeDeg()) < FG_PROXIMITY_EPSILON ) &&
( fabs(p1.getLatitudeDeg() - p2.getLatitudeDeg()) < FG_PROXIMITY_EPSILON ) &&
( fabs(p1.getElevationM() - p2.getElevationM()) < FG_PROXIMITY_EPSILON ) ) {
return true;
} else {
return false;
}
}
// return true of the two points are "close enough" as defined by
// FG_COURSE_EPSILON
inline bool TGNodes::course_close_enough( const SGGeod& p1, const SGGeod& p2 )
{
if ( ( fabs(p1.getLongitudeDeg() - p2.getLongitudeDeg()) < FG_COURSE_EPSILON ) &&
( fabs(p1.getLatitudeDeg() - p2.getLatitudeDeg()) < FG_COURSE_EPSILON ) ) {
return true;
} else {
return false;
}
}
#endif
#endif // _TG_NODES_HXX

View file

@ -222,6 +222,11 @@ public:
return index;
}
void clear( void ) {
index_list.clear();
node_list.clear();
}
TGNode const& operator[]( int index ) const {
return node_list[index];
}
@ -255,7 +260,7 @@ public:
}
private:
unique_tgnode_set index_list;
unique_tgnode_set index_list;
std::vector<TGNode> node_list;
};